Posted to commits@hive.apache.org by sp...@apache.org on 2016/05/28 02:10:10 UTC

[01/34] hive git commit: HIVE-13587: Set Hive pom to use Hadoop 2.6.1 (Mohit Sabharwal, reviewed by Sergio Pena)

Repository: hive
Updated Branches:
  refs/heads/master 0d67cb0b7 -> f38a42e52


HIVE-13587: Set Hive pom to use Hadoop 2.6.1 (Mohit Sabharwal, reviewed by Sergio Pena)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0bd21b59
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0bd21b59
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0bd21b59

Branch: refs/heads/master
Commit: 0bd21b59843668d5bdf426648ca0be6c21291934
Parents: 0d67cb0
Author: Sergio Pena <se...@cloudera.com>
Authored: Mon May 2 13:08:54 2016 -0500
Committer: Sergio Pena <se...@cloudera.com>
Committed: Fri May 27 21:08:30 2016 -0500

----------------------------------------------------------------------
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/0bd21b59/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 117aec9..3176caf 100644
--- a/pom.xml
+++ b/pom.xml
@@ -132,7 +132,7 @@
     <dropwizard-metrics-hadoop-metrics2-reporter.version>0.1.0</dropwizard-metrics-hadoop-metrics2-reporter.version>
     <guava.version>14.0.1</guava.version>
     <groovy.version>2.4.4</groovy.version>
-    <hadoop.version>2.6.0</hadoop.version>
+    <hadoop.version>2.6.1</hadoop.version>
     <hadoop.bin.path>${basedir}/${hive.path.to.root}/testutils/hadoop</hadoop.bin.path>
     <hbase.version>1.1.1</hbase.version>
     <!-- required for logging test to avoid including hbase which pulls disruptor transitively -->
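
Note: assuming standard Maven behavior, the same version bump can be exercised
without editing the pom, since a -D property given on the command line
overrides a <properties> entry of the same name. An illustrative invocation
(not part of the commit):

    mvn clean install -DskipTests -Dhadoop.version=2.6.1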


[32/34] hive git commit: HIVE-13409: Fix JDK8 test failures related to COLUMN_STATS_ACCURATE (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
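
The golden-file updates below stem from a JSON key-ordering difference:
HashMap iteration order is unspecified and changed between JDK7 and JDK8, so
serializing the COLUMN_STATS_ACCURATE map emits its keys in a different order
under JDK8. A minimal, self-contained sketch of the effect, and of the usual
remedy of sorting keys before serialization (illustrative code, not the
actual Hive patch):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;

    public class StatsOrderDemo {
        public static void main(String[] args) {
            // HashMap iteration order is unspecified and differs across JDKs.
            Map<String, String> stats = new HashMap<>();
            stats.put("COLUMN_STATS", "{\"key\":\"true\",\"value\":\"true\"}");
            stats.put("BASIC_STATS", "true");
            System.out.println(stats);                // key order may vary by JDK

            // Copying into a TreeMap sorts keys, giving stable, JDK-independent
            // output: BASIC_STATS before COLUMN_STATS, matching the updated
            // expected files in the hunks below.
            System.out.println(new TreeMap<>(stats));
        }
    }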
http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out b/ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out
index 9e9e61f..4352914 100644
--- a/ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out
+++ b/ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out
@@ -70,7 +70,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -116,7 +116,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -259,7 +259,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -305,7 +305,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -351,7 +351,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -397,7 +397,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/rand_partitionpruner1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/rand_partitionpruner1.q.out b/ql/src/test/results/clientpositive/rand_partitionpruner1.q.out
index e03c055..f3fd8f8 100644
--- a/ql/src/test/results/clientpositive/rand_partitionpruner1.q.out
+++ b/ql/src/test/results/clientpositive/rand_partitionpruner1.q.out
@@ -55,7 +55,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -75,7 +75,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out b/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out
index de1d6f4..df42672 100644
--- a/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out
+++ b/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out
@@ -87,7 +87,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -133,7 +133,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out b/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out
index eabf9d9..6377e95 100644
--- a/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out
+++ b/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out
@@ -21,7 +21,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -109,7 +109,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/reduce_deduplicate.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/reduce_deduplicate.q.out b/ql/src/test/results/clientpositive/reduce_deduplicate.q.out
index 075336b..dfcbd7d 100644
--- a/ql/src/test/results/clientpositive/reduce_deduplicate.q.out
+++ b/ql/src/test/results/clientpositive/reduce_deduplicate.q.out
@@ -49,7 +49,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -69,7 +69,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/regexp_extract.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/regexp_extract.q.out b/ql/src/test/results/clientpositive/regexp_extract.q.out
index 7026df3..fb7794c 100644
--- a/ql/src/test/results/clientpositive/regexp_extract.q.out
+++ b/ql/src/test/results/clientpositive/regexp_extract.q.out
@@ -68,7 +68,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -88,7 +88,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -317,7 +317,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -337,7 +337,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/router_join_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/router_join_ppr.q.out b/ql/src/test/results/clientpositive/router_join_ppr.q.out
index 1f79be9..bf46fe9 100644
--- a/ql/src/test/results/clientpositive/router_join_ppr.q.out
+++ b/ql/src/test/results/clientpositive/router_join_ppr.q.out
@@ -79,7 +79,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -99,7 +99,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -126,7 +126,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -172,7 +172,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -218,7 +218,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -264,7 +264,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -474,7 +474,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -494,7 +494,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -521,7 +521,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -567,7 +567,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -770,7 +770,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -790,7 +790,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -817,7 +817,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -863,7 +863,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1062,7 +1062,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1082,7 +1082,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -1109,7 +1109,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1155,7 +1155,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/sample1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/sample1.q.out b/ql/src/test/results/clientpositive/sample1.q.out
index 57e61b9..f57519b 100644
--- a/ql/src/test/results/clientpositive/sample1.q.out
+++ b/ql/src/test/results/clientpositive/sample1.q.out
@@ -87,7 +87,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/sample2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/sample2.q.out b/ql/src/test/results/clientpositive/sample2.q.out
index 92f0d5a..096805d 100644
--- a/ql/src/test/results/clientpositive/sample2.q.out
+++ b/ql/src/test/results/clientpositive/sample2.q.out
@@ -85,7 +85,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count 2
               bucket_field_name key
               columns key,value
@@ -106,7 +106,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count 2
                 bucket_field_name key
                 columns key,value

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/sample4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/sample4.q.out b/ql/src/test/results/clientpositive/sample4.q.out
index b4e58c5..72395c9 100644
--- a/ql/src/test/results/clientpositive/sample4.q.out
+++ b/ql/src/test/results/clientpositive/sample4.q.out
@@ -85,7 +85,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count 2
               bucket_field_name key
               columns key,value
@@ -106,7 +106,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count 2
                 bucket_field_name key
                 columns key,value

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/sample5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/sample5.q.out b/ql/src/test/results/clientpositive/sample5.q.out
index c786f21..147a567 100644
--- a/ql/src/test/results/clientpositive/sample5.q.out
+++ b/ql/src/test/results/clientpositive/sample5.q.out
@@ -86,7 +86,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count 2
               bucket_field_name key
               columns key,value
@@ -107,7 +107,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count 2
                 bucket_field_name key
                 columns key,value

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/sample6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/sample6.q.out b/ql/src/test/results/clientpositive/sample6.q.out
index 519647f..a34258d 100644
--- a/ql/src/test/results/clientpositive/sample6.q.out
+++ b/ql/src/test/results/clientpositive/sample6.q.out
@@ -83,7 +83,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count 2
               bucket_field_name key
               columns key,value
@@ -104,7 +104,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count 2
                 bucket_field_name key
                 columns key,value
@@ -652,7 +652,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count 2
               bucket_field_name key
               columns key,value
@@ -673,7 +673,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count 2
                 bucket_field_name key
                 columns key,value
@@ -1021,7 +1021,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count 2
               bucket_field_name key
               columns key,value
@@ -1042,7 +1042,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count 2
                 bucket_field_name key
                 columns key,value
@@ -1643,7 +1643,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count 2
               bucket_field_name key
               columns key,value
@@ -1664,7 +1664,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count 2
                 bucket_field_name key
                 columns key,value
@@ -2108,7 +2108,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count 2
               bucket_field_name key
               columns key,value
@@ -2129,7 +2129,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count 2
                 bucket_field_name key
                 columns key,value
@@ -2560,7 +2560,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count 4
               bucket_field_name key
               columns key,value
@@ -2581,7 +2581,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count 4
                 bucket_field_name key
                 columns key,value
@@ -2606,7 +2606,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count 4
               bucket_field_name key
               columns key,value
@@ -2627,7 +2627,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count 4
                 bucket_field_name key
                 columns key,value
@@ -2859,7 +2859,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count 4
               bucket_field_name key
               columns key,value
@@ -2880,7 +2880,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count 4
                 bucket_field_name key
                 columns key,value

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/sample7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/sample7.q.out b/ql/src/test/results/clientpositive/sample7.q.out
index 2352cdc..51a45dd 100644
--- a/ql/src/test/results/clientpositive/sample7.q.out
+++ b/ql/src/test/results/clientpositive/sample7.q.out
@@ -84,7 +84,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count 2
               bucket_field_name key
               columns key,value
@@ -105,7 +105,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count 2
                 bucket_field_name key
                 columns key,value

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/sample8.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/sample8.q.out b/ql/src/test/results/clientpositive/sample8.q.out
index dd55fa0..b316331 100644
--- a/ql/src/test/results/clientpositive/sample8.q.out
+++ b/ql/src/test/results/clientpositive/sample8.q.out
@@ -68,7 +68,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -114,7 +114,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -160,7 +160,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -206,7 +206,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/sample9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/sample9.q.out b/ql/src/test/results/clientpositive/sample9.q.out
index 14a512a..a4c3ff6 100644
--- a/ql/src/test/results/clientpositive/sample9.q.out
+++ b/ql/src/test/results/clientpositive/sample9.q.out
@@ -58,7 +58,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count 2
               bucket_field_name key
               columns key,value
@@ -79,7 +79,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count 2
                 bucket_field_name key
                 columns key,value

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/schema_evol_stats.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/schema_evol_stats.q.out b/ql/src/test/results/clientpositive/schema_evol_stats.q.out
index 63dab2e..63b4c19 100644
--- a/ql/src/test/results/clientpositive/schema_evol_stats.q.out
+++ b/ql/src/test/results/clientpositive/schema_evol_stats.q.out
@@ -109,7 +109,7 @@ Database:           	default
 Table:              	partitioned1        	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"}}
 	numFiles            	1                   
 	numRows             	4                   
 	rawDataSize         	40                  
@@ -150,7 +150,7 @@ Database:           	default
 Table:              	partitioned1        	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"}}
 	numFiles            	1                   
 	numRows             	4                   
 	rawDataSize         	56                  
@@ -309,7 +309,7 @@ Database:           	default
 Table:              	partitioned1        	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"}}
 	numFiles            	1                   
 	numRows             	4                   
 	rawDataSize         	384                 
@@ -350,7 +350,7 @@ Database:           	default
 Table:              	partitioned1        	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"}}
 	numFiles            	1                   
 	numRows             	4                   
 	rawDataSize         	732                 

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/serde_user_properties.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/serde_user_properties.q.out b/ql/src/test/results/clientpositive/serde_user_properties.q.out
index 1dbc274..d5b81ed 100644
--- a/ql/src/test/results/clientpositive/serde_user_properties.q.out
+++ b/ql/src/test/results/clientpositive/serde_user_properties.q.out
@@ -101,7 +101,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -121,7 +121,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -225,7 +225,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -246,7 +246,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -351,7 +351,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -372,7 +372,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/bucket2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket2.q.out b/ql/src/test/results/clientpositive/spark/bucket2.q.out
index f9d4782..dd23a25 100644
--- a/ql/src/test/results/clientpositive/spark/bucket2.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket2.q.out
@@ -57,7 +57,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -77,7 +77,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/bucket3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket3.q.out b/ql/src/test/results/clientpositive/spark/bucket3.q.out
index 39d9c33..f4acd71 100644
--- a/ql/src/test/results/clientpositive/spark/bucket3.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket3.q.out
@@ -57,7 +57,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -77,7 +77,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/bucket4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket4.q.out b/ql/src/test/results/clientpositive/spark/bucket4.q.out
index 68f8143..b1ef928 100644
--- a/ql/src/test/results/clientpositive/spark/bucket4.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket4.q.out
@@ -54,7 +54,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -74,7 +74,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/ctas.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/ctas.q.out b/ql/src/test/results/clientpositive/spark/ctas.q.out
index 5396ada..f7165fc 100644
--- a/ql/src/test/results/clientpositive/spark/ctas.q.out
+++ b/ql/src/test/results/clientpositive/spark/ctas.q.out
@@ -720,7 +720,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -740,7 +740,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out b/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out
index c8503cd..8cefe46 100644
--- a/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out
+++ b/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out
@@ -53,7 +53,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -73,7 +73,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/groupby_map_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby_map_ppr.q.out b/ql/src/test/results/clientpositive/spark/groupby_map_ppr.q.out
index f97f63e..beae497 100644
--- a/ql/src/test/results/clientpositive/spark/groupby_map_ppr.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby_map_ppr.q.out
@@ -73,7 +73,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -119,7 +119,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/groupby_map_ppr_multi_distinct.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby_map_ppr_multi_distinct.q.out b/ql/src/test/results/clientpositive/spark/groupby_map_ppr_multi_distinct.q.out
index c833657..2ad4d68 100644
--- a/ql/src/test/results/clientpositive/spark/groupby_map_ppr_multi_distinct.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby_map_ppr_multi_distinct.q.out
@@ -73,7 +73,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -119,7 +119,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/groupby_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby_ppr.q.out b/ql/src/test/results/clientpositive/spark/groupby_ppr.q.out
index a2c2ced..f1e1027 100644
--- a/ql/src/test/results/clientpositive/spark/groupby_ppr.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby_ppr.q.out
@@ -66,7 +66,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -112,7 +112,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/groupby_ppr_multi_distinct.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/groupby_ppr_multi_distinct.q.out b/ql/src/test/results/clientpositive/spark/groupby_ppr_multi_distinct.q.out
index 531854b..5251241 100644
--- a/ql/src/test/results/clientpositive/spark/groupby_ppr_multi_distinct.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby_ppr_multi_distinct.q.out
@@ -66,7 +66,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -112,7 +112,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/input_part2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/input_part2.q.out b/ql/src/test/results/clientpositive/spark/input_part2.q.out
index 4799a7f..36bb40f 100644
--- a/ql/src/test/results/clientpositive/spark/input_part2.q.out
+++ b/ql/src/test/results/clientpositive/spark/input_part2.q.out
@@ -136,7 +136,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -182,7 +182,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/join17.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join17.q.out b/ql/src/test/results/clientpositive/spark/join17.q.out
index 3acf7f9..a7103cb 100644
--- a/ql/src/test/results/clientpositive/spark/join17.q.out
+++ b/ql/src/test/results/clientpositive/spark/join17.q.out
@@ -62,7 +62,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -82,7 +82,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -134,7 +134,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -154,7 +154,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/join26.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join26.q.out b/ql/src/test/results/clientpositive/spark/join26.q.out
index 4967ab6..cacfe42 100644
--- a/ql/src/test/results/clientpositive/spark/join26.q.out
+++ b/ql/src/test/results/clientpositive/spark/join26.q.out
@@ -60,7 +60,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -80,7 +80,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -127,7 +127,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -147,7 +147,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -245,7 +245,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/join32.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join32.q.out b/ql/src/test/results/clientpositive/spark/join32.q.out
index be29cd5..7cecbc6 100644
--- a/ql/src/test/results/clientpositive/spark/join32.q.out
+++ b/ql/src/test/results/clientpositive/spark/join32.q.out
@@ -67,7 +67,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -87,7 +87,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -137,7 +137,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -157,7 +157,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -267,7 +267,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out b/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out
index 8a99a56..97c520c 100644
--- a/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out
+++ b/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out
@@ -75,7 +75,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -95,7 +95,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -145,7 +145,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -165,7 +165,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -275,7 +275,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -518,7 +518,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -538,7 +538,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -604,7 +604,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -624,7 +624,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -674,7 +674,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -694,7 +694,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -801,7 +801,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -821,7 +821,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -1043,7 +1043,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1112,7 +1112,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1132,7 +1132,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -1243,7 +1243,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1263,7 +1263,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -1487,7 +1487,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1552,7 +1552,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1572,7 +1572,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -1683,7 +1683,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1703,7 +1703,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/join33.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join33.q.out b/ql/src/test/results/clientpositive/spark/join33.q.out
index be29cd5..7cecbc6 100644
--- a/ql/src/test/results/clientpositive/spark/join33.q.out
+++ b/ql/src/test/results/clientpositive/spark/join33.q.out
@@ -67,7 +67,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -87,7 +87,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -137,7 +137,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -157,7 +157,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -267,7 +267,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
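
Every hunk above makes the same change: the JSON value stored under COLUMN_STATS_ACCURATE in the expected .q.out files now lists its keys in alphabetical order (BASIC_STATS before COLUMN_STATS, and key before value inside the nested map). The old order tracked HashMap iteration, which changed between JDK 7 and JDK 8, so golden files recorded whichever order the generating JDK happened to produce. Below is a minimal, self-contained sketch of the idea behind the fix -- route the map through a sorted view before serializing so key order no longer depends on the JDK. StableStatsJson and toJson are illustrative names only, not Hive's actual serializer:

    import java.util.LinkedHashMap;
    import java.util.Map;
    import java.util.TreeMap;

    // Illustrative only -- StableStatsJson is not Hive's code. It shows why
    // serializing through a TreeMap yields alphabetical key order on every
    // JDK, instead of tracking HashMap iteration order, which differs
    // between JDK 7 and JDK 8.
    public class StableStatsJson {

        @SuppressWarnings("unchecked")
        static String toJson(Map<String, Object> props) {
            StringBuilder sb = new StringBuilder("{");
            boolean first = true;
            // TreeMap iterates its keys in natural (sorted) order on any JDK.
            for (Map.Entry<String, Object> e : new TreeMap<>(props).entrySet()) {
                if (!first) {
                    sb.append(",");
                }
                first = false;
                sb.append("\"").append(e.getKey()).append("\":");
                Object v = e.getValue();
                if (v instanceof Map) {
                    sb.append(toJson((Map<String, Object>) v));  // nested COLUMN_STATS map
                } else {
                    sb.append("\"").append(v).append("\"");
                }
            }
            return sb.append("}").toString();
        }

        public static void main(String[] args) {
            Map<String, Object> columnStats = new LinkedHashMap<>();
            columnStats.put("key", "true");
            columnStats.put("value", "true");
            Map<String, Object> accurate = new LinkedHashMap<>();
            accurate.put("COLUMN_STATS", columnStats);  // old golden files listed this first
            accurate.put("BASIC_STATS", "true");
            System.out.println(toJson(accurate));
        }
    }

Run on either JDK, this prints {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}, matching the "+" lines in the hunks above.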


[28/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/22541610
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/22541610
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/22541610

Branch: refs/heads/master
Commit: 22541610c1db697ad2eca029f08ad8194c8c373f
Parents: 0bd21b5
Author: Mohit Sabharwal <mo...@cloudera.com>
Authored: Fri May 20 11:14:13 2016 -0500
Committer: Sergio Pena <se...@cloudera.com>
Committed: Fri May 27 21:08:31 2016 -0500

----------------------------------------------------------------------
 .../columnstats_partlvl_invalid_values.q        |    1 -
 .../clientpositive/authorization_explain.q      |    1 -
 ql/src/test/queries/clientpositive/avro_date.q  |    1 -
 .../clientpositive/avro_deserialize_map_null.q  |    1 -
 .../clientpositive/avro_nullable_fields.q       |    1 -
 .../queries/clientpositive/avro_timestamp.q     |    1 -
 .../clientpositive/cbo_rp_outer_join_ppr.q      |    1 -
 ql/src/test/queries/clientpositive/char_udf1.q  |    1 -
 ql/src/test/queries/clientpositive/input4.q     |    1 -
 ql/src/test/queries/clientpositive/join0.q      |    1 -
 .../queries/clientpositive/list_bucket_dml_10.q |    1 -
 .../queries/clientpositive/list_bucket_dml_11.q |    1 -
 .../queries/clientpositive/list_bucket_dml_12.q |    1 -
 .../queries/clientpositive/list_bucket_dml_13.q |    1 -
 .../queries/clientpositive/list_bucket_dml_2.q  |    1 -
 .../queries/clientpositive/list_bucket_dml_4.q  |    1 -
 .../queries/clientpositive/list_bucket_dml_5.q  |    1 -
 .../queries/clientpositive/list_bucket_dml_6.q  |    1 -
 .../queries/clientpositive/list_bucket_dml_8.q  |    1 -
 .../queries/clientpositive/list_bucket_dml_9.q  |    1 -
 .../queries/clientpositive/outer_join_ppr.q     |    1 -
 .../queries/clientpositive/parquet_map_null.q   |    1 -
 ql/src/test/queries/clientpositive/plan_json.q  |    1 -
 .../queries/clientpositive/stats_list_bucket.q  |    1 -
 ql/src/test/queries/clientpositive/str_to_map.q |    1 -
 .../clientpositive/subquery_multiinsert.q       |    1 -
 .../clientpositive/subquery_notin_having.q      |    1 -
 .../test/queries/clientpositive/varchar_udf1.q  |    1 -
 .../clientpositive/vector_cast_constant.q       |    1 -
 ...mnstats_partlvl_invalid_values.q.java1.7.out |   73 --
 ...mnstats_partlvl_invalid_values.q.java1.8.out |   73 --
 .../columnstats_partlvl_invalid_values.q.out    |   69 ++
 .../authorization_explain.q.java1.7.out         |   44 -
 .../authorization_explain.q.java1.8.out         |   47 -
 .../clientpositive/authorization_explain.q.out  |   40 +
 .../clientpositive/avro_date.q.java1.7.out      |  130 --
 .../clientpositive/avro_date.q.java1.8.out      |  130 --
 .../test/results/clientpositive/avro_date.q.out |  126 ++
 .../avro_deserialize_map_null.q.java1.7.out     |   57 -
 .../avro_deserialize_map_null.q.java1.8.out     |   57 -
 .../avro_deserialize_map_null.q.out             |   55 +
 .../avro_nullable_fields.q.java1.7.out          |  179 ---
 .../avro_nullable_fields.q.java1.8.out          |  179 ---
 .../clientpositive/avro_nullable_fields.q.out   |  177 +++
 .../clientpositive/avro_timestamp.q.java1.7.out |  134 ---
 .../clientpositive/avro_timestamp.q.java1.8.out |  134 ---
 .../results/clientpositive/avro_timestamp.q.out |  132 +++
 .../cbo_rp_outer_join_ppr.q.java1.7.out         |  693 -----------
 .../clientpositive/cbo_rp_outer_join_ppr.q.out  |  691 +++++++++++
 .../clientpositive/char_udf1.q.java1.7.out      |  463 --------
 .../clientpositive/char_udf1.q.java1.8.out      |  457 -------
 .../test/results/clientpositive/char_udf1.q.out |  459 +++++++
 .../results/clientpositive/input4.q.java1.7.out |  559 ---------
 .../results/clientpositive/input4.q.java1.8.out |  559 ---------
 ql/src/test/results/clientpositive/input4.q.out |  555 +++++++++
 .../results/clientpositive/join0.q.java1.7.out  |  240 ----
 .../results/clientpositive/join0.q.java1.8.out  |  240 ----
 ql/src/test/results/clientpositive/join0.q.out  |  238 ++++
 .../list_bucket_dml_10.q.java1.7.out            |  361 ------
 .../list_bucket_dml_10.q.java1.8.out            |  389 ------
 .../clientpositive/list_bucket_dml_10.q.out     |  359 ++++++
 .../list_bucket_dml_11.q.java1.7.out            |  329 -----
 .../list_bucket_dml_11.q.java1.8.out            |  424 -------
 .../clientpositive/list_bucket_dml_11.q.out     |  327 +++++
 .../list_bucket_dml_12.q.java1.7.out            |  426 -------
 .../list_bucket_dml_12.q.java1.8.out            |  596 ----------
 .../clientpositive/list_bucket_dml_12.q.out     |  424 +++++++
 .../list_bucket_dml_13.q.java1.7.out            |  337 ------
 .../list_bucket_dml_13.q.java1.8.out            |  439 -------
 .../clientpositive/list_bucket_dml_13.q.out     |  335 ++++++
 .../list_bucket_dml_2.q.java1.7.out             |  591 ---------
 .../list_bucket_dml_2.q.java1.8.out             |  692 -----------
 .../clientpositive/list_bucket_dml_2.q.out      |  589 +++++++++
 .../list_bucket_dml_4.q.java1.7.out             |  813 -------------
 .../list_bucket_dml_4.q.java1.8.out             |  915 --------------
 .../clientpositive/list_bucket_dml_4.q.out      |  811 +++++++++++++
 .../list_bucket_dml_5.q.java1.7.out             |  506 --------
 .../list_bucket_dml_5.q.java1.8.out             |  617 ----------
 .../clientpositive/list_bucket_dml_5.q.out      |  504 ++++++++
 .../list_bucket_dml_6.q.java1.7.out             | 1007 ----------------
 .../list_bucket_dml_6.q.java1.8.out             | 1119 ------------------
 .../clientpositive/list_bucket_dml_6.q.out      | 1005 ++++++++++++++++
 .../list_bucket_dml_8.q.java1.7.out             |  641 ----------
 .../list_bucket_dml_8.q.java1.8.out             |  712 -----------
 .../clientpositive/list_bucket_dml_8.q.out      |  639 ++++++++++
 .../list_bucket_dml_9.q.java1.7.out             |  813 -------------
 .../list_bucket_dml_9.q.java1.8.out             |  915 --------------
 .../clientpositive/list_bucket_dml_9.q.out      |  811 +++++++++++++
 .../clientpositive/llap/join0.q.java1.7.out     |  242 ----
 .../clientpositive/llap/join0.q.java1.8.out     |  242 ----
 .../results/clientpositive/llap/join0.q.out     |  243 ++++
 .../llap/vector_cast_constant.q.java1.7.out     |  217 ----
 .../llap/vector_cast_constant.q.java1.8.out     |  217 ----
 .../llap/vector_cast_constant.q.out             |  216 ++++
 .../clientpositive/outer_join_ppr.q.java1.7.out |  685 -----------
 .../clientpositive/outer_join_ppr.q.java1.8.out |  855 -------------
 .../results/clientpositive/outer_join_ppr.q.out |  683 +++++++++++
 .../parquet_map_null.q.java1.7.out              |   70 --
 .../parquet_map_null.q.java1.8.out              |   70 --
 .../clientpositive/parquet_map_null.q.out       |   68 ++
 .../clientpositive/plan_json.q.java1.7.out      |   13 -
 .../clientpositive/plan_json.q.java1.8.out      |   13 -
 .../test/results/clientpositive/plan_json.q.out |   11 +
 .../clientpositive/spark/join0.q.java1.7.out    |  238 ----
 .../clientpositive/spark/join0.q.java1.8.out    |  238 ----
 .../results/clientpositive/spark/join0.q.out    |   20 +-
 .../spark/list_bucket_dml_10.q.java1.7.out      |  252 ----
 .../spark/list_bucket_dml_10.q.java1.8.out      |  280 -----
 .../spark/list_bucket_dml_10.q.out              |  250 ++++
 .../spark/list_bucket_dml_2.q.java1.7.out       |  591 ---------
 .../spark/list_bucket_dml_2.q.java1.8.out       |  663 -----------
 .../spark/list_bucket_dml_2.q.out               |  Bin 28667 -> 27128 bytes
 .../spark/outer_join_ppr.q.java1.7.out          |  709 -----------
 .../spark/outer_join_ppr.q.java1.8.out          |  879 --------------
 .../clientpositive/spark/outer_join_ppr.q.out   |  490 ++------
 .../spark/subquery_multiinsert.q.java1.7.out    |  886 --------------
 .../spark/subquery_multiinsert.q.java1.8.out    |  890 --------------
 .../spark/subquery_multiinsert.q.out            |   56 +-
 .../spark/vector_cast_constant.q.java1.7.out    |  217 ----
 .../spark/vector_cast_constant.q.java1.8.out    |  203 ----
 .../spark/vector_cast_constant.q.out            |   54 +-
 .../stats_list_bucket.q.java1.7.out             |  191 ---
 .../stats_list_bucket.q.java1.8.out             |  193 ---
 .../clientpositive/stats_list_bucket.q.out      |  189 +++
 .../clientpositive/str_to_map.q.java1.7.out     |  220 ----
 .../clientpositive/str_to_map.q.java1.8.out     |  219 ----
 .../results/clientpositive/str_to_map.q.out     |  216 ++++
 .../subquery_multiinsert.q.java1.7.out          |  999 ----------------
 .../subquery_multiinsert.q.java1.8.out          |  999 ----------------
 .../clientpositive/subquery_multiinsert.q.out   |  997 ++++++++++++++++
 .../subquery_notin_having.q.java1.7.out         |  766 ------------
 .../subquery_notin_having.q.java1.8.out         |  762 ------------
 .../clientpositive/subquery_notin_having.q.out  |  764 ++++++++++++
 .../clientpositive/tez/join0.q.java1.7.out      |  239 ----
 .../clientpositive/tez/join0.q.java1.8.out      |  236 ----
 .../test/results/clientpositive/tez/join0.q.out |  237 ++++
 .../tez/vector_cast_constant.q.java1.7.out      |  218 ----
 .../tez/vector_cast_constant.q.java1.8.out      |  216 ----
 .../tez/vector_cast_constant.q.out              |  214 ++++
 .../clientpositive/varchar_udf1.q.java1.7.out   |  457 -------
 .../clientpositive/varchar_udf1.q.java1.8.out   |  457 -------
 .../results/clientpositive/varchar_udf1.q.out   |  453 +++++++
 .../vector_cast_constant.q.java1.7.out          |  220 ----
 .../vector_cast_constant.q.java1.8.out          |  197 ---
 .../clientpositive/vector_cast_constant.q.out   |   53 +-
 145 files changed, 13120 insertions(+), 32818 deletions(-)
----------------------------------------------------------------------
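
The deleted "-- JAVA_VERSION_SPECIFIC_OUTPUT" tags in the diffs below marked tests whose EXPLAIN output depended on the running JDK, which is why the diffstat above removes a .q.java1.7.out and .q.java1.8.out pair for each such test while adding a single .q.out. With plan output now deterministic (HIVE-13409 above), one golden file per test suffices. The sketch below is hypothetical -- GoldenFileResolver and resolve are illustrative names, not Hive's QTestUtil API -- but it captures the per-JDK golden-file lookup that the tag implied and that this patch retires:

    import java.io.File;

    // Hypothetical sketch -- not Hive's QTestUtil. Tagged tests compared
    // their output against a per-JDK golden file such as
    // join0.q.java1.7.out; untagged tests (every test, after this patch)
    // compare against a single canonical join0.q.out.
    public class GoldenFileResolver {

        static File resolve(File resultsDir, String queryName, boolean versionSpecific) {
            if (versionSpecific) {
                // Yields "1.7" or "1.8" on the JDKs these tests targeted.
                String jdk = System.getProperty("java.version").substring(0, 3);
                File versioned = new File(resultsDir, queryName + ".q.java" + jdk + ".out");
                if (versioned.exists()) {
                    return versioned;
                }
            }
            return new File(resultsDir, queryName + ".q.out");
        }

        public static void main(String[] args) {
            File dir = new File("ql/src/test/results/clientpositive");
            System.out.println(resolve(dir, "join0", false));
        }
    }

With the tag gone, the version-specific branch is never taken: every JDK checks the same .q.out file, which is what lets this patch delete the parallel java1.7/java1.8 result files wholesale.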


http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientnegative/columnstats_partlvl_invalid_values.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/columnstats_partlvl_invalid_values.q b/ql/src/test/queries/clientnegative/columnstats_partlvl_invalid_values.q
index 712ece7..8521631 100644
--- a/ql/src/test/queries/clientnegative/columnstats_partlvl_invalid_values.q
+++ b/ql/src/test/queries/clientnegative/columnstats_partlvl_invalid_values.q
@@ -1,4 +1,3 @@
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 DROP TABLE Employee_Part;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/authorization_explain.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/authorization_explain.q b/ql/src/test/queries/clientpositive/authorization_explain.q
index 6a9475c..d429704 100644
--- a/ql/src/test/queries/clientpositive/authorization_explain.q
+++ b/ql/src/test/queries/clientpositive/authorization_explain.q
@@ -2,7 +2,6 @@ set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.autho
 set hive.mapred.mode=nonstrict;
 set hive.security.authorization.enabled=true;
 
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 explain authorization select * from src join srcpart;
 explain formatted authorization select * from src join srcpart;

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/avro_date.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/avro_date.q b/ql/src/test/queries/clientpositive/avro_date.q
index 15c07de..7169822 100644
--- a/ql/src/test/queries/clientpositive/avro_date.q
+++ b/ql/src/test/queries/clientpositive/avro_date.q
@@ -1,5 +1,4 @@
 set hive.mapred.mode=nonstrict;
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 DROP TABLE avro_date_staging;
 DROP TABLE avro_date;

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/avro_deserialize_map_null.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/avro_deserialize_map_null.q b/ql/src/test/queries/clientpositive/avro_deserialize_map_null.q
index 962e649..42258d9 100644
--- a/ql/src/test/queries/clientpositive/avro_deserialize_map_null.q
+++ b/ql/src/test/queries/clientpositive/avro_deserialize_map_null.q
@@ -4,7 +4,6 @@
 -- fileSchema   = [{ "type" : "map", "values" : ["string","null"]}, "null"]
 -- recordSchema = ["null", { "type" : "map", "values" : ["string","null"]}]
 
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 DROP TABLE IF EXISTS avro_table;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/avro_nullable_fields.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/avro_nullable_fields.q b/ql/src/test/queries/clientpositive/avro_nullable_fields.q
index 9ba7441..cb398d6 100644
--- a/ql/src/test/queries/clientpositive/avro_nullable_fields.q
+++ b/ql/src/test/queries/clientpositive/avro_nullable_fields.q
@@ -1,6 +1,5 @@
 -- Verify that nullable fields properly work
 
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 CREATE TABLE test_serializer(string1 STRING,
                              int1 INT,

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/avro_timestamp.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/avro_timestamp.q b/ql/src/test/queries/clientpositive/avro_timestamp.q
index 7bf0dc8..847f250 100644
--- a/ql/src/test/queries/clientpositive/avro_timestamp.q
+++ b/ql/src/test/queries/clientpositive/avro_timestamp.q
@@ -1,7 +1,6 @@
 set hive.mapred.mode=nonstrict;
 -- Exclude test on Windows due to space character being escaped in Hive paths on Windows.
 -- EXCLUDE_OS_WINDOWS
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 DROP TABLE avro_timestamp_staging;
 DROP TABLE avro_timestamp;

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/cbo_rp_outer_join_ppr.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/cbo_rp_outer_join_ppr.q b/ql/src/test/queries/clientpositive/cbo_rp_outer_join_ppr.q
index c497ce9..d8f726e 100644
--- a/ql/src/test/queries/clientpositive/cbo_rp_outer_join_ppr.q
+++ b/ql/src/test/queries/clientpositive/cbo_rp_outer_join_ppr.q
@@ -4,7 +4,6 @@ set hive.cbo.returnpath.hiveop=true;
 set hive.optimize.ppd=true;
 
 -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 EXPLAIN EXTENDED
  FROM 

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/char_udf1.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/char_udf1.q b/ql/src/test/queries/clientpositive/char_udf1.q
index 09012b4..39aa0e0 100644
--- a/ql/src/test/queries/clientpositive/char_udf1.q
+++ b/ql/src/test/queries/clientpositive/char_udf1.q
@@ -4,7 +4,6 @@ create table char_udf_1 (c1 string, c2 string, c3 char(10), c4 char(20));
 insert overwrite table char_udf_1
   select key, value, key, value from src where key = '238' limit 1;
 
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 -- UDFs with char support
 select 

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/input4.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/input4.q b/ql/src/test/queries/clientpositive/input4.q
index 83edbe2..90fcbdd 100644
--- a/ql/src/test/queries/clientpositive/input4.q
+++ b/ql/src/test/queries/clientpositive/input4.q
@@ -1,4 +1,3 @@
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE;
 EXPLAIN

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/join0.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/join0.q b/ql/src/test/queries/clientpositive/join0.q
index 66f2ef3..3252847 100644
--- a/ql/src/test/queries/clientpositive/join0.q
+++ b/ql/src/test/queries/clientpositive/join0.q
@@ -1,6 +1,5 @@
 set hive.mapred.mode=nonstrict;
 set hive.explain.user=false;
--- JAVA_VERSION_SPECIFIC_OUTPUT
 -- SORT_QUERY_RESULTS
 
 EXPLAIN

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/list_bucket_dml_10.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/list_bucket_dml_10.q b/ql/src/test/queries/clientpositive/list_bucket_dml_10.q
index 5d3dade..f25c174 100644
--- a/ql/src/test/queries/clientpositive/list_bucket_dml_10.q
+++ b/ql/src/test/queries/clientpositive/list_bucket_dml_10.q
@@ -1,7 +1,6 @@
 set mapred.input.dir.recursive=true;
 
 -- run this test case in minimr to ensure it works in cluster
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 -- list bucketing DML: static partition. multiple skewed columns.
 -- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/list_bucket_dml_11.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/list_bucket_dml_11.q b/ql/src/test/queries/clientpositive/list_bucket_dml_11.q
index 2d22d66..8ac1627 100644
--- a/ql/src/test/queries/clientpositive/list_bucket_dml_11.q
+++ b/ql/src/test/queries/clientpositive/list_bucket_dml_11.q
@@ -6,7 +6,6 @@ set hive.merge.mapredfiles=false;
 -- Ensure it works if skewed column is not the first column in the table columns
 
 -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 -- list bucketing DML: static partition. multiple skewed columns.
 

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/list_bucket_dml_12.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/list_bucket_dml_12.q b/ql/src/test/queries/clientpositive/list_bucket_dml_12.q
index ac063cc..9facfa5 100644
--- a/ql/src/test/queries/clientpositive/list_bucket_dml_12.q
+++ b/ql/src/test/queries/clientpositive/list_bucket_dml_12.q
@@ -7,7 +7,6 @@ set hive.merge.mapredfiles=false;
 
 -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
 -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 -- test where the skewed values are more than 1 say columns no. 2 and 4 in a table with 5 columns
 create table list_bucketing_mul_col (col1 String, col2 String, col3 String, col4 String, col5 string) 

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/list_bucket_dml_13.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/list_bucket_dml_13.q b/ql/src/test/queries/clientpositive/list_bucket_dml_13.q
index d68ca93..0fe7f61 100644
--- a/ql/src/test/queries/clientpositive/list_bucket_dml_13.q
+++ b/ql/src/test/queries/clientpositive/list_bucket_dml_13.q
@@ -7,7 +7,6 @@ set hive.merge.mapredfiles=false;
 
 -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
 -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 -- test where the skewed values are more than 1 say columns no. 2 and 4 in a table with 5 columns
 create table list_bucketing_mul_col (col1 String, col2 String, col3 String, col4 String, col5 string) 

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/list_bucket_dml_2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/list_bucket_dml_2.q b/ql/src/test/queries/clientpositive/list_bucket_dml_2.q
index 263a002..c6dceab 100644
--- a/ql/src/test/queries/clientpositive/list_bucket_dml_2.q
+++ b/ql/src/test/queries/clientpositive/list_bucket_dml_2.q
@@ -10,7 +10,6 @@ set hive.stats.reliable=true;
 
 -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
 -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 -- list bucketing DML: static partition. multiple skewed columns.
 -- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/list_bucket_dml_4.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/list_bucket_dml_4.q b/ql/src/test/queries/clientpositive/list_bucket_dml_4.q
index 86ff342..950409d 100644
--- a/ql/src/test/queries/clientpositive/list_bucket_dml_4.q
+++ b/ql/src/test/queries/clientpositive/list_bucket_dml_4.q
@@ -9,7 +9,6 @@ set hive.merge.mapredfiles=false;
 
 -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
 -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 -- list bucketing DML: static partition. multiple skewed columns. merge.
 -- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/list_bucket_dml_5.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/list_bucket_dml_5.q b/ql/src/test/queries/clientpositive/list_bucket_dml_5.q
index ace7ba9..fce8e2e 100644
--- a/ql/src/test/queries/clientpositive/list_bucket_dml_5.q
+++ b/ql/src/test/queries/clientpositive/list_bucket_dml_5.q
@@ -10,7 +10,6 @@ set mapred.input.dir.recursive=true;
 
 -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
 -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 -- create a skewed table
 create table list_bucketing_dynamic_part (key String, value String) 

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/list_bucket_dml_6.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/list_bucket_dml_6.q b/ql/src/test/queries/clientpositive/list_bucket_dml_6.q
index 5684788..631c938 100644
--- a/ql/src/test/queries/clientpositive/list_bucket_dml_6.q
+++ b/ql/src/test/queries/clientpositive/list_bucket_dml_6.q
@@ -47,7 +47,6 @@ set hive.merge.mapredfiles=false;
 
 -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
 -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 -- create a skewed table
 create table list_bucketing_dynamic_part (key String, value String) 

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/list_bucket_dml_8.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/list_bucket_dml_8.q b/ql/src/test/queries/clientpositive/list_bucket_dml_8.q
index d904543..6d73896 100644
--- a/ql/src/test/queries/clientpositive/list_bucket_dml_8.q
+++ b/ql/src/test/queries/clientpositive/list_bucket_dml_8.q
@@ -48,7 +48,6 @@ set hive.merge.mapredfiles=false;
 -- 118 000002_0 
 
 -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 -- create a skewed table
 create table list_bucketing_dynamic_part (key String, value String) 

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/list_bucket_dml_9.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/list_bucket_dml_9.q b/ql/src/test/queries/clientpositive/list_bucket_dml_9.q
index 620750c..d2e24af 100644
--- a/ql/src/test/queries/clientpositive/list_bucket_dml_9.q
+++ b/ql/src/test/queries/clientpositive/list_bucket_dml_9.q
@@ -9,7 +9,6 @@ set hive.merge.mapredfiles=false;
 
 -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
 -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 -- list bucketing DML: static partition. multiple skewed columns. merge.
 -- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/outer_join_ppr.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/outer_join_ppr.q b/ql/src/test/queries/clientpositive/outer_join_ppr.q
index 497a4d1..60a06ae 100644
--- a/ql/src/test/queries/clientpositive/outer_join_ppr.q
+++ b/ql/src/test/queries/clientpositive/outer_join_ppr.q
@@ -2,7 +2,6 @@ set hive.mapred.mode=nonstrict;
 set hive.optimize.ppd=true;
 
 -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 EXPLAIN EXTENDED
  FROM 

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/parquet_map_null.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/parquet_map_null.q b/ql/src/test/queries/clientpositive/parquet_map_null.q
index 61058f3..e154159 100644
--- a/ql/src/test/queries/clientpositive/parquet_map_null.q
+++ b/ql/src/test/queries/clientpositive/parquet_map_null.q
@@ -1,5 +1,4 @@
 -- This test attempts to write a parquet table from an avro table that contains map null values
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 DROP TABLE IF EXISTS avro_table;
 DROP TABLE IF EXISTS parquet_table;

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/plan_json.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/plan_json.q b/ql/src/test/queries/clientpositive/plan_json.q
index aa2b134..503b55d 100644
--- a/ql/src/test/queries/clientpositive/plan_json.q
+++ b/ql/src/test/queries/clientpositive/plan_json.q
@@ -1,5 +1,4 @@
 -- explain plan json: gets the formatted JSON output of the query plan for the given Hive query
 
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 EXPLAIN FORMATTED SELECT count(1) FROM src;

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/stats_list_bucket.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/stats_list_bucket.q b/ql/src/test/queries/clientpositive/stats_list_bucket.q
index 51137a8..536702c 100644
--- a/ql/src/test/queries/clientpositive/stats_list_bucket.q
+++ b/ql/src/test/queries/clientpositive/stats_list_bucket.q
@@ -1,6 +1,5 @@
 
 -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 drop table stats_list_bucket;
 drop table stats_list_bucket_1;

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/str_to_map.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/str_to_map.q b/ql/src/test/queries/clientpositive/str_to_map.q
index f2993b1..3280d89 100644
--- a/ql/src/test/queries/clientpositive/str_to_map.q
+++ b/ql/src/test/queries/clientpositive/str_to_map.q
@@ -1,7 +1,6 @@
 set hive.mapred.mode=nonstrict;
 set hive.fetch.task.conversion=more;
 
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 desc function str_to_map;
 desc function extended str_to_map;

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/subquery_multiinsert.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/subquery_multiinsert.q b/ql/src/test/queries/clientpositive/subquery_multiinsert.q
index bea2e13..9d70f51 100644
--- a/ql/src/test/queries/clientpositive/subquery_multiinsert.q
+++ b/ql/src/test/queries/clientpositive/subquery_multiinsert.q
@@ -2,7 +2,6 @@ set hive.mapred.mode=nonstrict;
 set hive.exec.post.hooks=org.apache.hadoop.hive.ql.hooks.PostExecutePrinter,org.apache.hadoop.hive.ql.hooks.PrintCompletedTasksHook;
 
 -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 CREATE TABLE src_4(
   key STRING, 

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/subquery_notin_having.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/subquery_notin_having.q b/ql/src/test/queries/clientpositive/subquery_notin_having.q
index 8b2914d..05148df 100644
--- a/ql/src/test/queries/clientpositive/subquery_notin_having.q
+++ b/ql/src/test/queries/clientpositive/subquery_notin_having.q
@@ -1,6 +1,5 @@
 set hive.mapred.mode=nonstrict;
 -- non agg, non corr
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 explain
 select key, count(*) 

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/varchar_udf1.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/varchar_udf1.q b/ql/src/test/queries/clientpositive/varchar_udf1.q
index ff40b31..4d1f884 100644
--- a/ql/src/test/queries/clientpositive/varchar_udf1.q
+++ b/ql/src/test/queries/clientpositive/varchar_udf1.q
@@ -4,7 +4,6 @@ create table varchar_udf_1 (c1 string, c2 string, c3 varchar(10), c4 varchar(20)
 insert overwrite table varchar_udf_1
   select key, value, key, value from src where key = '238' limit 1;
 
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 -- UDFs with varchar support
 select 

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/queries/clientpositive/vector_cast_constant.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/vector_cast_constant.q b/ql/src/test/queries/clientpositive/vector_cast_constant.q
index c50dd8f..94bee09 100644
--- a/ql/src/test/queries/clientpositive/vector_cast_constant.q
+++ b/ql/src/test/queries/clientpositive/vector_cast_constant.q
@@ -2,7 +2,6 @@ set hive.mapred.mode=nonstrict;
 set hive.explain.user=false;
 SET hive.vectorized.execution.enabled=true;
 
--- JAVA_VERSION_SPECIFIC_OUTPUT
 
 DROP TABLE over1k;
 DROP TABLE over1korc;
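
The -- JAVA_VERSION_SPECIFIC_OUTPUT marker being stripped from the .q files above is what told the qfile harness to compare a test's output against a per-JDK golden file (.q.java1.7.out or .q.java1.8.out) instead of a single .q.out. As a rough sketch of that selection idea, assuming a hypothetical resolver class (this is illustrative, not Hive's actual QTestUtil code, and every name in it is made up):

    import java.io.File;

    public class GoldenFileResolver {
        // Hypothetical helper: picks the golden file a test should diff against.
        static File resolve(File resultsDir, String testName, boolean versionSpecific) {
            if (versionSpecific) {
                // Returns "1.7" on JDK 7 and "1.8" on JDK 8.
                String jdk = System.getProperty("java.specification.version");
                return new File(resultsDir, testName + ".q.java" + jdk + ".out");
            }
            return new File(resultsDir, testName + ".q.out");
        }

        public static void main(String[] args) {
            File dir = new File("ql/src/test/results/clientpositive");
            System.out.println(resolve(dir, "avro_date", true));  // avro_date.q.java1.8.out on JDK 8
            System.out.println(resolve(dir, "avro_date", false)); // avro_date.q.out
        }
    }

With the marker gone, the version-specific branch is never taken, which is why the per-JDK .out files below are deleted in favor of plain .q.out files.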

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.java1.7.out b/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.java1.7.out
deleted file mode 100644
index 4ea70e3..0000000
--- a/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.java1.7.out
+++ /dev/null
@@ -1,73 +0,0 @@
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE Employee_Part
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE Employee_Part
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
-row format delimited fields terminated by '|'  stored as textfile
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@Employee_Part
-POSTHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
-row format delimited fields terminated by '|'  stored as textfile
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@Employee_Part
-PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@employee_part
-POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=USA
-PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@employee_part
-POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=UK
-PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@employee_part
-POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=USA
-PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@employee_part
-POSTHOOK: Output: default@employee_part@employeesalary=4000.0/country=USA
-PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@employee_part
-POSTHOOK: Output: default@employee_part@employeesalary=3500.0/country=UK
-PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@employee_part
-POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=UK
-FAILED: SemanticException [Error 30007]: Invalid partitioning key/value specified in ANALYZE statement : {employeesalary=4000.0, country=Canada}

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.java1.8.out b/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.java1.8.out
deleted file mode 100644
index 7cae55e..0000000
--- a/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.java1.8.out
+++ /dev/null
@@ -1,73 +0,0 @@
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE Employee_Part
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE Employee_Part
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
-row format delimited fields terminated by '|'  stored as textfile
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@Employee_Part
-POSTHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
-row format delimited fields terminated by '|'  stored as textfile
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@Employee_Part
-PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@employee_part
-POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=USA
-PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@employee_part
-POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=UK
-PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@employee_part
-POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=USA
-PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@employee_part
-POSTHOOK: Output: default@employee_part@employeesalary=4000.0/country=USA
-PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@employee_part
-POSTHOOK: Output: default@employee_part@employeesalary=3500.0/country=UK
-PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@employee_part
-POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@employee_part
-POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=UK
-FAILED: SemanticException [Error 30007]: Invalid partitioning key/value specified in ANALYZE statement : {country=Canada, employeesalary=4000.0}

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.out b/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.out
new file mode 100644
index 0000000..3261f78
--- /dev/null
+++ b/ql/src/test/results/clientnegative/columnstats_partlvl_invalid_values.q.out
@@ -0,0 +1,69 @@
+PREHOOK: query: DROP TABLE Employee_Part
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE Employee_Part
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
+row format delimited fields terminated by '|'  stored as textfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@Employee_Part
+POSTHOOK: query: CREATE TABLE Employee_Part(employeeID int, employeeName String) partitioned by (employeeSalary double, country string)
+row format delimited fields terminated by '|'  stored as textfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@Employee_Part
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='USA')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=USA
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='2000.0', country='UK')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=2000.0/country=UK
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='USA')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=USA
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='4000.0', country='USA')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=4000.0/country=USA
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3500.0', country='UK')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=3500.0/country=UK
+PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@employee_part
+POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/employee2.dat" INTO TABLE Employee_Part partition(employeeSalary='3000.0', country='UK')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@employee_part
+POSTHOOK: Output: default@employee_part@employeesalary=3000.0/country=UK
+FAILED: SemanticException [Error 30007]: Invalid partitioning key/value specified in ANALYZE statement : {employeesalary=4000.0, country=Canada}
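
The two golden files deleted above are identical except for the key order in the final Error 30007 message: {employeesalary=4000.0, country=Canada} under JDK 7 versus {country=Canada, employeesalary=4000.0} under JDK 8. The order presumably comes from iterating a hash-based map while formatting the partition spec, and HashMap's iteration order is unspecified and changed between JDK 7 and JDK 8, so the unified .q.out can keep only one ordering. A minimal demonstration of the underlying JDK behavior (plain Java, not Hive code):

    import java.util.HashMap;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class MapOrderDemo {
        public static void main(String[] args) {
            // HashMap: iteration order is unspecified and differs across JDKs.
            Map<String, String> spec = new HashMap<>();
            spec.put("employeesalary", "4000.0");
            spec.put("country", "Canada");
            System.out.println(spec); // key order depends on the JDK

            // LinkedHashMap: iteration follows insertion order on every JDK.
            Map<String, String> stable = new LinkedHashMap<>();
            stable.put("employeesalary", "4000.0");
            stable.put("country", "Canada");
            System.out.println(stable); // always {employeesalary=4000.0, country=Canada}
        }
    }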

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/authorization_explain.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/authorization_explain.q.java1.7.out b/ql/src/test/results/clientpositive/authorization_explain.q.java1.7.out
deleted file mode 100644
index fefb50c..0000000
--- a/ql/src/test/results/clientpositive/authorization_explain.q.java1.7.out
+++ /dev/null
@@ -1,44 +0,0 @@
-Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-explain authorization select * from src join srcpart
-PREHOOK: type: QUERY
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-explain authorization select * from src join srcpart
-POSTHOOK: type: QUERY
-INPUTS: 
-  default@src
-  default@srcpart
-  default@srcpart@ds=2008-04-08/hr=11
-  default@srcpart@ds=2008-04-08/hr=12
-  default@srcpart@ds=2008-04-09/hr=11
-  default@srcpart@ds=2008-04-09/hr=12
-OUTPUTS: 
-#### A masked pattern was here ####
-CURRENT_USER: 
-  hive_test_user
-OPERATION: 
-  QUERY
-Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
-PREHOOK: query: explain formatted authorization select * from src join srcpart
-PREHOOK: type: QUERY
-POSTHOOK: query: explain formatted authorization select * from src join srcpart
-POSTHOOK: type: QUERY
-#### A masked pattern was here ####
-PREHOOK: query: explain authorization use default
-PREHOOK: type: SWITCHDATABASE
-POSTHOOK: query: explain authorization use default
-POSTHOOK: type: SWITCHDATABASE
-INPUTS: 
-  database:default
-OUTPUTS: 
-CURRENT_USER: 
-  hive_test_user
-OPERATION: 
-  SWITCHDATABASE
-PREHOOK: query: explain formatted authorization use default
-PREHOOK: type: SWITCHDATABASE
-POSTHOOK: query: explain formatted authorization use default
-POSTHOOK: type: SWITCHDATABASE
-{"INPUTS":["database:default"],"OUTPUTS":[],"CURRENT_USER":"hive_test_user","OPERATION":"SWITCHDATABASE"}

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/authorization_explain.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/authorization_explain.q.java1.8.out b/ql/src/test/results/clientpositive/authorization_explain.q.java1.8.out
deleted file mode 100644
index b7ec209..0000000
--- a/ql/src/test/results/clientpositive/authorization_explain.q.java1.8.out
+++ /dev/null
@@ -1,47 +0,0 @@
-Warning: Shuffle Join JOIN[7][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-explain authorization select * from src join srcpart
-PREHOOK: type: QUERY
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-explain authorization select * from src join srcpart
-POSTHOOK: type: QUERY
-INPUTS: 
-  default@src
-  default@srcpart
-  default@srcpart@ds=2008-04-08/hr=11
-  default@srcpart@ds=2008-04-08/hr=12
-  default@srcpart@ds=2008-04-09/hr=11
-  default@srcpart@ds=2008-04-09/hr=12
-OUTPUTS: 
-#### A masked pattern was here ####
-CURRENT_USER: 
-  hive_test_user
-OPERATION: 
-  QUERY
-AUTHORIZATION_FAILURES: 
-  No privilege 'Select' found for inputs { database:default, table:src, columnName:key}
-  No privilege 'Select' found for inputs { database:default, table:srcpart, columnName:key}
-Warning: Shuffle Join JOIN[7][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
-PREHOOK: query: explain formatted authorization select * from src join srcpart
-PREHOOK: type: QUERY
-POSTHOOK: query: explain formatted authorization select * from src join srcpart
-POSTHOOK: type: QUERY
-#### A masked pattern was here ####
-PREHOOK: query: explain authorization use default
-PREHOOK: type: SWITCHDATABASE
-POSTHOOK: query: explain authorization use default
-POSTHOOK: type: SWITCHDATABASE
-INPUTS: 
-  database:default
-OUTPUTS: 
-CURRENT_USER: 
-  hive_test_user
-OPERATION: 
-  SWITCHDATABASE
-PREHOOK: query: explain formatted authorization use default
-PREHOOK: type: SWITCHDATABASE
-POSTHOOK: query: explain formatted authorization use default
-POSTHOOK: type: SWITCHDATABASE
-{"INPUTS":["database:default"],"OUTPUTS":[],"CURRENT_USER":"hive_test_user","OPERATION":"SWITCHDATABASE"}

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/authorization_explain.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/authorization_explain.q.out b/ql/src/test/results/clientpositive/authorization_explain.q.out
new file mode 100644
index 0000000..851b845
--- /dev/null
+++ b/ql/src/test/results/clientpositive/authorization_explain.q.out
@@ -0,0 +1,40 @@
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: explain authorization select * from src join srcpart
+PREHOOK: type: QUERY
+POSTHOOK: query: explain authorization select * from src join srcpart
+POSTHOOK: type: QUERY
+INPUTS: 
+  default@src
+  default@srcpart
+  default@srcpart@ds=2008-04-08/hr=11
+  default@srcpart@ds=2008-04-08/hr=12
+  default@srcpart@ds=2008-04-09/hr=11
+  default@srcpart@ds=2008-04-09/hr=12
+OUTPUTS: 
+#### A masked pattern was here ####
+CURRENT_USER: 
+  hive_test_user
+OPERATION: 
+  QUERY
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: explain formatted authorization select * from src join srcpart
+PREHOOK: type: QUERY
+POSTHOOK: query: explain formatted authorization select * from src join srcpart
+POSTHOOK: type: QUERY
+#### A masked pattern was here ####
+PREHOOK: query: explain authorization use default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: explain authorization use default
+POSTHOOK: type: SWITCHDATABASE
+INPUTS: 
+  database:default
+OUTPUTS: 
+CURRENT_USER: 
+  hive_test_user
+OPERATION: 
+  SWITCHDATABASE
+PREHOOK: query: explain formatted authorization use default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: explain formatted authorization use default
+POSTHOOK: type: SWITCHDATABASE
+{"INPUTS":["database:default"],"OUTPUTS":[],"CURRENT_USER":"hive_test_user","OPERATION":"SWITCHDATABASE"}

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/avro_date.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/avro_date.q.java1.7.out b/ql/src/test/results/clientpositive/avro_date.q.java1.7.out
deleted file mode 100644
index 501b983..0000000
--- a/ql/src/test/results/clientpositive/avro_date.q.java1.7.out
+++ /dev/null
@@ -1,130 +0,0 @@
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE avro_date_staging
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE avro_date_staging
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE avro_date
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE avro_date
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE avro_date_casts
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE avro_date_casts
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE avro_date_staging (d date, m1 map<string, date>, l1 array<date>)
-   ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-   COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
-   STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@avro_date_staging
-POSTHOOK: query: CREATE TABLE avro_date_staging (d date, m1 map<string, date>, l1 array<date>)
-   ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-   COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
-   STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@avro_date_staging
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_date.txt' OVERWRITE INTO TABLE avro_date_staging
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@avro_date_staging
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_date.txt' OVERWRITE INTO TABLE avro_date_staging
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@avro_date_staging
-PREHOOK: query: CREATE TABLE avro_date (d date, m1 map<string, date>, l1 array<date>) 
-  PARTITIONED BY (p1 int, p2 date) 
-  ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' 
-  COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':' 
-  STORED AS AVRO
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@avro_date
-POSTHOOK: query: CREATE TABLE avro_date (d date, m1 map<string, date>, l1 array<date>) 
-  PARTITIONED BY (p1 int, p2 date) 
-  ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' 
-  COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':' 
-  STORED AS AVRO
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@avro_date
-PREHOOK: query: INSERT OVERWRITE TABLE avro_date PARTITION(p1=2, p2='2014-09-26') SELECT * FROM avro_date_staging
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_date_staging
-PREHOOK: Output: default@avro_date@p1=2/p2=2014-09-26
-POSTHOOK: query: INSERT OVERWRITE TABLE avro_date PARTITION(p1=2, p2='2014-09-26') SELECT * FROM avro_date_staging
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_date_staging
-POSTHOOK: Output: default@avro_date@p1=2/p2=2014-09-26
-POSTHOOK: Lineage: avro_date PARTITION(p1=2,p2=2014-09-26).d SIMPLE [(avro_date_staging)avro_date_staging.FieldSchema(name:d, type:date, comment:null), ]
-POSTHOOK: Lineage: avro_date PARTITION(p1=2,p2=2014-09-26).l1 SIMPLE [(avro_date_staging)avro_date_staging.FieldSchema(name:l1, type:array<date>, comment:null), ]
-POSTHOOK: Lineage: avro_date PARTITION(p1=2,p2=2014-09-26).m1 SIMPLE [(avro_date_staging)avro_date_staging.FieldSchema(name:m1, type:map<string,date>, comment:null), ]
-PREHOOK: query: SELECT * FROM avro_date
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_date
-PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM avro_date
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_date
-POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-2012-02-21	{"foo":"1980-12-16","bar":"1998-05-07"}	["2011-09-04","2011-09-05"]	2	2014-09-26
-2014-02-11	{"baz":"1981-12-16"}	["2011-09-05"]	2	2014-09-26
-1947-02-11	{"baz":"1921-12-16"}	["2011-09-05"]	2	2014-09-26
-8200-02-11	{"baz":"6981-12-16"}	["1039-09-05"]	2	2014-09-26
-PREHOOK: query: SELECT d, COUNT(d) FROM avro_date GROUP BY d
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_date
-PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT d, COUNT(d) FROM avro_date GROUP BY d
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_date
-POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-1947-02-11	1
-2012-02-21	1
-2014-02-11	1
-8200-02-11	1
-PREHOOK: query: SELECT * FROM avro_date WHERE d!='1947-02-11'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_date
-PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM avro_date WHERE d!='1947-02-11'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_date
-POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-2012-02-21	{"foo":"1980-12-16","bar":"1998-05-07"}	["2011-09-04","2011-09-05"]	2	2014-09-26
-2014-02-11	{"baz":"1981-12-16"}	["2011-09-05"]	2	2014-09-26
-8200-02-11	{"baz":"6981-12-16"}	["1039-09-05"]	2	2014-09-26
-PREHOOK: query: SELECT * FROM avro_date WHERE d<'2014-12-21'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_date
-PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM avro_date WHERE d<'2014-12-21'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_date
-POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-2012-02-21	{"foo":"1980-12-16","bar":"1998-05-07"}	["2011-09-04","2011-09-05"]	2	2014-09-26
-2014-02-11	{"baz":"1981-12-16"}	["2011-09-05"]	2	2014-09-26
-1947-02-11	{"baz":"1921-12-16"}	["2011-09-05"]	2	2014-09-26
-PREHOOK: query: SELECT * FROM avro_date WHERE d>'8000-12-01'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_date
-PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM avro_date WHERE d>'8000-12-01'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_date
-POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-8200-02-11	{"baz":"6981-12-16"}	["1039-09-05"]	2	2014-09-26

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/avro_date.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/avro_date.q.java1.8.out b/ql/src/test/results/clientpositive/avro_date.q.java1.8.out
deleted file mode 100644
index dea51c6..0000000
--- a/ql/src/test/results/clientpositive/avro_date.q.java1.8.out
+++ /dev/null
@@ -1,130 +0,0 @@
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE avro_date_staging
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE avro_date_staging
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE avro_date
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE avro_date
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE avro_date_casts
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE avro_date_casts
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE avro_date_staging (d date, m1 map<string, date>, l1 array<date>)
-   ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-   COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
-   STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@avro_date_staging
-POSTHOOK: query: CREATE TABLE avro_date_staging (d date, m1 map<string, date>, l1 array<date>)
-   ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-   COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
-   STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@avro_date_staging
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_date.txt' OVERWRITE INTO TABLE avro_date_staging
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@avro_date_staging
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_date.txt' OVERWRITE INTO TABLE avro_date_staging
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@avro_date_staging
-PREHOOK: query: CREATE TABLE avro_date (d date, m1 map<string, date>, l1 array<date>) 
-  PARTITIONED BY (p1 int, p2 date) 
-  ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' 
-  COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':' 
-  STORED AS AVRO
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@avro_date
-POSTHOOK: query: CREATE TABLE avro_date (d date, m1 map<string, date>, l1 array<date>) 
-  PARTITIONED BY (p1 int, p2 date) 
-  ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' 
-  COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':' 
-  STORED AS AVRO
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@avro_date
-PREHOOK: query: INSERT OVERWRITE TABLE avro_date PARTITION(p1=2, p2='2014-09-26') SELECT * FROM avro_date_staging
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_date_staging
-PREHOOK: Output: default@avro_date@p1=2/p2=2014-09-26
-POSTHOOK: query: INSERT OVERWRITE TABLE avro_date PARTITION(p1=2, p2='2014-09-26') SELECT * FROM avro_date_staging
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_date_staging
-POSTHOOK: Output: default@avro_date@p1=2/p2=2014-09-26
-POSTHOOK: Lineage: avro_date PARTITION(p1=2,p2=2014-09-26).d SIMPLE [(avro_date_staging)avro_date_staging.FieldSchema(name:d, type:date, comment:null), ]
-POSTHOOK: Lineage: avro_date PARTITION(p1=2,p2=2014-09-26).l1 SIMPLE [(avro_date_staging)avro_date_staging.FieldSchema(name:l1, type:array<date>, comment:null), ]
-POSTHOOK: Lineage: avro_date PARTITION(p1=2,p2=2014-09-26).m1 SIMPLE [(avro_date_staging)avro_date_staging.FieldSchema(name:m1, type:map<string,date>, comment:null), ]
-PREHOOK: query: SELECT * FROM avro_date
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_date
-PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM avro_date
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_date
-POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-2012-02-21	{"bar":"1998-05-07","foo":"1980-12-16"}	["2011-09-04","2011-09-05"]	2	2014-09-26
-2014-02-11	{"baz":"1981-12-16"}	["2011-09-05"]	2	2014-09-26
-1947-02-11	{"baz":"1921-12-16"}	["2011-09-05"]	2	2014-09-26
-8200-02-11	{"baz":"6981-12-16"}	["1039-09-05"]	2	2014-09-26
-PREHOOK: query: SELECT d, COUNT(d) FROM avro_date GROUP BY d
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_date
-PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT d, COUNT(d) FROM avro_date GROUP BY d
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_date
-POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-1947-02-11	1
-2012-02-21	1
-2014-02-11	1
-8200-02-11	1
-PREHOOK: query: SELECT * FROM avro_date WHERE d!='1947-02-11'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_date
-PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM avro_date WHERE d!='1947-02-11'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_date
-POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-2012-02-21	{"bar":"1998-05-07","foo":"1980-12-16"}	["2011-09-04","2011-09-05"]	2	2014-09-26
-2014-02-11	{"baz":"1981-12-16"}	["2011-09-05"]	2	2014-09-26
-8200-02-11	{"baz":"6981-12-16"}	["1039-09-05"]	2	2014-09-26
-PREHOOK: query: SELECT * FROM avro_date WHERE d<'2014-12-21'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_date
-PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM avro_date WHERE d<'2014-12-21'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_date
-POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-2012-02-21	{"bar":"1998-05-07","foo":"1980-12-16"}	["2011-09-04","2011-09-05"]	2	2014-09-26
-2014-02-11	{"baz":"1981-12-16"}	["2011-09-05"]	2	2014-09-26
-1947-02-11	{"baz":"1921-12-16"}	["2011-09-05"]	2	2014-09-26
-PREHOOK: query: SELECT * FROM avro_date WHERE d>'8000-12-01'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_date
-PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM avro_date WHERE d>'8000-12-01'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_date
-POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
-#### A masked pattern was here ####
-8200-02-11	{"baz":"6981-12-16"}	["1039-09-05"]	2	2014-09-26

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/avro_date.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/avro_date.q.out b/ql/src/test/results/clientpositive/avro_date.q.out
new file mode 100644
index 0000000..32501cf
--- /dev/null
+++ b/ql/src/test/results/clientpositive/avro_date.q.out
@@ -0,0 +1,126 @@
+PREHOOK: query: DROP TABLE avro_date_staging
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE avro_date_staging
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE avro_date
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE avro_date
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE avro_date_casts
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE avro_date_casts
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE avro_date_staging (d date, m1 map<string, date>, l1 array<date>)
+   ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+   COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+   STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@avro_date_staging
+POSTHOOK: query: CREATE TABLE avro_date_staging (d date, m1 map<string, date>, l1 array<date>)
+   ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+   COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+   STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@avro_date_staging
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_date.txt' OVERWRITE INTO TABLE avro_date_staging
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@avro_date_staging
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_date.txt' OVERWRITE INTO TABLE avro_date_staging
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@avro_date_staging
+PREHOOK: query: CREATE TABLE avro_date (d date, m1 map<string, date>, l1 array<date>) 
+  PARTITIONED BY (p1 int, p2 date) 
+  ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' 
+  COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':' 
+  STORED AS AVRO
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@avro_date
+POSTHOOK: query: CREATE TABLE avro_date (d date, m1 map<string, date>, l1 array<date>) 
+  PARTITIONED BY (p1 int, p2 date) 
+  ROW FORMAT DELIMITED FIELDS TERMINATED BY '|' 
+  COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':' 
+  STORED AS AVRO
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@avro_date
+PREHOOK: query: INSERT OVERWRITE TABLE avro_date PARTITION(p1=2, p2='2014-09-26') SELECT * FROM avro_date_staging
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date_staging
+PREHOOK: Output: default@avro_date@p1=2/p2=2014-09-26
+POSTHOOK: query: INSERT OVERWRITE TABLE avro_date PARTITION(p1=2, p2='2014-09-26') SELECT * FROM avro_date_staging
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date_staging
+POSTHOOK: Output: default@avro_date@p1=2/p2=2014-09-26
+POSTHOOK: Lineage: avro_date PARTITION(p1=2,p2=2014-09-26).d SIMPLE [(avro_date_staging)avro_date_staging.FieldSchema(name:d, type:date, comment:null), ]
+POSTHOOK: Lineage: avro_date PARTITION(p1=2,p2=2014-09-26).l1 SIMPLE [(avro_date_staging)avro_date_staging.FieldSchema(name:l1, type:array<date>, comment:null), ]
+POSTHOOK: Lineage: avro_date PARTITION(p1=2,p2=2014-09-26).m1 SIMPLE [(avro_date_staging)avro_date_staging.FieldSchema(name:m1, type:map<string,date>, comment:null), ]
+PREHOOK: query: SELECT * FROM avro_date
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_date
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+2012-02-21	{"bar":"1998-05-07","foo":"1980-12-16"}	["2011-09-04","2011-09-05"]	2	2014-09-26
+2014-02-11	{"baz":"1981-12-16"}	["2011-09-05"]	2	2014-09-26
+1947-02-11	{"baz":"1921-12-16"}	["2011-09-05"]	2	2014-09-26
+8200-02-11	{"baz":"6981-12-16"}	["1039-09-05"]	2	2014-09-26
+PREHOOK: query: SELECT d, COUNT(d) FROM avro_date GROUP BY d
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT d, COUNT(d) FROM avro_date GROUP BY d
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+1947-02-11	1
+2012-02-21	1
+2014-02-11	1
+8200-02-11	1
+PREHOOK: query: SELECT * FROM avro_date WHERE d!='1947-02-11'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_date WHERE d!='1947-02-11'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+2012-02-21	{"bar":"1998-05-07","foo":"1980-12-16"}	["2011-09-04","2011-09-05"]	2	2014-09-26
+2014-02-11	{"baz":"1981-12-16"}	["2011-09-05"]	2	2014-09-26
+8200-02-11	{"baz":"6981-12-16"}	["1039-09-05"]	2	2014-09-26
+PREHOOK: query: SELECT * FROM avro_date WHERE d<'2014-12-21'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_date WHERE d<'2014-12-21'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+2012-02-21	{"bar":"1998-05-07","foo":"1980-12-16"}	["2011-09-04","2011-09-05"]	2	2014-09-26
+2014-02-11	{"baz":"1981-12-16"}	["2011-09-05"]	2	2014-09-26
+1947-02-11	{"baz":"1921-12-16"}	["2011-09-05"]	2	2014-09-26
+PREHOOK: query: SELECT * FROM avro_date WHERE d>'8000-12-01'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_date
+PREHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_date WHERE d>'8000-12-01'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_date
+POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
+#### A masked pattern was here ####
+8200-02-11	{"baz":"6981-12-16"}	["1039-09-05"]	2	2014-09-26

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.java1.7.out b/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.java1.7.out
deleted file mode 100644
index 8f8065e..0000000
--- a/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.java1.7.out
+++ /dev/null
@@ -1,57 +0,0 @@
-PREHOOK: query: -- This test attempts to deserialize an Avro file that contains map null values, and the file schema
--- vs record schema have the null values in different positions
--- i.e.
--- fileSchema   = [{ "type" : "map", "values" : ["string","null"]}, "null"]
--- recordSchema = ["null", { "type" : "map", "values" : ["string","null"]}]
-
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE IF EXISTS avro_table
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- This test attempts to deserialize an Avro file that contains map null values, and the file schema
--- vs record schema have the null values in different positions
--- i.e.
--- fileSchema   = [{ "type" : "map", "values" : ["string","null"]}, "null"]
--- recordSchema = ["null", { "type" : "map", "values" : ["string","null"]}]
-
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE IF EXISTS avro_table
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE avro_table (avreau_col_1 map<string,string>) STORED AS AVRO
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@avro_table
-POSTHOOK: query: CREATE TABLE avro_table (avreau_col_1 map<string,string>) STORED AS AVRO
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@avro_table
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/map_null_val.avro' OVERWRITE INTO TABLE avro_table
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@avro_table
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/map_null_val.avro' OVERWRITE INTO TABLE avro_table
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@avro_table
-PREHOOK: query: SELECT * FROM avro_table
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_table
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM avro_table
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_table
-#### A masked pattern was here ####
-{"key4":null,"key3":"val3"}
-{"key4":null,"key3":"val3"}
-{"key2":"val2","key1":null}
-{"key4":null,"key3":"val3"}
-{"key4":null,"key3":"val3"}
-PREHOOK: query: DROP TABLE avro_table
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@avro_table
-PREHOOK: Output: default@avro_table
-POSTHOOK: query: DROP TABLE avro_table
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@avro_table
-POSTHOOK: Output: default@avro_table

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.java1.8.out b/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.java1.8.out
deleted file mode 100644
index 127d8b3..0000000
--- a/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.java1.8.out
+++ /dev/null
@@ -1,57 +0,0 @@
-PREHOOK: query: -- This test attempts to deserialize an Avro file that contains map null values, and the file schema
--- vs record schema have the null values in different positions
--- i.e.
--- fileSchema   = [{ "type" : "map", "values" : ["string","null"]}, "null"]
--- recordSchema = ["null", { "type" : "map", "values" : ["string","null"]}]
-
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE IF EXISTS avro_table
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- This test attempts to deserialize an Avro file that contains map null values, and the file schema
--- vs record schema have the null values in different positions
--- i.e.
--- fileSchema   = [{ "type" : "map", "values" : ["string","null"]}, "null"]
--- recordSchema = ["null", { "type" : "map", "values" : ["string","null"]}]
-
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE IF EXISTS avro_table
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE avro_table (avreau_col_1 map<string,string>) STORED AS AVRO
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@avro_table
-POSTHOOK: query: CREATE TABLE avro_table (avreau_col_1 map<string,string>) STORED AS AVRO
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@avro_table
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/map_null_val.avro' OVERWRITE INTO TABLE avro_table
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@avro_table
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/map_null_val.avro' OVERWRITE INTO TABLE avro_table
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@avro_table
-PREHOOK: query: SELECT * FROM avro_table
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_table
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM avro_table
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_table
-#### A masked pattern was here ####
-{"key3":"val3","key4":null}
-{"key3":"val3","key4":null}
-{"key1":null,"key2":"val2"}
-{"key3":"val3","key4":null}
-{"key3":"val3","key4":null}
-PREHOOK: query: DROP TABLE avro_table
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@avro_table
-PREHOOK: Output: default@avro_table
-POSTHOOK: query: DROP TABLE avro_table
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@avro_table
-POSTHOOK: Output: default@avro_table

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.out b/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.out
new file mode 100644
index 0000000..2d983f1
--- /dev/null
+++ b/ql/src/test/results/clientpositive/avro_deserialize_map_null.q.out
@@ -0,0 +1,55 @@
+PREHOOK: query: -- This test attempts to deserialize an Avro file that contains map null values, and the file schema
+-- vs record schema have the null values in different positions
+-- i.e.
+-- fileSchema   = [{ "type" : "map", "values" : ["string","null"]}, "null"]
+-- recordSchema = ["null", { "type" : "map", "values" : ["string","null"]}]
+
+
+DROP TABLE IF EXISTS avro_table
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: -- This test attempts to deserialize an Avro file that contains map null values, and the file schema
+-- vs record schema have the null values in different positions
+-- i.e.
+-- fileSchema   = [{ "type" : "map", "values" : ["string","null"]}, "null"]
+-- recordSchema = ["null", { "type" : "map", "values" : ["string","null"]}]
+
+
+DROP TABLE IF EXISTS avro_table
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE avro_table (avreau_col_1 map<string,string>) STORED AS AVRO
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@avro_table
+POSTHOOK: query: CREATE TABLE avro_table (avreau_col_1 map<string,string>) STORED AS AVRO
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@avro_table
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/map_null_val.avro' OVERWRITE INTO TABLE avro_table
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@avro_table
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/map_null_val.avro' OVERWRITE INTO TABLE avro_table
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@avro_table
+PREHOOK: query: SELECT * FROM avro_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_table
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_table
+#### A masked pattern was here ####
+{"key3":"val3","key4":null}
+{"key3":"val3","key4":null}
+{"key1":null,"key2":"val2"}
+{"key3":"val3","key4":null}
+{"key3":"val3","key4":null}
+PREHOOK: query: DROP TABLE avro_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@avro_table
+PREHOOK: Output: default@avro_table
+POSTHOOK: query: DROP TABLE avro_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@avro_table
+POSTHOOK: Output: default@avro_table
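
The comment at the head of this golden file is the substance of the test: the writer schema in map_null_val.avro puts the map branch of the union first, the Hive-side record schema puts "null" first, and deserialization must cope with the two orderings disagreeing. As a minimal sketch (not part of the patch) of pinning the reader-side ordering down explicitly, the Avro SerDe's avro.schema.literal table property can spell it out; the table name and schema payload below are illustrative assumptions, since the test itself takes its schemas from the data file:

CREATE TABLE avro_map_null_sketch
  STORED AS AVRO
  TBLPROPERTIES ('avro.schema.literal' = '{
    "type": "record",
    "name": "map_null_sketch",
    "fields": [
      { "name": "avreau_col_1",
        "type": ["null", {"type": "map", "values": ["string", "null"]}] }
    ]
  }');
-- Loading map_null_val.avro into this table should exercise the same
-- writer-vs-reader union reordering that the golden output above records.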


[06/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/stats_list_bucket.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/stats_list_bucket.q.out b/ql/src/test/results/clientpositive/stats_list_bucket.q.out
new file mode 100644
index 0000000..c34c414
--- /dev/null
+++ b/ql/src/test/results/clientpositive/stats_list_bucket.q.out
@@ -0,0 +1,189 @@
+PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+
+drop table stats_list_bucket
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+
+drop table stats_list_bucket
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table stats_list_bucket_1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table stats_list_bucket_1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table stats_list_bucket (
+  c1 string,
+  c2 string
+) partitioned by (ds string, hr string)
+skewed by (c1, c2) on  (('466','val_466'),('287','val_287'),('82','val_82'))
+stored as directories
+stored as rcfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@stats_list_bucket
+POSTHOOK: query: create table stats_list_bucket (
+  c1 string,
+  c2 string
+) partitioned by (ds string, hr string)
+skewed by (c1, c2) on  (('466','val_466'),('287','val_287'),('82','val_82'))
+stored as directories
+stored as rcfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@stats_list_bucket
+PREHOOK: query: -- Try partitioned table with list bucketing.
+-- The stats should show 500 rows loaded, as many rows as the src table has.
+
+insert overwrite table stats_list_bucket partition (ds = '2008-04-08',  hr = '11')
+  select key, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@stats_list_bucket@ds=2008-04-08/hr=11
+POSTHOOK: query: -- Try partitioned table with list bucketing.
+-- The stats should show 500 rows loaded, as many rows as the src table has.
+
+insert overwrite table stats_list_bucket partition (ds = '2008-04-08',  hr = '11')
+  select key, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@stats_list_bucket@ds=2008-04-08/hr=11
+POSTHOOK: Lineage: stats_list_bucket PARTITION(ds=2008-04-08,hr=11).c1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_list_bucket PARTITION(ds=2008-04-08,hr=11).c2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: desc formatted stats_list_bucket partition (ds = '2008-04-08',  hr = '11')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@stats_list_bucket
+POSTHOOK: query: desc formatted stats_list_bucket partition (ds = '2008-04-08',  hr = '11')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@stats_list_bucket
+# col_name            	data_type           	comment             
+	 	 
+c1                  	string              	                    
+c2                  	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 11]    	 
+Database:           	default             	 
+Table:              	stats_list_bucket   	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	4                   
+	numRows             	500                 
+	rawDataSize         	4812                
+	totalSize           	5522                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[c1, c2]            	 
+Skewed Values:      	[[466, val_466], [287, val_287], [82, val_82]]	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[466, val_466]=/stats_list_bucket/ds=2008-04-08/hr=11/c1=466/c2=val_466, [82, val_82]=/stats_list_bucket/ds=2008-04-08/hr=11/c1=82/c2=val_82, [287, val_287]=/stats_list_bucket/ds=2008-04-08/hr=11/c1=287/c2=val_287}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: -- Also try non-partitioned table with list bucketing.
+-- Stats should show the same number of rows.
+
+create table stats_list_bucket_1 (
+  c1 string,
+  c2 string
+)
+skewed by (c1, c2) on  (('466','val_466'),('287','val_287'),('82','val_82'))
+stored as directories
+stored as rcfile
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@stats_list_bucket_1
+POSTHOOK: query: -- Also try non-partitioned table with list bucketing.
+-- Stats should show the same number of rows.
+
+create table stats_list_bucket_1 (
+  c1 string,
+  c2 string
+)
+skewed by (c1, c2) on  (('466','val_466'),('287','val_287'),('82','val_82'))
+stored as directories
+stored as rcfile
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@stats_list_bucket_1
+PREHOOK: query: insert overwrite table stats_list_bucket_1
+  select key, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@stats_list_bucket_1
+POSTHOOK: query: insert overwrite table stats_list_bucket_1
+  select key, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@stats_list_bucket_1
+POSTHOOK: Lineage: stats_list_bucket_1.c1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: stats_list_bucket_1.c2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: desc formatted stats_list_bucket_1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@stats_list_bucket_1
+POSTHOOK: query: desc formatted stats_list_bucket_1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@stats_list_bucket_1
+# col_name            	data_type           	comment             
+	 	 
+c1                  	string              	                    
+c2                  	string              	                    
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	4                   
+	numRows             	500                 
+	rawDataSize         	4812                
+	totalSize           	5522                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[c1, c2]            	 
+Skewed Values:      	[[466, val_466], [287, val_287], [82, val_82]]	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[466, val_466]=/stats_list_bucket_1/c1=466/c2=val_466, [287, val_287]=/stats_list_bucket_1/c1=287/c2=val_287, [82, val_82]=/stats_list_bucket_1/c1=82/c2=val_82}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: drop table stats_list_bucket
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@stats_list_bucket
+PREHOOK: Output: default@stats_list_bucket
+POSTHOOK: query: drop table stats_list_bucket
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@stats_list_bucket
+POSTHOOK: Output: default@stats_list_bucket
+PREHOOK: query: drop table stats_list_bucket_1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@stats_list_bucket_1
+PREHOOK: Output: default@stats_list_bucket_1
+POSTHOOK: query: drop table stats_list_bucket_1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@stats_list_bucket_1
+POSTHOOK: Output: default@stats_list_bucket_1
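
Both DESC FORMATTED listings above make the same point: even though list bucketing spreads the 500 rows across four skew subdirectories (numFiles 4), the load still records numRows 500 and marks COLUMN_STATS_ACCURATE with BASIC_STATS true. As a hedged sketch of recovering those basic stats by hand if stats autogather were ever disabled, using the same table and partition spec as the test (the follow-up DESC is just for inspection):

ANALYZE TABLE stats_list_bucket PARTITION (ds = '2008-04-08', hr = '11')
  COMPUTE STATISTICS;
-- DESC FORMATTED should then report the same numRows/numFiles/rawDataSize/
-- totalSize values shown in the golden output above.
DESC FORMATTED stats_list_bucket PARTITION (ds = '2008-04-08', hr = '11');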

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/str_to_map.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/str_to_map.q.java1.7.out b/ql/src/test/results/clientpositive/str_to_map.q.java1.7.out
deleted file mode 100644
index 652acbb..0000000
--- a/ql/src/test/results/clientpositive/str_to_map.q.java1.7.out
+++ /dev/null
@@ -1,220 +0,0 @@
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-desc function str_to_map
-PREHOOK: type: DESCFUNCTION
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-desc function str_to_map
-POSTHOOK: type: DESCFUNCTION
-str_to_map(text, delimiter1, delimiter2) - Creates a map by parsing text 
-PREHOOK: query: desc function extended str_to_map
-PREHOOK: type: DESCFUNCTION
-POSTHOOK: query: desc function extended str_to_map
-POSTHOOK: type: DESCFUNCTION
-str_to_map(text, delimiter1, delimiter2) - Creates a map by parsing text 
-Split text into key-value pairs using two delimiters. The first delimiter seperates pairs, and the second delimiter sperates key and value. If only one parameter is given, default delimiters are used: ',' as delimiter1 and '=' as delimiter2.
-PREHOOK: query: explain select str_to_map('a=1,b=2,c=3',',','=')['a'] from src limit 3
-PREHOOK: type: QUERY
-POSTHOOK: query: explain select str_to_map('a=1,b=2,c=3',',','=')['a'] from src limit 3
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: 3
-      Processor Tree:
-        TableScan
-          alias: src
-          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-          Select Operator
-            expressions: str_to_map('a=1,b=2,c=3',',','=')['a'] (type: string)
-            outputColumnNames: _col0
-            Statistics: Num rows: 500 Data size: 42500 Basic stats: COMPLETE Column stats: COMPLETE
-            Limit
-              Number of rows: 3
-              Statistics: Num rows: 3 Data size: 255 Basic stats: COMPLETE Column stats: COMPLETE
-              ListSink
-
-PREHOOK: query: select str_to_map('a=1,b=2,c=3',',','=')['a'] from src limit 3
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: select str_to_map('a=1,b=2,c=3',',','=')['a'] from src limit 3
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-1
-1
-1
-PREHOOK: query: explain select str_to_map('a:1,b:2,c:3') from src limit 3
-PREHOOK: type: QUERY
-POSTHOOK: query: explain select str_to_map('a:1,b:2,c:3') from src limit 3
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: 3
-      Processor Tree:
-        TableScan
-          alias: src
-          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-          Select Operator
-            expressions: str_to_map('a:1,b:2,c:3') (type: map<string,string>)
-            outputColumnNames: _col0
-            Statistics: Num rows: 500 Data size: 377000 Basic stats: COMPLETE Column stats: COMPLETE
-            Limit
-              Number of rows: 3
-              Statistics: Num rows: 3 Data size: 2262 Basic stats: COMPLETE Column stats: COMPLETE
-              ListSink
-
-PREHOOK: query: select str_to_map('a:1,b:2,c:3') from src limit 3
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: select str_to_map('a:1,b:2,c:3') from src limit 3
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-{"b":"2","a":"1","c":"3"}
-{"b":"2","a":"1","c":"3"}
-{"b":"2","a":"1","c":"3"}
-PREHOOK: query: explain select str_to_map('a:1,b:2,c:3',',',':') from src limit 3
-PREHOOK: type: QUERY
-POSTHOOK: query: explain select str_to_map('a:1,b:2,c:3',',',':') from src limit 3
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: 3
-      Processor Tree:
-        TableScan
-          alias: src
-          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-          Select Operator
-            expressions: str_to_map('a:1,b:2,c:3',',',':') (type: map<string,string>)
-            outputColumnNames: _col0
-            Statistics: Num rows: 500 Data size: 377000 Basic stats: COMPLETE Column stats: COMPLETE
-            Limit
-              Number of rows: 3
-              Statistics: Num rows: 3 Data size: 2262 Basic stats: COMPLETE Column stats: COMPLETE
-              ListSink
-
-PREHOOK: query: select str_to_map('a:1,b:2,c:3',',',':') from src limit 3
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: select str_to_map('a:1,b:2,c:3',',',':') from src limit 3
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-{"b":"2","a":"1","c":"3"}
-{"b":"2","a":"1","c":"3"}
-{"b":"2","a":"1","c":"3"}
-PREHOOK: query: explain select str_to_map(t.ss,',',':')['a']
-from (select transform('a:1,b:2,c:3') using 'cat' as (ss) from src) t
-limit 3
-PREHOOK: type: QUERY
-POSTHOOK: query: explain select str_to_map(t.ss,',',':')['a']
-from (select transform('a:1,b:2,c:3') using 'cat' as (ss) from src) t
-limit 3
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: 'a:1,b:2,c:3' (type: string)
-              outputColumnNames: _col0
-              Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
-              Transform Operator
-                command: cat
-                output info:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: str_to_map(_col0,',',':')['a'] (type: string)
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 500 Data size: 92000 Basic stats: COMPLETE Column stats: COMPLETE
-                  Limit
-                    Number of rows: 3
-                    Statistics: Num rows: 3 Data size: 552 Basic stats: COMPLETE Column stats: COMPLETE
-                    File Output Operator
-                      compressed: false
-                      Statistics: Num rows: 3 Data size: 552 Basic stats: COMPLETE Column stats: COMPLETE
-                      table:
-                          input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: 3
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: select str_to_map(t.ss,',',':')['a']
-from (select transform('a:1,b:2,c:3') using 'cat' as (ss) from src) t
-limit 3
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: select str_to_map(t.ss,',',':')['a']
-from (select transform('a:1,b:2,c:3') using 'cat' as (ss) from src) t
-limit 3
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-1
-1
-1
-PREHOOK: query: drop table tbl_s2m
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table tbl_s2m
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table tbl_s2m as select 'ABC=CC_333=444' as t from src tablesample (3 rows)
-PREHOOK: type: CREATETABLE_AS_SELECT
-PREHOOK: Input: default@src
-PREHOOK: Output: database:default
-PREHOOK: Output: default@tbl_s2m
-POSTHOOK: query: create table tbl_s2m as select 'ABC=CC_333=444' as t from src tablesample (3 rows)
-POSTHOOK: type: CREATETABLE_AS_SELECT
-POSTHOOK: Input: default@src
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@tbl_s2m
-POSTHOOK: Lineage: tbl_s2m.t SIMPLE []
-PREHOOK: query: select str_to_map(t,'_','=')['333'] from tbl_s2m
-PREHOOK: type: QUERY
-PREHOOK: Input: default@tbl_s2m
-#### A masked pattern was here ####
-POSTHOOK: query: select str_to_map(t,'_','=')['333'] from tbl_s2m
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@tbl_s2m
-#### A masked pattern was here ####
-444
-444
-444
-PREHOOK: query: drop table tbl_s2m
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@tbl_s2m
-PREHOOK: Output: default@tbl_s2m
-POSTHOOK: query: drop table tbl_s2m
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@tbl_s2m
-POSTHOOK: Output: default@tbl_s2m

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/str_to_map.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/str_to_map.q.java1.8.out b/ql/src/test/results/clientpositive/str_to_map.q.java1.8.out
deleted file mode 100644
index 23b0cbb..0000000
--- a/ql/src/test/results/clientpositive/str_to_map.q.java1.8.out
+++ /dev/null
@@ -1,219 +0,0 @@
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-desc function str_to_map
-PREHOOK: type: DESCFUNCTION
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-desc function str_to_map
-POSTHOOK: type: DESCFUNCTION
-str_to_map(text, delimiter1, delimiter2) - Creates a map by parsing text 
-PREHOOK: query: desc function extended str_to_map
-PREHOOK: type: DESCFUNCTION
-POSTHOOK: query: desc function extended str_to_map
-POSTHOOK: type: DESCFUNCTION
-str_to_map(text, delimiter1, delimiter2) - Creates a map by parsing text 
-Split text into key-value pairs using two delimiters. The first delimiter seperates pairs, and the second delimiter sperates key and value. If only one parameter is given, default delimiters are used: ',' as delimiter1 and '=' as delimiter2.
-PREHOOK: query: explain select str_to_map('a=1,b=2,c=3',',','=')['a'] from src limit 3
-PREHOOK: type: QUERY
-POSTHOOK: query: explain select str_to_map('a=1,b=2,c=3',',','=')['a'] from src limit 3
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: 3
-      Processor Tree:
-        TableScan
-          alias: src
-          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-          Select Operator
-            expressions: str_to_map('a=1,b=2,c=3',',','=')['a'] (type: string)
-            outputColumnNames: _col0
-            Statistics: Num rows: 500 Data size: 92000 Basic stats: COMPLETE Column stats: COMPLETE
-            Limit
-              Number of rows: 3
-              Statistics: Num rows: 3 Data size: 552 Basic stats: COMPLETE Column stats: COMPLETE
-              ListSink
-
-PREHOOK: query: select str_to_map('a=1,b=2,c=3',',','=')['a'] from src limit 3
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: select str_to_map('a=1,b=2,c=3',',','=')['a'] from src limit 3
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-1
-1
-1
-PREHOOK: query: explain select str_to_map('a:1,b:2,c:3') from src limit 3
-PREHOOK: type: QUERY
-POSTHOOK: query: explain select str_to_map('a:1,b:2,c:3') from src limit 3
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: 3
-      Processor Tree:
-        TableScan
-          alias: src
-          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-          Select Operator
-            expressions: str_to_map('a:1,b:2,c:3') (type: map<string,string>)
-            outputColumnNames: _col0
-            Statistics: Num rows: 500 Data size: 460000 Basic stats: COMPLETE Column stats: COMPLETE
-            Limit
-              Number of rows: 3
-              Statistics: Num rows: 3 Data size: 2760 Basic stats: COMPLETE Column stats: COMPLETE
-              ListSink
-
-PREHOOK: query: select str_to_map('a:1,b:2,c:3') from src limit 3
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: select str_to_map('a:1,b:2,c:3') from src limit 3
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-{"a":"1","b":"2","c":"3"}
-{"a":"1","b":"2","c":"3"}
-{"a":"1","b":"2","c":"3"}
-PREHOOK: query: explain select str_to_map('a:1,b:2,c:3',',',':') from src limit 3
-PREHOOK: type: QUERY
-POSTHOOK: query: explain select str_to_map('a:1,b:2,c:3',',',':') from src limit 3
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: 3
-      Processor Tree:
-        TableScan
-          alias: src
-          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-          Select Operator
-            expressions: str_to_map('a:1,b:2,c:3',',',':') (type: map<string,string>)
-            outputColumnNames: _col0
-            Statistics: Num rows: 500 Data size: 460000 Basic stats: COMPLETE Column stats: COMPLETE
-            Limit
-              Number of rows: 3
-              Statistics: Num rows: 3 Data size: 2760 Basic stats: COMPLETE Column stats: COMPLETE
-              ListSink
-
-PREHOOK: query: select str_to_map('a:1,b:2,c:3',',',':') from src limit 3
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: select str_to_map('a:1,b:2,c:3',',',':') from src limit 3
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-{"a":"1","b":"2","c":"3"}
-{"a":"1","b":"2","c":"3"}
-{"a":"1","b":"2","c":"3"}
-PREHOOK: query: explain select str_to_map(t.ss,',',':')['a']
-from (select transform('a:1,b:2,c:3') using 'cat' as (ss) from src) t
-limit 3
-PREHOOK: type: QUERY
-POSTHOOK: query: explain select str_to_map(t.ss,',',':')['a']
-from (select transform('a:1,b:2,c:3') using 'cat' as (ss) from src) t
-limit 3
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              expressions: 'a:1,b:2,c:3' (type: string)
-              outputColumnNames: _col0
-              Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
-              Transform Operator
-                command: cat
-                output info:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: str_to_map(_col0,',',':')['a'] (type: string)
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 500 Data size: 92000 Basic stats: COMPLETE Column stats: COMPLETE
-                  Limit
-                    Number of rows: 3
-                    Statistics: Num rows: 3 Data size: 552 Basic stats: COMPLETE Column stats: COMPLETE
-                    File Output Operator
-                      compressed: false
-                      Statistics: Num rows: 3 Data size: 552 Basic stats: COMPLETE Column stats: COMPLETE
-                      table:
-                          input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: 3
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: select str_to_map(t.ss,',',':')['a']
-from (select transform('a:1,b:2,c:3') using 'cat' as (ss) from src) t
-limit 3
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: select str_to_map(t.ss,',',':')['a']
-from (select transform('a:1,b:2,c:3') using 'cat' as (ss) from src) t
-limit 3
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-1
-1
-1
-PREHOOK: query: drop table tbl_s2m
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table tbl_s2m
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table tbl_s2m as select 'ABC=CC_333=444' as t from src tablesample (3 rows)
-PREHOOK: type: CREATETABLE_AS_SELECT
-PREHOOK: Input: default@src
-PREHOOK: Output: database:default
-PREHOOK: Output: default@tbl_s2m
-POSTHOOK: query: create table tbl_s2m as select 'ABC=CC_333=444' as t from src tablesample (3 rows)
-POSTHOOK: type: CREATETABLE_AS_SELECT
-POSTHOOK: Input: default@src
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@tbl_s2m
-PREHOOK: query: select str_to_map(t,'_','=')['333'] from tbl_s2m
-PREHOOK: type: QUERY
-PREHOOK: Input: default@tbl_s2m
-#### A masked pattern was here ####
-POSTHOOK: query: select str_to_map(t,'_','=')['333'] from tbl_s2m
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@tbl_s2m
-#### A masked pattern was here ####
-444
-444
-444
-PREHOOK: query: drop table tbl_s2m
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@tbl_s2m
-PREHOOK: Output: default@tbl_s2m
-POSTHOOK: query: drop table tbl_s2m
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@tbl_s2m
-POSTHOOK: Output: default@tbl_s2m
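
Read side by side, the two deleted golden files differ in just the way HIVE-13549 targets: the java1.7 file prints str_to_map's whole-map results as {"b":"2","a":"1","c":"3"} while the java1.8 file prints {"a":"1","b":"2","c":"3"}, along with somewhat different size estimates in the plans. That is consistent with the UDF materializing a hash-ordered java.util.Map whose iteration order changed between JDK releases, though this is an inference from the outputs, not something the patch states. The ordering-independent style, sketched here with explicit delimiters against the stock src test table, never prints a whole map:

SELECT str_to_map('a:1,b:2,c:3', ',', ':')['b'] FROM src LIMIT 1;
-- Indexing returns the scalar '2' on any JDK; only whole-map output is
-- sensitive to hash iteration order.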

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/str_to_map.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/str_to_map.q.out b/ql/src/test/results/clientpositive/str_to_map.q.out
new file mode 100644
index 0000000..30c98db
--- /dev/null
+++ b/ql/src/test/results/clientpositive/str_to_map.q.out
@@ -0,0 +1,216 @@
+PREHOOK: query: desc function str_to_map
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: desc function str_to_map
+POSTHOOK: type: DESCFUNCTION
+str_to_map(text, delimiter1, delimiter2) - Creates a map by parsing text 
+PREHOOK: query: desc function extended str_to_map
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: desc function extended str_to_map
+POSTHOOK: type: DESCFUNCTION
+str_to_map(text, delimiter1, delimiter2) - Creates a map by parsing text 
+Split text into key-value pairs using two delimiters. The first delimiter seperates pairs, and the second delimiter sperates key and value. If only one parameter is given, default delimiters are used: ',' as delimiter1 and '=' as delimiter2.
+PREHOOK: query: explain select str_to_map('a=1,b=2,c=3',',','=')['a'] from src limit 3
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select str_to_map('a=1,b=2,c=3',',','=')['a'] from src limit 3
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: 3
+      Processor Tree:
+        TableScan
+          alias: src
+          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: str_to_map('a=1,b=2,c=3',',','=')['a'] (type: string)
+            outputColumnNames: _col0
+            Statistics: Num rows: 500 Data size: 42500 Basic stats: COMPLETE Column stats: COMPLETE
+            Limit
+              Number of rows: 3
+              Statistics: Num rows: 3 Data size: 255 Basic stats: COMPLETE Column stats: COMPLETE
+              ListSink
+
+PREHOOK: query: select str_to_map('a=1,b=2,c=3',',','=')['a'] from src limit 3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select str_to_map('a=1,b=2,c=3',',','=')['a'] from src limit 3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+1
+1
+1
+PREHOOK: query: explain select str_to_map('a:1,b:2,c:3') from src limit 3
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select str_to_map('a:1,b:2,c:3') from src limit 3
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: 3
+      Processor Tree:
+        TableScan
+          alias: src
+          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: str_to_map('a:1,b:2,c:3') (type: map<string,string>)
+            outputColumnNames: _col0
+            Statistics: Num rows: 500 Data size: 377000 Basic stats: COMPLETE Column stats: COMPLETE
+            Limit
+              Number of rows: 3
+              Statistics: Num rows: 3 Data size: 2262 Basic stats: COMPLETE Column stats: COMPLETE
+              ListSink
+
+PREHOOK: query: select str_to_map('a:1,b:2,c:3') from src limit 3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select str_to_map('a:1,b:2,c:3') from src limit 3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+{"a":"1","b":"2","c":"3"}
+{"a":"1","b":"2","c":"3"}
+{"a":"1","b":"2","c":"3"}
+PREHOOK: query: explain select str_to_map('a:1,b:2,c:3',',',':') from src limit 3
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select str_to_map('a:1,b:2,c:3',',',':') from src limit 3
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: 3
+      Processor Tree:
+        TableScan
+          alias: src
+          Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: str_to_map('a:1,b:2,c:3',',',':') (type: map<string,string>)
+            outputColumnNames: _col0
+            Statistics: Num rows: 500 Data size: 377000 Basic stats: COMPLETE Column stats: COMPLETE
+            Limit
+              Number of rows: 3
+              Statistics: Num rows: 3 Data size: 2262 Basic stats: COMPLETE Column stats: COMPLETE
+              ListSink
+
+PREHOOK: query: select str_to_map('a:1,b:2,c:3',',',':') from src limit 3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select str_to_map('a:1,b:2,c:3',',',':') from src limit 3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+{"a":"1","b":"2","c":"3"}
+{"a":"1","b":"2","c":"3"}
+{"a":"1","b":"2","c":"3"}
+PREHOOK: query: explain select str_to_map(t.ss,',',':')['a']
+from (select transform('a:1,b:2,c:3') using 'cat' as (ss) from src) t
+limit 3
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select str_to_map(t.ss,',',':')['a']
+from (select transform('a:1,b:2,c:3') using 'cat' as (ss) from src) t
+limit 3
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: 'a:1,b:2,c:3' (type: string)
+              outputColumnNames: _col0
+              Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
+              Transform Operator
+                command: cat
+                output info:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                Statistics: Num rows: 500 Data size: 47500 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: str_to_map(_col0,',',':')['a'] (type: string)
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 500 Data size: 92000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Limit
+                    Number of rows: 3
+                    Statistics: Num rows: 3 Data size: 552 Basic stats: COMPLETE Column stats: COMPLETE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 3 Data size: 552 Basic stats: COMPLETE Column stats: COMPLETE
+                      table:
+                          input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 3
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select str_to_map(t.ss,',',':')['a']
+from (select transform('a:1,b:2,c:3') using 'cat' as (ss) from src) t
+limit 3
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select str_to_map(t.ss,',',':')['a']
+from (select transform('a:1,b:2,c:3') using 'cat' as (ss) from src) t
+limit 3
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+1
+1
+1
+PREHOOK: query: drop table tbl_s2m
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table tbl_s2m
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table tbl_s2m as select 'ABC=CC_333=444' as t from src tablesample (3 rows)
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tbl_s2m
+POSTHOOK: query: create table tbl_s2m as select 'ABC=CC_333=444' as t from src tablesample (3 rows)
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tbl_s2m
+POSTHOOK: Lineage: tbl_s2m.t SIMPLE []
+PREHOOK: query: select str_to_map(t,'_','=')['333'] from tbl_s2m
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tbl_s2m
+#### A masked pattern was here ####
+POSTHOOK: query: select str_to_map(t,'_','=')['333'] from tbl_s2m
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tbl_s2m
+#### A masked pattern was here ####
+444
+444
+444
+PREHOOK: query: drop table tbl_s2m
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tbl_s2m
+PREHOOK: Output: default@tbl_s2m
+POSTHOOK: query: drop table tbl_s2m
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tbl_s2m
+POSTHOOK: Output: default@tbl_s2m
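
The tbl_s2m rows at the end of this file take one short trace to verify: 'ABC=CC_333=444' split on '_' yields the pairs 'ABC=CC' and '333=444', and splitting each of those on '=' yields the map {'ABC':'CC', '333':'444'}, so the ['333'] lookup returns '444' for each of the three sampled rows. A standalone restatement, assuming only the stock src test table:

SELECT str_to_map('ABC=CC_333=444', '_', '=')['333'] FROM src LIMIT 1;
-- returns '444'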

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/subquery_multiinsert.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/subquery_multiinsert.q.java1.7.out b/ql/src/test/results/clientpositive/subquery_multiinsert.q.java1.7.out
deleted file mode 100644
index 279843b..0000000
--- a/ql/src/test/results/clientpositive/subquery_multiinsert.q.java1.7.out
+++ /dev/null
@@ -1,999 +0,0 @@
-PREHOOK: query: -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-CREATE TABLE src_4(
-  key STRING, 
-  value STRING
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@src_4
-POSTHOOK: query: -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-CREATE TABLE src_4(
-  key STRING, 
-  value STRING
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@src_4
-RUN: Stage-0:DDL
-PREHOOK: query: CREATE TABLE src_5( 
-  key STRING, 
-  value STRING
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@src_5
-POSTHOOK: query: CREATE TABLE src_5( 
-  key STRING, 
-  value STRING
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@src_5
-RUN: Stage-0:DDL
-Warning: Shuffle Join JOIN[31][tables = [b, sq_2_notin_nullcheck]] in Stage 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: explain
-from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-PREHOOK: type: QUERY
-POSTHOOK: query: explain
-from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-10 is a root stage
-  Stage-2 depends on stages: Stage-10
-  Stage-3 depends on stages: Stage-2
-  Stage-4 depends on stages: Stage-3
-  Stage-1 depends on stages: Stage-4
-  Stage-5 depends on stages: Stage-1
-  Stage-6 depends on stages: Stage-2
-  Stage-0 depends on stages: Stage-6
-  Stage-7 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-10
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: s1
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: ((key > '2') and key is null) (type: boolean)
-              Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                Group By Operator
-                  aggregations: count()
-                  mode: hash
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    sort order: 
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: _col0 (type: bigint)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: count(VALUE._col0)
-          mode: mergepartial
-          outputColumnNames: _col0
-          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: (_col0 = 0) (type: boolean)
-            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                keys: 0 (type: bigint)
-                mode: hash
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              value expressions: key (type: string), value (type: string)
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Left Semi Join 0 to 1
-          keys:
-            0 
-            1 
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string)
-              sort order: +
-              Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col1 (type: string)
-          TableScan
-            alias: s1
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: (key > '2') (type: boolean)
-              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string)
-                outputColumnNames: _col0
-                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Left Outer Join0 to 1
-          keys:
-            0 _col0 (type: string)
-            1 _col0 (type: string)
-          outputColumnNames: _col0, _col1, _col5
-          Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: _col5 is null (type: boolean)
-            Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-4
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string)
-              sort order: +
-              Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col1 (type: string)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.src_5
-
-  Stage: Stage-1
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src_5
-
-  Stage: Stage-5
-    Stats-Aggr Operator
-
-  Stage: Stage-6
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: key (type: string), value (type: string)
-              sort order: ++
-              Map-reduce partition columns: key (type: string), value (type: string)
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-          TableScan
-            alias: a
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: ((key > '9') and value is not null) (type: boolean)
-              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                Group By Operator
-                  keys: _col0 (type: string), _col1 (type: string)
-                  mode: hash
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string), _col1 (type: string)
-                    sort order: ++
-                    Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Left Semi Join 0 to 1
-          keys:
-            0 key (type: string), value (type: string)
-            1 _col0 (type: string), _col1 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.src_4
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src_4
-
-  Stage: Stage-7
-    Stats-Aggr Operator
-
-Warning: Shuffle Join JOIN[31][tables = [b, sq_2_notin_nullcheck]] in Stage 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@src_4
-PREHOOK: Output: default@src_5
-POSTHOOK: query: from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@src_4
-POSTHOOK: Output: default@src_5
-POSTHOOK: Lineage: src_4.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_4.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-RUN: Stage-10:MAPRED
-RUN: Stage-2:MAPRED
-RUN: Stage-3:MAPRED
-RUN: Stage-6:MAPRED
-RUN: Stage-4:MAPRED
-RUN: Stage-0:MOVE
-RUN: Stage-1:MOVE
-RUN: Stage-7:STATS
-RUN: Stage-5:STATS
-PREHOOK: query: select * from src_4
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_4
-#### A masked pattern was here ####
-POSTHOOK: query: select * from src_4
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_4
-#### A masked pattern was here ####
-90	val_90
-90	val_90
-90	val_90
-92	val_92
-95	val_95
-95	val_95
-96	val_96
-97	val_97
-97	val_97
-98	val_98
-98	val_98
-PREHOOK: query: select * from src_5
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_5
-#### A masked pattern was here ####
-POSTHOOK: query: select * from src_5
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_5
-#### A masked pattern was here ####
-0	val_0
-0	val_0
-0	val_0
-10	val_10
-100	val_100
-100	val_100
-103	val_103
-103	val_103
-104	val_104
-104	val_104
-105	val_105
-11	val_11
-111	val_111
-113	val_113
-113	val_113
-114	val_114
-116	val_116
-118	val_118
-118	val_118
-119	val_119
-119	val_119
-119	val_119
-12	val_12
-12	val_12
-120	val_120
-120	val_120
-125	val_125
-125	val_125
-126	val_126
-128	val_128
-128	val_128
-128	val_128
-129	val_129
-129	val_129
-131	val_131
-133	val_133
-134	val_134
-134	val_134
-136	val_136
-137	val_137
-137	val_137
-138	val_138
-138	val_138
-138	val_138
-138	val_138
-143	val_143
-145	val_145
-146	val_146
-146	val_146
-149	val_149
-149	val_149
-15	val_15
-15	val_15
-150	val_150
-152	val_152
-152	val_152
-153	val_153
-155	val_155
-156	val_156
-157	val_157
-158	val_158
-160	val_160
-162	val_162
-163	val_163
-164	val_164
-164	val_164
-165	val_165
-165	val_165
-166	val_166
-167	val_167
-167	val_167
-167	val_167
-168	val_168
-169	val_169
-169	val_169
-169	val_169
-169	val_169
-17	val_17
-170	val_170
-172	val_172
-172	val_172
-174	val_174
-174	val_174
-175	val_175
-175	val_175
-176	val_176
-176	val_176
-177	val_177
-178	val_178
-179	val_179
-179	val_179
-18	val_18
-18	val_18
-180	val_180
-181	val_181
-183	val_183
-186	val_186
-187	val_187
-187	val_187
-187	val_187
-189	val_189
-19	val_19
-190	val_190
-191	val_191
-191	val_191
-192	val_192
-193	val_193
-193	val_193
-193	val_193
-194	val_194
-195	val_195
-195	val_195
-196	val_196
-197	val_197
-197	val_197
-199	val_199
-199	val_199
-199	val_199
-2	val_2
-Warning: Map Join MAPJOIN[55][bigTable=b] in task 'Stage-13:MAPRED' is a cross product
-Warning: Shuffle Join JOIN[31][tables = [b, sq_2_notin_nullcheck]] in Stage 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: explain
-from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-PREHOOK: type: QUERY
-POSTHOOK: query: explain
-from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-10 is a root stage
-  Stage-14 depends on stages: Stage-10 , consists of Stage-17, Stage-2
-  Stage-17 has a backup stage: Stage-2
-  Stage-13 depends on stages: Stage-17
-  Stage-15 depends on stages: Stage-2, Stage-13
-  Stage-12 depends on stages: Stage-15
-  Stage-0 depends on stages: Stage-12
-  Stage-7 depends on stages: Stage-0
-  Stage-16 depends on stages: Stage-2, Stage-13
-  Stage-4 depends on stages: Stage-16
-  Stage-1 depends on stages: Stage-4
-  Stage-5 depends on stages: Stage-1
-  Stage-2
-
-STAGE PLANS:
-  Stage: Stage-10
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: s1
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: ((key > '2') and key is null) (type: boolean)
-              Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                Group By Operator
-                  aggregations: count()
-                  mode: hash
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    sort order: 
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: _col0 (type: bigint)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: count(VALUE._col0)
-          mode: mergepartial
-          outputColumnNames: _col0
-          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: (_col0 = 0) (type: boolean)
-            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                keys: 0 (type: bigint)
-                mode: hash
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-14
-    Conditional Operator
-
-  Stage: Stage-17
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        $INTNAME 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        $INTNAME 
-          TableScan
-            HashTable Sink Operator
-              keys:
-                0 
-                1 
-
-  Stage: Stage-13
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Map Join Operator
-              condition map:
-                   Left Semi Join 0 to 1
-              keys:
-                0 
-                1 
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-      Local Work:
-        Map Reduce Local Work
-
-  Stage: Stage-15
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        sq_1:a 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        sq_1:a 
-          TableScan
-            alias: a
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: ((key > '9') and value is not null) (type: boolean)
-              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                Group By Operator
-                  keys: _col0 (type: string), _col1 (type: string)
-                  mode: hash
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                  HashTable Sink Operator
-                    keys:
-                      0 key (type: string), value (type: string)
-                      1 _col0 (type: string), _col1 (type: string)
-
-  Stage: Stage-12
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Map Join Operator
-              condition map:
-                   Left Semi Join 0 to 1
-              keys:
-                0 key (type: string), value (type: string)
-                1 _col0 (type: string), _col1 (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.src_4
-      Local Work:
-        Map Reduce Local Work
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src_4
-
-  Stage: Stage-7
-    Stats-Aggr Operator
-
-  Stage: Stage-16
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        sq_2:s1 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        sq_2:s1 
-          TableScan
-            alias: s1
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: (key > '2') (type: boolean)
-              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string)
-                outputColumnNames: _col0
-                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                HashTable Sink Operator
-                  keys:
-                    0 _col0 (type: string)
-                    1 _col0 (type: string)
-
-  Stage: Stage-4
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Map Join Operator
-              condition map:
-                   Left Outer Join0 to 1
-              keys:
-                0 _col0 (type: string)
-                1 _col0 (type: string)
-              outputColumnNames: _col0, _col1, _col5
-              Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-              Filter Operator
-                predicate: _col5 is null (type: boolean)
-                Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col0 (type: string), _col1 (type: string)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string)
-                    sort order: +
-                    Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: _col1 (type: string)
-      Local Work:
-        Map Reduce Local Work
-      Reduce Operator Tree:
-        Select Operator
-          expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.src_5
-
-  Stage: Stage-1
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src_5
-
-  Stage: Stage-5
-    Stats-Aggr Operator
-
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              value expressions: key (type: string), value (type: string)
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Left Semi Join 0 to 1
-          keys:
-            0 
-            1 
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-Warning: Map Join MAPJOIN[55][bigTable=b] in task 'Stage-13:MAPRED' is a cross product
-Warning: Shuffle Join JOIN[31][tables = [b, sq_2_notin_nullcheck]] in Stage 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@src_4
-PREHOOK: Output: default@src_5
-POSTHOOK: query: from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@src_4
-POSTHOOK: Output: default@src_5
-POSTHOOK: Lineage: src_4.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_4.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-RUN: Stage-10:MAPRED
-RUN: Stage-14:CONDITIONAL
-RUN: Stage-17:MAPREDLOCAL
-RUN: Stage-13:MAPRED
-RUN: Stage-15:MAPREDLOCAL
-RUN: Stage-16:MAPREDLOCAL
-RUN: Stage-12:MAPRED
-RUN: Stage-4:MAPRED
-RUN: Stage-0:MOVE
-RUN: Stage-1:MOVE
-RUN: Stage-7:STATS
-RUN: Stage-5:STATS
-PREHOOK: query: select * from src_4
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_4
-#### A masked pattern was here ####
-POSTHOOK: query: select * from src_4
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_4
-#### A masked pattern was here ####
-90	val_90
-90	val_90
-90	val_90
-92	val_92
-95	val_95
-95	val_95
-96	val_96
-97	val_97
-97	val_97
-98	val_98
-98	val_98
-PREHOOK: query: select * from src_5
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_5
-#### A masked pattern was here ####
-POSTHOOK: query: select * from src_5
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_5
-#### A masked pattern was here ####
-0	val_0
-0	val_0
-0	val_0
-10	val_10
-100	val_100
-100	val_100
-103	val_103
-103	val_103
-104	val_104
-104	val_104
-105	val_105
-11	val_11
-111	val_111
-113	val_113
-113	val_113
-114	val_114
-116	val_116
-118	val_118
-118	val_118
-119	val_119
-119	val_119
-119	val_119
-12	val_12
-12	val_12
-120	val_120
-120	val_120
-125	val_125
-125	val_125
-126	val_126
-128	val_128
-128	val_128
-128	val_128
-129	val_129
-129	val_129
-131	val_131
-133	val_133
-134	val_134
-134	val_134
-136	val_136
-137	val_137
-137	val_137
-138	val_138
-138	val_138
-138	val_138
-138	val_138
-143	val_143
-145	val_145
-146	val_146
-146	val_146
-149	val_149
-149	val_149
-15	val_15
-15	val_15
-150	val_150
-152	val_152
-152	val_152
-153	val_153
-155	val_155
-156	val_156
-157	val_157
-158	val_158
-160	val_160
-162	val_162
-163	val_163
-164	val_164
-164	val_164
-165	val_165
-165	val_165
-166	val_166
-167	val_167
-167	val_167
-167	val_167
-168	val_168
-169	val_169
-169	val_169
-169	val_169
-169	val_169
-17	val_17
-170	val_170
-172	val_172
-172	val_172
-174	val_174
-174	val_174
-175	val_175
-175	val_175
-176	val_176
-176	val_176
-177	val_177
-178	val_178
-179	val_179
-179	val_179
-18	val_18
-18	val_18
-180	val_180
-181	val_181
-183	val_183
-186	val_186
-187	val_187
-187	val_187
-187	val_187
-189	val_189
-19	val_19
-190	val_190
-191	val_191
-191	val_191
-192	val_192
-193	val_193
-193	val_193
-193	val_193
-194	val_194
-195	val_195
-195	val_195
-196	val_196
-197	val_197
-197	val_197
-199	val_199
-199	val_199
-199	val_199
-2	val_2


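The sq_2_notin_nullcheck stage and the (_col0 = 0) filter in the plan above implement SQL's three-valued NOT IN semantics: if the subquery result contains any NULL key, "b.key not in (...)" can never evaluate to true, so the plan first counts NULL-producing subquery rows and only lets the outer query keep rows when that count is zero. A minimal Java sketch of that rule (the class and method names are illustrative only, not Hive code):

    import java.util.Arrays;
    import java.util.List;

    public class NotInSemantics {
        // SQL three-valued logic: Boolean.TRUE, Boolean.FALSE, or null (UNKNOWN).
        static Boolean notIn(String x, List<String> subquery) {
            if (x == null) return null;                  // NULL key: UNKNOWN
            boolean sawNull = false;
            for (String s : subquery) {
                if (s == null) { sawNull = true; continue; }
                if (s.equals(x)) return Boolean.FALSE;   // key is IN the set
            }
            // If the set held any NULL, "not in" is UNKNOWN and never TRUE,
            // so the row is filtered -- hence the null-count guard stage.
            return sawNull ? null : Boolean.TRUE;
        }

        public static void main(String[] args) {
            System.out.println(notIn("2", Arrays.asList("3", "5")));       // true
            System.out.println(notIn("2", Arrays.asList("3", null, "5"))); // null (UNKNOWN)
        }
    }
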
[22/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_12.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_12.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_12.q.out
new file mode 100644
index 0000000..0e11f3f
--- /dev/null
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_12.q.out
@@ -0,0 +1,424 @@
+PREHOOK: query: -- Ensure it works if skewed column is not the first column in the table columns
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- SORT_QUERY_RESULTS
+
+-- test where the skewed values are more than 1 say columns no. 2 and 4 in a table with 5 columns
+create table list_bucketing_mul_col (col1 String, col2 String, col3 String, col4 String, col5 string) 
+    partitioned by (ds String, hr String) 
+    skewed by (col2, col4) on (('466','val_466'),('287','val_287'),('82','val_82'))
+    stored as DIRECTORIES
+    STORED AS RCFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@list_bucketing_mul_col
+POSTHOOK: query: -- Ensure it works if skewed column is not the first column in the table columns
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- SORT_QUERY_RESULTS
+
+-- test where the skewed values are more than 1 say columns no. 2 and 4 in a table with 5 columns
+create table list_bucketing_mul_col (col1 String, col2 String, col3 String, col4 String, col5 string) 
+    partitioned by (ds String, hr String) 
+    skewed by (col2, col4) on (('466','val_466'),('287','val_287'),('82','val_82'))
+    stored as DIRECTORIES
+    STORED AS RCFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@list_bucketing_mul_col
+PREHOOK: query: -- list bucketing DML 
+explain extended
+insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08',  hr = '11')
+select 1, key, 1, value, 1 from src
+PREHOOK: type: QUERY
+POSTHOOK: query: -- list bucketing DML 
+explain extended
+insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08',  hr = '11')
+select 1, key, 1, value, 1 from src
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: '1' (type: string), key (type: string), '1' (type: string), value (type: string), '1' (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4
+              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Static Partition Specification: ds=2008-04-08/hr=11/
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns col1,col2,col3,col4,col5
+                      columns.comments 
+                      columns.types string:string:string:string:string
+#### A masked pattern was here ####
+                      name default.list_bucketing_mul_col
+                      partition_columns ds/hr
+                      partition_columns.types string:string
+                      serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.list_bucketing_mul_col
+                TotalFiles: 1
+                GatherStats: true
+                MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numRows 500
+              rawDataSize 5312
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numRows 500
+                rawDataSize 5312
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Truncated Path -> Alias:
+        /src [src]
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 11
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns col1,col2,col3,col4,col5
+                columns.comments 
+                columns.types string:string:string:string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_mul_col
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_mul_col
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08', hr = '11')
+select 1, key, 1, value, 1 from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
+POSTHOOK: query: insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08', hr = '11')
+select 1, key, 1, value, 1 from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
+POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col1 EXPRESSION []
+POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col2 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col3 EXPRESSION []
+POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col4 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col5 EXPRESSION []
+PREHOOK: query: -- check DML result
+show partitions list_bucketing_mul_col
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@list_bucketing_mul_col
+POSTHOOK: query: -- check DML result
+show partitions list_bucketing_mul_col
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@list_bucketing_mul_col
+ds=2008-04-08/hr=11
+PREHOOK: query: desc formatted list_bucketing_mul_col partition (ds='2008-04-08', hr='11')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_mul_col
+POSTHOOK: query: desc formatted list_bucketing_mul_col partition (ds='2008-04-08', hr='11')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_mul_col
+# col_name            	data_type           	comment             
+	 	 
+col1                	string              	                    
+col2                	string              	                    
+col3                	string              	                    
+col4                	string              	                    
+col5                	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 11]    	 
+Database:           	default             	 
+Table:              	list_bucketing_mul_col	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	4                   
+	numRows             	500                 
+	rawDataSize         	6312                
+	totalSize           	7094                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[col2, col4]        	 
+Skewed Values:      	[[466, val_466], [287, val_287], [82, val_82]]	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[466, val_466]=/list_bucketing_mul_col/ds=2008-04-08/hr=11/col2=466/col4=val_466, [287, val_287]=/list_bucketing_mul_col/ds=2008-04-08/hr=11/col2=287/col4=val_287, [82, val_82]=/list_bucketing_mul_col/ds=2008-04-08/hr=11/col2=82/col4=val_82}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: explain extended
+select * from list_bucketing_mul_col 
+where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466"
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select * from list_bucketing_mul_col 
+where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466"
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Partition Description:
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+              bucket_count -1
+              columns col1,col2,col3,col4,col5
+              columns.comments 
+              columns.types string:string:string:string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_mul_col
+              numFiles 4
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 6312
+              serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              totalSize 7094
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns col1,col2,col3,col4,col5
+                columns.comments 
+                columns.types string:string:string:string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_mul_col
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_mul_col
+            name: default.list_bucketing_mul_col
+      Processor Tree:
+        TableScan
+          alias: list_bucketing_mul_col
+          Statistics: Num rows: 500 Data size: 6312 Basic stats: COMPLETE Column stats: NONE
+          GatherStats: false
+          Filter Operator
+            isSamplingPred: false
+            predicate: ((col2 = '466') and (col4 = 'val_466')) (type: boolean)
+            Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: col1 (type: string), '466' (type: string), col3 (type: string), 'val_466' (type: string), col5 (type: string), '2008-04-08' (type: string), '11' (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+              Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
+              ListSink
+
+PREHOOK: query: select * from list_bucketing_mul_col 
+where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466"
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_mul_col
+PREHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select * from list_bucketing_mul_col 
+where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466"
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_mul_col
+POSTHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+1	466	1	val_466	1	2008-04-08	11
+1	466	1	val_466	1	2008-04-08	11
+1	466	1	val_466	1	2008-04-08	11
+PREHOOK: query: explain extended
+select * from list_bucketing_mul_col 
+where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382"
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select * from list_bucketing_mul_col 
+where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382"
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Partition Description:
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+              bucket_count -1
+              columns col1,col2,col3,col4,col5
+              columns.comments 
+              columns.types string:string:string:string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_mul_col
+              numFiles 4
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 6312
+              serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              totalSize 7094
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns col1,col2,col3,col4,col5
+                columns.comments 
+                columns.types string:string:string:string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_mul_col
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_mul_col
+            name: default.list_bucketing_mul_col
+      Processor Tree:
+        TableScan
+          alias: list_bucketing_mul_col
+          Statistics: Num rows: 500 Data size: 6312 Basic stats: COMPLETE Column stats: NONE
+          GatherStats: false
+          Filter Operator
+            isSamplingPred: false
+            predicate: ((col2 = '382') and (col4 = 'val_382')) (type: boolean)
+            Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: col1 (type: string), '382' (type: string), col3 (type: string), 'val_382' (type: string), col5 (type: string), '2008-04-08' (type: string), '11' (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+              Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
+              ListSink
+
+PREHOOK: query: select * from list_bucketing_mul_col 
+where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382"
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_mul_col
+PREHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select * from list_bucketing_mul_col 
+where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382"
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_mul_col
+POSTHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+1	382	1	val_382	1	2008-04-08	11
+1	382	1	val_382	1	2008-04-08	11
+PREHOOK: query: drop table list_bucketing_mul_col
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@list_bucketing_mul_col
+PREHOOK: Output: default@list_bucketing_mul_col
+POSTHOOK: query: drop table list_bucketing_mul_col
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@list_bucketing_mul_col
+POSTHOOK: Output: default@list_bucketing_mul_col

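Note that the consolidated golden file above records COLUMN_STATS_ACCURATE as {"BASIC_STATS":"true","COLUMN_STATS":{...}} with the keys in alphabetical order, while the java1.7 file deleted below records {"COLUMN_STATS":{...},"BASIC_STATS":"true"}. The divergence is the classic map-ordering problem: HashMap iteration order is unspecified and in practice differs between JDK 7 and JDK 8, so JSON built by walking an unordered map serializes its keys differently per JDK. A minimal sketch of the failure mode and of imposing a deterministic order (plain Java with a hypothetical toJson helper, not Hive's serializer):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;

    public class StatsJsonOrder {
        // Naive JSON rendering that simply walks the map in iteration order.
        static String toJson(Map<String, String> m) {
            StringBuilder sb = new StringBuilder("{");
            for (Map.Entry<String, String> e : m.entrySet()) {
                if (sb.length() > 1) sb.append(',');
                sb.append('"').append(e.getKey()).append("\":\"")
                  .append(e.getValue()).append('"');
            }
            return sb.append('}').toString();
        }

        public static void main(String[] args) {
            Map<String, String> stats = new HashMap<>();
            stats.put("COLUMN_STATS", "...");
            stats.put("BASIC_STATS", "true");
            // Key order here depends on HashMap internals, i.e. on the JDK:
            System.out.println(toJson(stats));
            // Sorting the keys (here via TreeMap) is identical on every JDK:
            System.out.println(toJson(new TreeMap<>(stats)));
            // -> {"BASIC_STATS":"true","COLUMN_STATS":"..."}
        }
    }

A deterministic, sorted rendering yields the alphabetical ordering the new golden files record, which is what makes a single out file valid on both JDKs.
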
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_13.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_13.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_13.q.java1.7.out
deleted file mode 100644
index bfce335..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_13.q.java1.7.out
+++ /dev/null
@@ -1,337 +0,0 @@
-PREHOOK: query: -- Ensure skewed value map has escaped directory name
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- test where the skewed values are more than 1 say columns no. 2 and 4 in a table with 5 columns
-create table list_bucketing_mul_col (col1 String, col2 String, col3 String, col4 String, col5 string) 
-    partitioned by (ds String, hr String) 
-    skewed by (col2, col4) on (('466','val_466'),('287','val_287'),('82','val_82'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_mul_col
-POSTHOOK: query: -- Ensure skewed value map has escaped directory name
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- test where the skewed values are more than 1 say columns no. 2 and 4 in a table with 5 columns
-create table list_bucketing_mul_col (col1 String, col2 String, col3 String, col4 String, col5 string) 
-    partitioned by (ds String, hr String) 
-    skewed by (col2, col4) on (('466','val_466'),('287','val_287'),('82','val_82'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_mul_col
-PREHOOK: query: -- list bucketing DML 
-explain extended
-insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08',  hr = '2013-01-23+18:00:99')
-select 1, key, 1, value, 1 from src
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML 
-explain extended
-insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08',  hr = '2013-01-23+18:00:99')
-select 1, key, 1, value, 1 from src
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: '1' (type: string), key (type: string), '1' (type: string), value (type: string), '1' (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/hr=2013-01-23+18%3A00%3A99/
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns col1,col2,col3,col4,col5
-                      columns.comments 
-                      columns.types string:string:string:string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_mul_col
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_mul_col
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numRows 500
-              rawDataSize 5312
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numRows 500
-                rawDataSize 5312
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-      Truncated Path -> Alias:
-        /src [src]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 2013-01-23+18:00:99
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns col1,col2,col3,col4,col5
-                columns.comments 
-                columns.types string:string:string:string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_mul_col
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_mul_col
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08', hr = '2013-01-23+18:00:99')
-select 1, key, 1, value, 1 from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@list_bucketing_mul_col@ds=2008-04-08/hr=2013-01-23+18%3A00%3A99
-POSTHOOK: query: insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08', hr = '2013-01-23+18:00:99')
-select 1, key, 1, value, 1 from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@list_bucketing_mul_col@ds=2008-04-08/hr=2013-01-23+18%3A00%3A99
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=2013-01-23+18:00:99).col1 EXPRESSION []
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=2013-01-23+18:00:99).col2 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=2013-01-23+18:00:99).col3 EXPRESSION []
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=2013-01-23+18:00:99).col4 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=2013-01-23+18:00:99).col5 EXPRESSION []
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_mul_col
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_mul_col
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_mul_col
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_mul_col
-ds=2008-04-08/hr=2013-01-23+18%3A00%3A99
-PREHOOK: query: desc formatted list_bucketing_mul_col partition (ds='2008-04-08', hr='2013-01-23+18:00:99')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_mul_col
-POSTHOOK: query: desc formatted list_bucketing_mul_col partition (ds='2008-04-08', hr='2013-01-23+18:00:99')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_mul_col
-# col_name            	data_type           	comment             
-	 	 
-col1                	string              	                    
-col2                	string              	                    
-col3                	string              	                    
-col4                	string              	                    
-col5                	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 2013-01-23+18:00:99]	 
-Database:           	default             	 
-Table:              	list_bucketing_mul_col	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	4                   
-	numRows             	500                 
-	rawDataSize         	6312                
-	totalSize           	7094                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[col2, col4]        	 
-Skewed Values:      	[[466, val_466], [287, val_287], [82, val_82]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[82, val_82]=/list_bucketing_mul_col/ds=2008-04-08/hr=2013-01-23+18%3A00%3A99/col2=82/col4=val_82, [466, val_466]=/list_bucketing_mul_col/ds=2008-04-08/hr=2013-01-23+18%3A00%3A99/col2=466/col4=val_466, [287, val_287]=/list_bucketing_mul_col/ds=2008-04-08/hr=2013-01-23+18%3A00%3A99/col2=287/col4=val_287}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: explain extended
-select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466"
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466"
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Partition Description:
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 2013-01-23+18:00:99
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-              bucket_count -1
-              columns col1,col2,col3,col4,col5
-              columns.comments 
-              columns.types string:string:string:string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_mul_col
-              numFiles 4
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 6312
-              serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 7094
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns col1,col2,col3,col4,col5
-                columns.comments 
-                columns.types string:string:string:string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_mul_col
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_mul_col
-            name: default.list_bucketing_mul_col
-      Processor Tree:
-        TableScan
-          alias: list_bucketing_mul_col
-          Statistics: Num rows: 500 Data size: 6312 Basic stats: COMPLETE Column stats: NONE
-          GatherStats: false
-          Filter Operator
-            isSamplingPred: false
-            predicate: ((col2 = '466') and (col4 = 'val_466')) (type: boolean)
-            Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: col1 (type: string), '466' (type: string), col3 (type: string), 'val_466' (type: string), col5 (type: string), '2008-04-08' (type: string), '2013-01-23+18:00:99' (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
-              ListSink
-
-PREHOOK: query: select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466"
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_mul_col
-PREHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=2013-01-23+18%3A00%3A99
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466"
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_mul_col
-POSTHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=2013-01-23+18%3A00%3A99
-#### A masked pattern was here ####
-1	466	1	val_466	1	2008-04-08	2013-01-23+18:00:99
-1	466	1	val_466	1	2008-04-08	2013-01-23+18:00:99
-1	466	1	val_466	1	2008-04-08	2013-01-23+18:00:99
-PREHOOK: query: drop table list_bucketing_mul_col
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_mul_col
-PREHOOK: Output: default@list_bucketing_mul_col
-POSTHOOK: query: drop table list_bucketing_mul_col
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_mul_col
-POSTHOOK: Output: default@list_bucketing_mul_col

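The partition paths in the diff above show the escaping this test asserts: the hour value 2013-01-23+18:00:99 is stored on disk as 2013-01-23+18%3A00%3A99, i.e. ':' is percent-encoded while '+' passes through. A minimal sketch of that style of path-component escaping (the UNSAFE character set here is an assumption for illustration; Hive's actual escape list is larger and lives in its own utility code):

    public class PartitionPathEscape {
        // Characters treated as unsafe in this sketch; an assumption only.
        private static final String UNSAFE = ":%#";

        static String escapePathName(String value) {
            StringBuilder sb = new StringBuilder();
            for (char c : value.toCharArray()) {
                if (UNSAFE.indexOf(c) >= 0) {
                    sb.append(String.format("%%%02X", (int) c)); // ':' -> %3A
                } else {
                    sb.append(c);                                // '+' passes through
                }
            }
            return sb.toString();
        }

        public static void main(String[] args) {
            System.out.println("hr=" + escapePathName("2013-01-23+18:00:99"));
            // -> hr=2013-01-23+18%3A00%3A99
        }
    }

Decoding reverses the mapping, which is why "desc formatted" prints the raw partition value while the Skewed Value to Truncated Path map shows the escaped directory names.
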
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_13.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_13.q.java1.8.out b/ql/src/test/results/clientpositive/list_bucket_dml_13.q.java1.8.out
deleted file mode 100644
index f7a1039..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_13.q.java1.8.out
+++ /dev/null
@@ -1,439 +0,0 @@
-PREHOOK: query: -- Ensure skewed value map has escaped directory name
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- test where the skewed values are more than 1 say columns no. 2 and 4 in a table with 5 columns
-create table list_bucketing_mul_col (col1 String, col2 String, col3 String, col4 String, col5 string) 
-    partitioned by (ds String, hr String) 
-    skewed by (col2, col4) on (('466','val_466'),('287','val_287'),('82','val_82'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_mul_col
-POSTHOOK: query: -- Ensure skewed value map has escaped directory name
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- test where the skewed values are more than 1 say columns no. 2 and 4 in a table with 5 columns
-create table list_bucketing_mul_col (col1 String, col2 String, col3 String, col4 String, col5 string) 
-    partitioned by (ds String, hr String) 
-    skewed by (col2, col4) on (('466','val_466'),('287','val_287'),('82','val_82'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_mul_col
-PREHOOK: query: -- list bucketing DML 
-explain extended
-insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08',  hr = '2013-01-23+18:00:99')
-select 1, key, 1, value, 1 from src
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML 
-explain extended
-insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08',  hr = '2013-01-23+18:00:99')
-select 1, key, 1, value, 1 from src
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            src
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_TAB
-            TOK_TABNAME
-               list_bucketing_mul_col
-            TOK_PARTSPEC
-               TOK_PARTVAL
-                  ds
-                  '2008-04-08'
-               TOK_PARTVAL
-                  hr
-                  '2013-01-23+18:00:99'
-      TOK_SELECT
-         TOK_SELEXPR
-            1
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               key
-         TOK_SELEXPR
-            1
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               value
-         TOK_SELEXPR
-            1
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: UDFToString(1) (type: string), key (type: string), UDFToString(1) (type: string), value (type: string), UDFToString(1) (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/hr=2013-01-23+18%3A00%3A99/
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns col1,col2,col3,col4,col5
-                      columns.comments 
-                      columns.types string:string:string:string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_mul_col
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_mul_col
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numRows 500
-              rawDataSize 5312
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE true
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numRows 500
-                rawDataSize 5312
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-      Truncated Path -> Alias:
-        /src [$hdt$_0:src]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 2013-01-23+18:00:99
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns col1,col2,col3,col4,col5
-                columns.comments 
-                columns.types string:string:string:string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_mul_col
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_mul_col
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08', hr = '2013-01-23+18:00:99')
-select 1, key, 1, value, 1 from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@list_bucketing_mul_col@ds=2008-04-08/hr=2013-01-23+18%3A00%3A99
-POSTHOOK: query: insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08', hr = '2013-01-23+18:00:99')
-select 1, key, 1, value, 1 from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@list_bucketing_mul_col@ds=2008-04-08/hr=2013-01-23+18%3A00%3A99
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=2013-01-23+18:00:99).col1 EXPRESSION []
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=2013-01-23+18:00:99).col2 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=2013-01-23+18:00:99).col3 EXPRESSION []
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=2013-01-23+18:00:99).col4 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=2013-01-23+18:00:99).col5 EXPRESSION []
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_mul_col
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_mul_col
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_mul_col
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_mul_col
-ds=2008-04-08/hr=2013-01-23+18%3A00%3A99
-PREHOOK: query: desc formatted list_bucketing_mul_col partition (ds='2008-04-08', hr='2013-01-23+18:00:99')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_mul_col
-POSTHOOK: query: desc formatted list_bucketing_mul_col partition (ds='2008-04-08', hr='2013-01-23+18:00:99')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_mul_col
-# col_name            	data_type           	comment             
-	 	 
-col1                	string              	                    
-col2                	string              	                    
-col3                	string              	                    
-col4                	string              	                    
-col5                	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 2013-01-23+18:00:99]	 
-Database:           	default             	 
-Table:              	list_bucketing_mul_col	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	4                   
-	numRows             	500                 
-	rawDataSize         	6312                
-	totalSize           	7094                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[col2, col4]        	 
-Skewed Values:      	[[466, val_466], [287, val_287], [82, val_82]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[466, val_466]=/list_bucketing_mul_col/ds=2008-04-08/hr=2013-01-23+18%3A00%3A99/col2=466/col4=val_466, [287, val_287]=/list_bucketing_mul_col/ds=2008-04-08/hr=2013-01-23+18%3A00%3A99/col2=287/col4=val_287, [82, val_82]=/list_bucketing_mul_col/ds=2008-04-08/hr=2013-01-23+18%3A00%3A99/col2=82/col4=val_82}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: explain extended
-select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466"
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466"
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            list_bucketing_mul_col
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_DIR
-            TOK_TMP_FILE
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_ALLCOLREF
-      TOK_WHERE
-         and
-            and
-               and
-                  =
-                     TOK_TABLE_OR_COL
-                        ds
-                     '2008-04-08'
-                  =
-                     TOK_TABLE_OR_COL
-                        hr
-                     '2013-01-23+18:00:99'
-               =
-                  TOK_TABLE_OR_COL
-                     col2
-                  "466"
-            =
-               TOK_TABLE_OR_COL
-                  col4
-               "val_466"
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: list_bucketing_mul_col
-            Statistics: Num rows: 500 Data size: 6312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: ((col2 = '466') and (col4 = 'val_466')) (type: boolean)
-              Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: col1 (type: string), '466' (type: string), col3 (type: string), 'val_466' (type: string), col5 (type: string), '2008-04-08' (type: string), '2013-01-23+18:00:99' (type: string)
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-                Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-                  Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        columns _col0,_col1,_col2,_col3,_col4,_col5,_col6
-                        columns.types string:string:string:string:string:string:string
-                        escape.delim \
-                        hive.serialization.extend.nesting.levels true
-                        serialization.format 1
-                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  TotalFiles: 1
-                  GatherStats: false
-                  MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: col4=val_466
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 2013-01-23+18:00:99
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns col1,col2,col3,col4,col5
-              columns.comments 
-              columns.types string:string:string:string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_mul_col
-              numFiles 4
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 6312
-              serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 7094
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns col1,col2,col3,col4,col5
-                columns.comments 
-                columns.types string:string:string:string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_mul_col
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_mul_col
-            name: default.list_bucketing_mul_col
-      Truncated Path -> Alias:
-        /list_bucketing_mul_col/ds=2008-04-08/hr=2013-01-23+18%3A00%3A99/col2=466/col4=val_466 [list_bucketing_mul_col]
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466"
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_mul_col
-PREHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=2013-01-23+18%3A00%3A99
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466"
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_mul_col
-POSTHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=2013-01-23+18%3A00%3A99
-#### A masked pattern was here ####
-1	466	1	val_466	1	2008-04-08	2013-01-23+18:00:99
-1	466	1	val_466	1	2008-04-08	2013-01-23+18:00:99
-1	466	1	val_466	1	2008-04-08	2013-01-23+18:00:99
-PREHOOK: query: drop table list_bucketing_mul_col
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_mul_col
-PREHOOK: Output: default@list_bucketing_mul_col
-POSTHOOK: query: drop table list_bucketing_mul_col
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_mul_col
-POSTHOOK: Output: default@list_bucketing_mul_col
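
Context for the stats-flag lines in these diffs: the golden file deleted above still records the pre-HIVE-13409 form of the property (COLUMN_STATS_ACCURATE true), while the regenerated output below carries the JSON form ({"BASIC_STATS":"true","COLUMN_STATS":{...}}). A plausible reading of the JDK8 failures this series fixes is JSON key order: serializing the flags from an unordered map yields different text on JDK7 and JDK8, since HashMap iteration order changed between those releases. The following is a minimal standalone sketch of the idea only, assuming nothing about Hive's actual StatsSetupConst implementation; writing the flags from a sorted map pins one canonical key order on every JDK.

    import java.util.Map;
    import java.util.TreeMap;

    // Illustrative only, not Hive code: shows why a sorted map prints
    // "BASIC_STATS" before "COLUMN_STATS" deterministically, whereas a
    // HashMap's iteration order may differ between JDK7 and JDK8.
    public class StatsFlagDemo {

        @SuppressWarnings("unchecked")
        static String toJson(Map<String, Object> m) {
            StringBuilder sb = new StringBuilder("{");
            boolean first = true;
            for (Map.Entry<String, Object> e : m.entrySet()) {
                if (!first) sb.append(',');
                first = false;
                sb.append('"').append(e.getKey()).append("\":");
                Object v = e.getValue();
                if (v instanceof Map) {
                    sb.append(toJson((Map<String, Object>) v));  // nested object
                } else {
                    sb.append('"').append(v).append('"');
                }
            }
            return sb.append('}').toString();
        }

        public static void main(String[] args) {
            Map<String, Object> colStats = new TreeMap<>();
            colStats.put("key", "true");
            colStats.put("value", "true");
            Map<String, Object> flags = new TreeMap<>();  // TreeMap => sorted keys
            flags.put("COLUMN_STATS", colStats);
            flags.put("BASIC_STATS", "true");
            // Prints: {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
            System.out.println(toJson(flags));
        }
    }

Run standalone, this prints the same property text on any JDK, matching the regenerated .q.out lines below.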

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_13.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_13.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_13.q.out
new file mode 100644
index 0000000..93ebef0
--- /dev/null
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_13.q.out
@@ -0,0 +1,335 @@
+PREHOOK: query: -- Ensure skewed value map has escaped directory name
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- SORT_QUERY_RESULTS
+
+-- test where the skewed values are more than 1 say columns no. 2 and 4 in a table with 5 columns
+create table list_bucketing_mul_col (col1 String, col2 String, col3 String, col4 String, col5 string) 
+    partitioned by (ds String, hr String) 
+    skewed by (col2, col4) on (('466','val_466'),('287','val_287'),('82','val_82'))
+    stored as DIRECTORIES
+    STORED AS RCFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@list_bucketing_mul_col
+POSTHOOK: query: -- Ensure skewed value map has escaped directory name
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- SORT_QUERY_RESULTS
+
+-- test where the skewed values are more than 1 say columns no. 2 and 4 in a table with 5 columns
+create table list_bucketing_mul_col (col1 String, col2 String, col3 String, col4 String, col5 string) 
+    partitioned by (ds String, hr String) 
+    skewed by (col2, col4) on (('466','val_466'),('287','val_287'),('82','val_82'))
+    stored as DIRECTORIES
+    STORED AS RCFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@list_bucketing_mul_col
+PREHOOK: query: -- list bucketing DML 
+explain extended
+insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08',  hr = '2013-01-23+18:00:99')
+select 1, key, 1, value, 1 from src
+PREHOOK: type: QUERY
+POSTHOOK: query: -- list bucketing DML 
+explain extended
+insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08',  hr = '2013-01-23+18:00:99')
+select 1, key, 1, value, 1 from src
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: '1' (type: string), key (type: string), '1' (type: string), value (type: string), '1' (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4
+              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Static Partition Specification: ds=2008-04-08/hr=2013-01-23+18%3A00%3A99/
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns col1,col2,col3,col4,col5
+                      columns.comments 
+                      columns.types string:string:string:string:string
+#### A masked pattern was here ####
+                      name default.list_bucketing_mul_col
+                      partition_columns ds/hr
+                      partition_columns.types string:string
+                      serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.list_bucketing_mul_col
+                TotalFiles: 1
+                GatherStats: true
+                MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numRows 500
+              rawDataSize 5312
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numRows 500
+                rawDataSize 5312
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Truncated Path -> Alias:
+        /src [src]
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 2013-01-23+18:00:99
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns col1,col2,col3,col4,col5
+                columns.comments 
+                columns.types string:string:string:string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_mul_col
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_mul_col
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08', hr = '2013-01-23+18:00:99')
+select 1, key, 1, value, 1 from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@list_bucketing_mul_col@ds=2008-04-08/hr=2013-01-23+18%3A00%3A99
+POSTHOOK: query: insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08', hr = '2013-01-23+18:00:99')
+select 1, key, 1, value, 1 from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@list_bucketing_mul_col@ds=2008-04-08/hr=2013-01-23+18%3A00%3A99
+POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=2013-01-23+18:00:99).col1 EXPRESSION []
+POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=2013-01-23+18:00:99).col2 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=2013-01-23+18:00:99).col3 EXPRESSION []
+POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=2013-01-23+18:00:99).col4 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=2013-01-23+18:00:99).col5 EXPRESSION []
+PREHOOK: query: -- check DML result
+show partitions list_bucketing_mul_col
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@list_bucketing_mul_col
+POSTHOOK: query: -- check DML result
+show partitions list_bucketing_mul_col
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@list_bucketing_mul_col
+ds=2008-04-08/hr=2013-01-23+18%3A00%3A99
+PREHOOK: query: desc formatted list_bucketing_mul_col partition (ds='2008-04-08', hr='2013-01-23+18:00:99')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_mul_col
+POSTHOOK: query: desc formatted list_bucketing_mul_col partition (ds='2008-04-08', hr='2013-01-23+18:00:99')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_mul_col
+# col_name            	data_type           	comment             
+	 	 
+col1                	string              	                    
+col2                	string              	                    
+col3                	string              	                    
+col4                	string              	                    
+col5                	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 2013-01-23+18:00:99]	 
+Database:           	default             	 
+Table:              	list_bucketing_mul_col	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	4                   
+	numRows             	500                 
+	rawDataSize         	6312                
+	totalSize           	7094                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[col2, col4]        	 
+Skewed Values:      	[[466, val_466], [287, val_287], [82, val_82]]	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[466, val_466]=/list_bucketing_mul_col/ds=2008-04-08/hr=2013-01-23+18%3A00%3A99/col2=466/col4=val_466, [287, val_287]=/list_bucketing_mul_col/ds=2008-04-08/hr=2013-01-23+18%3A00%3A99/col2=287/col4=val_287, [82, val_82]=/list_bucketing_mul_col/ds=2008-04-08/hr=2013-01-23+18%3A00%3A99/col2=82/col4=val_82}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: explain extended
+select * from list_bucketing_mul_col 
+where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466"
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select * from list_bucketing_mul_col 
+where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466"
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Partition Description:
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 2013-01-23+18:00:99
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+              bucket_count -1
+              columns col1,col2,col3,col4,col5
+              columns.comments 
+              columns.types string:string:string:string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_mul_col
+              numFiles 4
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 6312
+              serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              totalSize 7094
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns col1,col2,col3,col4,col5
+                columns.comments 
+                columns.types string:string:string:string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_mul_col
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_mul_col
+            name: default.list_bucketing_mul_col
+      Processor Tree:
+        TableScan
+          alias: list_bucketing_mul_col
+          Statistics: Num rows: 500 Data size: 6312 Basic stats: COMPLETE Column stats: NONE
+          GatherStats: false
+          Filter Operator
+            isSamplingPred: false
+            predicate: ((col2 = '466') and (col4 = 'val_466')) (type: boolean)
+            Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: col1 (type: string), '466' (type: string), col3 (type: string), 'val_466' (type: string), col5 (type: string), '2008-04-08' (type: string), '2013-01-23+18:00:99' (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+              Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
+              ListSink
+
+PREHOOK: query: select * from list_bucketing_mul_col 
+where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466"
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_mul_col
+PREHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=2013-01-23+18%3A00%3A99
+#### A masked pattern was here ####
+POSTHOOK: query: select * from list_bucketing_mul_col 
+where ds='2008-04-08' and hr='2013-01-23+18:00:99' and col2 = "466" and col4 = "val_466"
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_mul_col
+POSTHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=2013-01-23+18%3A00%3A99
+#### A masked pattern was here ####
+1	466	1	val_466	1	2008-04-08	2013-01-23+18:00:99
+1	466	1	val_466	1	2008-04-08	2013-01-23+18:00:99
+1	466	1	val_466	1	2008-04-08	2013-01-23+18:00:99
+PREHOOK: query: drop table list_bucketing_mul_col
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@list_bucketing_mul_col
+PREHOOK: Output: default@list_bucketing_mul_col
+POSTHOOK: query: drop table list_bucketing_mul_col
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@list_bucketing_mul_col
+POSTHOOK: Output: default@list_bucketing_mul_col
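
The new golden file above exercises a partition value that is not legal as a raw HDFS directory name; the paths it records show ':' percent-encoded (hr=2013-01-23+18%3A00%3A99) while '+' and '-' pass through literally. The helper below is hypothetical, not Hive's actual FileUtils code, and only sketches that escaping rule under the assumption that a small set of path-unsafe characters is percent-encoded.

    // Hypothetical sketch of partition-value escaping; escapePathName and
    // the UNSAFE set are illustrative assumptions, not Hive's implementation.
    public class PartitionPathDemo {
        private static final String UNSAFE = ":/#%\\";

        static String escapePathName(String value) {
            StringBuilder sb = new StringBuilder();
            for (char c : value.toCharArray()) {
                if (UNSAFE.indexOf(c) >= 0 || c < 0x20) {
                    sb.append('%').append(String.format("%02X", (int) c));
                } else {
                    sb.append(c);
                }
            }
            return sb.toString();
        }

        public static void main(String[] args) {
            // Prints: hr=2013-01-23+18%3A00%3A99
            System.out.println("hr=" + escapePathName("2013-01-23+18:00:99"));
        }
    }

Characters outside the unsafe set pass through unchanged, which is why the date and the '+' survive intact in the skewed-value directory map shown above.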


[05/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
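
For context on the deleted plans below: both the shuffle-join and map-join variants guard the NOT IN rewrite with a sq_2_notin_nullcheck branch that counts NULL subquery keys and only lets the cross-product semi join proceed when that count is 0. That guard follows from SQL's three-valued logic, sketched here as a small standalone Java model; the notIn helper is illustrative, not Hive code.

    import java.util.Arrays;
    import java.util.List;

    // Models SQL's three-valued NOT IN: if the subquery returns any NULL,
    // "key NOT IN (...)" can never be TRUE, only FALSE or UNKNOWN -- hence
    // the planner's null-count check before the cross product.
    public class NotInDemo {
        // Returns TRUE/FALSE/null, where null models SQL's UNKNOWN.
        static Boolean notIn(String key, List<String> subquery) {
            boolean sawNull = false;
            for (String s : subquery) {
                if (s == null) { sawNull = true; continue; }
                if (s.equals(key)) return Boolean.FALSE;   // key IS in the set
            }
            return sawNull ? null : Boolean.TRUE;          // UNKNOWN if NULL seen
        }

        public static void main(String[] args) {
            System.out.println(notIn("1", Arrays.asList("3", "4")));   // true
            System.out.println(notIn("3", Arrays.asList("3", "4")));   // false
            System.out.println(notIn("1", Arrays.asList("3", null)));  // null: row filtered out
        }
    }
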
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/subquery_multiinsert.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/subquery_multiinsert.q.java1.8.out b/ql/src/test/results/clientpositive/subquery_multiinsert.q.java1.8.out
deleted file mode 100644
index 899723f..0000000
--- a/ql/src/test/results/clientpositive/subquery_multiinsert.q.java1.8.out
+++ /dev/null
@@ -1,999 +0,0 @@
-PREHOOK: query: -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-CREATE TABLE src_4(
-  key STRING, 
-  value STRING
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@src_4
-POSTHOOK: query: -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-CREATE TABLE src_4(
-  key STRING, 
-  value STRING
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@src_4
-RUN: Stage-0:DDL
-PREHOOK: query: CREATE TABLE src_5( 
-  key STRING, 
-  value STRING
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@src_5
-POSTHOOK: query: CREATE TABLE src_5( 
-  key STRING, 
-  value STRING
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@src_5
-RUN: Stage-0:DDL
-Warning: Shuffle Join JOIN[31][tables = [b, sq_2_notin_nullcheck]] in Stage 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: explain
-from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-PREHOOK: type: QUERY
-POSTHOOK: query: explain
-from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-10 is a root stage
-  Stage-2 depends on stages: Stage-10
-  Stage-3 depends on stages: Stage-2
-  Stage-4 depends on stages: Stage-3
-  Stage-1 depends on stages: Stage-4
-  Stage-5 depends on stages: Stage-1
-  Stage-6 depends on stages: Stage-2
-  Stage-0 depends on stages: Stage-6
-  Stage-7 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-10
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: s1
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: ((key > '2') and key is null) (type: boolean)
-              Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                Group By Operator
-                  aggregations: count()
-                  mode: hash
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    sort order: 
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: _col0 (type: bigint)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: count(VALUE._col0)
-          mode: mergepartial
-          outputColumnNames: _col0
-          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: (_col0 = 0) (type: boolean)
-            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                keys: 0 (type: bigint)
-                mode: hash
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              value expressions: key (type: string), value (type: string)
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Left Semi Join 0 to 1
-          keys:
-            0 
-            1 
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string)
-              sort order: +
-              Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col1 (type: string)
-          TableScan
-            alias: s1
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: (key > '2') (type: boolean)
-              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string)
-                outputColumnNames: _col0
-                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Left Outer Join0 to 1
-          keys:
-            0 _col0 (type: string)
-            1 _col0 (type: string)
-          outputColumnNames: _col0, _col1, _col5
-          Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: _col5 is null (type: boolean)
-            Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-4
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string)
-              sort order: +
-              Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col1 (type: string)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.src_5
-
-  Stage: Stage-1
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src_5
-
-  Stage: Stage-5
-    Stats-Aggr Operator
-
-  Stage: Stage-6
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: key (type: string), value (type: string)
-              sort order: ++
-              Map-reduce partition columns: key (type: string), value (type: string)
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-          TableScan
-            alias: a
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: ((key > '9') and value is not null) (type: boolean)
-              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                Group By Operator
-                  keys: _col0 (type: string), _col1 (type: string)
-                  mode: hash
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string), _col1 (type: string)
-                    sort order: ++
-                    Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Left Semi Join 0 to 1
-          keys:
-            0 key (type: string), value (type: string)
-            1 _col0 (type: string), _col1 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.src_4
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src_4
-
-  Stage: Stage-7
-    Stats-Aggr Operator
-
-Warning: Shuffle Join JOIN[31][tables = [b, sq_2_notin_nullcheck]] in Stage 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@src_4
-PREHOOK: Output: default@src_5
-POSTHOOK: query: from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@src_4
-POSTHOOK: Output: default@src_5
-POSTHOOK: Lineage: src_4.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_4.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-RUN: Stage-10:MAPRED
-RUN: Stage-2:MAPRED
-RUN: Stage-3:MAPRED
-RUN: Stage-6:MAPRED
-RUN: Stage-4:MAPRED
-RUN: Stage-0:MOVE
-RUN: Stage-1:MOVE
-RUN: Stage-7:STATS
-RUN: Stage-5:STATS
-PREHOOK: query: select * from src_4
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_4
-#### A masked pattern was here ####
-POSTHOOK: query: select * from src_4
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_4
-#### A masked pattern was here ####
-90	val_90
-90	val_90
-90	val_90
-92	val_92
-95	val_95
-95	val_95
-96	val_96
-97	val_97
-97	val_97
-98	val_98
-98	val_98
-PREHOOK: query: select * from src_5
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_5
-#### A masked pattern was here ####
-POSTHOOK: query: select * from src_5
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_5
-#### A masked pattern was here ####
-0	val_0
-0	val_0
-0	val_0
-10	val_10
-100	val_100
-100	val_100
-103	val_103
-103	val_103
-104	val_104
-104	val_104
-105	val_105
-11	val_11
-111	val_111
-113	val_113
-113	val_113
-114	val_114
-116	val_116
-118	val_118
-118	val_118
-119	val_119
-119	val_119
-119	val_119
-12	val_12
-12	val_12
-120	val_120
-120	val_120
-125	val_125
-125	val_125
-126	val_126
-128	val_128
-128	val_128
-128	val_128
-129	val_129
-129	val_129
-131	val_131
-133	val_133
-134	val_134
-134	val_134
-136	val_136
-137	val_137
-137	val_137
-138	val_138
-138	val_138
-138	val_138
-138	val_138
-143	val_143
-145	val_145
-146	val_146
-146	val_146
-149	val_149
-149	val_149
-15	val_15
-15	val_15
-150	val_150
-152	val_152
-152	val_152
-153	val_153
-155	val_155
-156	val_156
-157	val_157
-158	val_158
-160	val_160
-162	val_162
-163	val_163
-164	val_164
-164	val_164
-165	val_165
-165	val_165
-166	val_166
-167	val_167
-167	val_167
-167	val_167
-168	val_168
-169	val_169
-169	val_169
-169	val_169
-169	val_169
-17	val_17
-170	val_170
-172	val_172
-172	val_172
-174	val_174
-174	val_174
-175	val_175
-175	val_175
-176	val_176
-176	val_176
-177	val_177
-178	val_178
-179	val_179
-179	val_179
-18	val_18
-18	val_18
-180	val_180
-181	val_181
-183	val_183
-186	val_186
-187	val_187
-187	val_187
-187	val_187
-189	val_189
-19	val_19
-190	val_190
-191	val_191
-191	val_191
-192	val_192
-193	val_193
-193	val_193
-193	val_193
-194	val_194
-195	val_195
-195	val_195
-196	val_196
-197	val_197
-197	val_197
-199	val_199
-199	val_199
-199	val_199
-2	val_2
-Warning: Map Join MAPJOIN[55][bigTable=b] in task 'Stage-13:MAPRED' is a cross product
-Warning: Shuffle Join JOIN[31][tables = [b, sq_2_notin_nullcheck]] in Stage 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: explain
-from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-PREHOOK: type: QUERY
-POSTHOOK: query: explain
-from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-10 is a root stage
-  Stage-14 depends on stages: Stage-10 , consists of Stage-17, Stage-2
-  Stage-17 has a backup stage: Stage-2
-  Stage-13 depends on stages: Stage-17
-  Stage-15 depends on stages: Stage-2, Stage-13
-  Stage-4 depends on stages: Stage-15
-  Stage-1 depends on stages: Stage-4
-  Stage-5 depends on stages: Stage-1
-  Stage-16 depends on stages: Stage-2, Stage-13
-  Stage-12 depends on stages: Stage-16
-  Stage-0 depends on stages: Stage-12
-  Stage-7 depends on stages: Stage-0
-  Stage-2
-
-STAGE PLANS:
-  Stage: Stage-10
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: s1
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: ((key > '2') and key is null) (type: boolean)
-              Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                Group By Operator
-                  aggregations: count()
-                  mode: hash
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    sort order: 
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: _col0 (type: bigint)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: count(VALUE._col0)
-          mode: mergepartial
-          outputColumnNames: _col0
-          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: (_col0 = 0) (type: boolean)
-            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                keys: 0 (type: bigint)
-                mode: hash
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-14
-    Conditional Operator
-
-  Stage: Stage-17
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        $INTNAME 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        $INTNAME 
-          TableScan
-            HashTable Sink Operator
-              keys:
-                0 
-                1 
-
-  Stage: Stage-13
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Map Join Operator
-              condition map:
-                   Left Semi Join 0 to 1
-              keys:
-                0 
-                1 
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-      Local Work:
-        Map Reduce Local Work
-
-  Stage: Stage-15
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        sq_2:s1 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        sq_2:s1 
-          TableScan
-            alias: s1
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: (key > '2') (type: boolean)
-              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string)
-                outputColumnNames: _col0
-                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                HashTable Sink Operator
-                  keys:
-                    0 _col0 (type: string)
-                    1 _col0 (type: string)
-
-  Stage: Stage-4
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Map Join Operator
-              condition map:
-                   Left Outer Join0 to 1
-              keys:
-                0 _col0 (type: string)
-                1 _col0 (type: string)
-              outputColumnNames: _col0, _col1, _col5
-              Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-              Filter Operator
-                predicate: _col5 is null (type: boolean)
-                Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col0 (type: string), _col1 (type: string)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string)
-                    sort order: +
-                    Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: _col1 (type: string)
-      Local Work:
-        Map Reduce Local Work
-      Reduce Operator Tree:
-        Select Operator
-          expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.src_5
-
-  Stage: Stage-1
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src_5
-
-  Stage: Stage-5
-    Stats-Aggr Operator
-
-  Stage: Stage-16
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        sq_1:a 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        sq_1:a 
-          TableScan
-            alias: a
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: ((key > '9') and value is not null) (type: boolean)
-              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                Group By Operator
-                  keys: _col0 (type: string), _col1 (type: string)
-                  mode: hash
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                  HashTable Sink Operator
-                    keys:
-                      0 key (type: string), value (type: string)
-                      1 _col0 (type: string), _col1 (type: string)
-
-  Stage: Stage-12
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Map Join Operator
-              condition map:
-                   Left Semi Join 0 to 1
-              keys:
-                0 key (type: string), value (type: string)
-                1 _col0 (type: string), _col1 (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.src_4
-      Local Work:
-        Map Reduce Local Work
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src_4
-
-  Stage: Stage-7
-    Stats-Aggr Operator
-
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              value expressions: key (type: string), value (type: string)
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Left Semi Join 0 to 1
-          keys:
-            0 
-            1 
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-Warning: Map Join MAPJOIN[55][bigTable=b] in task 'Stage-13:MAPRED' is a cross product
-Warning: Shuffle Join JOIN[31][tables = [b, sq_2_notin_nullcheck]] in Stage 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@src_4
-PREHOOK: Output: default@src_5
-POSTHOOK: query: from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@src_4
-POSTHOOK: Output: default@src_5
-POSTHOOK: Lineage: src_4.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_4.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-RUN: Stage-10:MAPRED
-RUN: Stage-14:CONDITIONAL
-RUN: Stage-17:MAPREDLOCAL
-RUN: Stage-13:MAPRED
-RUN: Stage-15:MAPREDLOCAL
-RUN: Stage-16:MAPREDLOCAL
-RUN: Stage-4:MAPRED
-RUN: Stage-12:MAPRED
-RUN: Stage-1:MOVE
-RUN: Stage-0:MOVE
-RUN: Stage-5:STATS
-RUN: Stage-7:STATS
-PREHOOK: query: select * from src_4
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_4
-#### A masked pattern was here ####
-POSTHOOK: query: select * from src_4
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_4
-#### A masked pattern was here ####
-90	val_90
-90	val_90
-90	val_90
-92	val_92
-95	val_95
-95	val_95
-96	val_96
-97	val_97
-97	val_97
-98	val_98
-98	val_98
-PREHOOK: query: select * from src_5
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_5
-#### A masked pattern was here ####
-POSTHOOK: query: select * from src_5
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_5
-#### A masked pattern was here ####
-0	val_0
-0	val_0
-0	val_0
-10	val_10
-100	val_100
-100	val_100
-103	val_103
-103	val_103
-104	val_104
-104	val_104
-105	val_105
-11	val_11
-111	val_111
-113	val_113
-113	val_113
-114	val_114
-116	val_116
-118	val_118
-118	val_118
-119	val_119
-119	val_119
-119	val_119
-12	val_12
-12	val_12
-120	val_120
-120	val_120
-125	val_125
-125	val_125
-126	val_126
-128	val_128
-128	val_128
-128	val_128
-129	val_129
-129	val_129
-131	val_131
-133	val_133
-134	val_134
-134	val_134
-136	val_136
-137	val_137
-137	val_137
-138	val_138
-138	val_138
-138	val_138
-138	val_138
-143	val_143
-145	val_145
-146	val_146
-146	val_146
-149	val_149
-149	val_149
-15	val_15
-15	val_15
-150	val_150
-152	val_152
-152	val_152
-153	val_153
-155	val_155
-156	val_156
-157	val_157
-158	val_158
-160	val_160
-162	val_162
-163	val_163
-164	val_164
-164	val_164
-165	val_165
-165	val_165
-166	val_166
-167	val_167
-167	val_167
-167	val_167
-168	val_168
-169	val_169
-169	val_169
-169	val_169
-169	val_169
-17	val_17
-170	val_170
-172	val_172
-172	val_172
-174	val_174
-174	val_174
-175	val_175
-175	val_175
-176	val_176
-176	val_176
-177	val_177
-178	val_178
-179	val_179
-179	val_179
-18	val_18
-18	val_18
-180	val_180
-181	val_181
-183	val_183
-186	val_186
-187	val_187
-187	val_187
-187	val_187
-189	val_189
-19	val_19
-190	val_190
-191	val_191
-191	val_191
-192	val_192
-193	val_193
-193	val_193
-193	val_193
-194	val_194
-195	val_195
-195	val_195
-196	val_196
-197	val_197
-197	val_197
-199	val_199
-199	val_199
-199	val_199
-2	val_2
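
The sq_2_notin_nullcheck alias in the cross-product warnings above is how Hive preserves SQL null semantics for NOT IN: if the subquery could emit a NULL key, b.key NOT IN (...) must return no rows at all, so the plan first counts such NULLs (the Stage-10 null-check branch, whose filter ((key > '2') and key is null) appears in the plans below), cross-joins the at-most-one-row count back into the outer query, and only then applies the left-outer-join/IS NULL filter. A hand-written equivalent of that rewrite (a sketch of the semantics, not the literal SQL Hive generates; the aliases nullcheck and sq are illustrative):

    -- Semantically equivalent rewrite of the src_5 branch:
    --   select * from src b
    --   where b.key not in (select key from src s1 where s1.key > '2')
    SELECT b.key, b.value
    FROM src b
    CROSS JOIN (SELECT count(*) AS n            -- mirrors the null-check
                FROM src s1                     -- stage's filter:
                WHERE s1.key > '2'              -- ((key > '2') and
                  AND s1.key IS NULL) nullcheck --  key is null)
    LEFT OUTER JOIN (SELECT key FROM src s1 WHERE s1.key > '2') sq
      ON (b.key = sq.key)
    WHERE nullcheck.n = 0                       -- any NULL key => empty result
      AND sq.key IS NULL                        -- keep only non-matching rows

Matched rows are discarded by the IS NULL filter, so duplicates on the subquery side cannot inflate the surviving (unmatched) rows; only the null count can empty the result, which is why the single-row nullcheck table shows up in a cross product.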

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/subquery_multiinsert.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/subquery_multiinsert.q.out b/ql/src/test/results/clientpositive/subquery_multiinsert.q.out
new file mode 100644
index 0000000..ff3abc4
--- /dev/null
+++ b/ql/src/test/results/clientpositive/subquery_multiinsert.q.out
@@ -0,0 +1,997 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE src_4(
+  key STRING, 
+  value STRING
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@src_4
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+CREATE TABLE src_4(
+  key STRING, 
+  value STRING
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@src_4
+RUN: Stage-0:DDL
+PREHOOK: query: CREATE TABLE src_5( 
+  key STRING, 
+  value STRING
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@src_5
+POSTHOOK: query: CREATE TABLE src_5( 
+  key STRING, 
+  value STRING
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@src_5
+RUN: Stage-0:DDL
+Warning: Shuffle Join JOIN[31][tables = [b, sq_2_notin_nullcheck]] in Stage 'Stage-2:MAPRED' is a cross product
+PREHOOK: query: explain
+from src b 
+INSERT OVERWRITE TABLE src_4 
+  select * 
+  where b.key in 
+   (select a.key 
+    from src a 
+    where b.value = a.value and a.key > '9'
+   ) 
+INSERT OVERWRITE TABLE src_5 
+  select *  
+  where b.key not in  ( select key from src s1 where s1.key > '2') 
+  order by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+from src b 
+INSERT OVERWRITE TABLE src_4 
+  select * 
+  where b.key in 
+   (select a.key 
+    from src a 
+    where b.value = a.value and a.key > '9'
+   ) 
+INSERT OVERWRITE TABLE src_5 
+  select *  
+  where b.key not in  ( select key from src s1 where s1.key > '2') 
+  order by key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-10 is a root stage
+  Stage-2 depends on stages: Stage-10
+  Stage-3 depends on stages: Stage-2
+  Stage-4 depends on stages: Stage-3
+  Stage-1 depends on stages: Stage-4
+  Stage-5 depends on stages: Stage-1
+  Stage-6 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-6
+  Stage-7 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-10
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: s1
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: ((key > '2') and key is null) (type: boolean)
+              Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: count()
+                  mode: hash
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    sort order: 
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col0 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+          Filter Operator
+            predicate: (_col0 = 0) (type: boolean)
+            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                keys: 0 (type: bigint)
+                mode: hash
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: b
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Reduce Output Operator
+              sort order: 
+              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+              value expressions: key (type: string), value (type: string)
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+          TableScan
+            Reduce Output Operator
+              sort order: 
+              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Left Semi Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Reduce Output Operator
+              key expressions: _col0 (type: string)
+              sort order: +
+              Map-reduce partition columns: _col0 (type: string)
+              Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
+              value expressions: _col1 (type: string)
+          TableScan
+            alias: s1
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key > '2') (type: boolean)
+              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Left Outer Join0 to 1
+          keys:
+            0 _col0 (type: string)
+            1 _col0 (type: string)
+          outputColumnNames: _col0, _col1, _col5
+          Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
+          Filter Operator
+            predicate: _col5 is null (type: boolean)
+            Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: _col0 (type: string), _col1 (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-4
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Reduce Output Operator
+              key expressions: _col0 (type: string)
+              sort order: +
+              Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
+              value expressions: _col1 (type: string)
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                name: default.src_5
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src_5
+
+  Stage: Stage-5
+    Stats-Aggr Operator
+
+  Stage: Stage-6
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Reduce Output Operator
+              key expressions: key (type: string), value (type: string)
+              sort order: ++
+              Map-reduce partition columns: key (type: string), value (type: string)
+              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+          TableScan
+            alias: a
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: ((key > '9') and value is not null) (type: boolean)
+              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: string), value (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  keys: _col0 (type: string), _col1 (type: string)
+                  mode: hash
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string), _col1 (type: string)
+                    sort order: ++
+                    Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Left Semi Join 0 to 1
+          keys:
+            0 key (type: string), value (type: string)
+            1 _col0 (type: string), _col1 (type: string)
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                name: default.src_4
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src_4
+
+  Stage: Stage-7
+    Stats-Aggr Operator
+
+Warning: Shuffle Join JOIN[31][tables = [b, sq_2_notin_nullcheck]] in Stage 'Stage-2:MAPRED' is a cross product
+PREHOOK: query: from src b 
+INSERT OVERWRITE TABLE src_4 
+  select * 
+  where b.key in 
+   (select a.key 
+    from src a 
+    where b.value = a.value and a.key > '9'
+   ) 
+INSERT OVERWRITE TABLE src_5 
+  select *  
+  where b.key not in  ( select key from src s1 where s1.key > '2') 
+  order by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@src_4
+PREHOOK: Output: default@src_5
+POSTHOOK: query: from src b 
+INSERT OVERWRITE TABLE src_4 
+  select * 
+  where b.key in 
+   (select a.key 
+    from src a 
+    where b.value = a.value and a.key > '9'
+   ) 
+INSERT OVERWRITE TABLE src_5 
+  select *  
+  where b.key not in  ( select key from src s1 where s1.key > '2') 
+  order by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@src_4
+POSTHOOK: Output: default@src_5
+POSTHOOK: Lineage: src_4.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+RUN: Stage-10:MAPRED
+RUN: Stage-2:MAPRED
+RUN: Stage-3:MAPRED
+RUN: Stage-6:MAPRED
+RUN: Stage-4:MAPRED
+RUN: Stage-0:MOVE
+RUN: Stage-1:MOVE
+RUN: Stage-7:STATS
+RUN: Stage-5:STATS
+PREHOOK: query: select * from src_4
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_4
+#### A masked pattern was here ####
+POSTHOOK: query: select * from src_4
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_4
+#### A masked pattern was here ####
+90	val_90
+90	val_90
+90	val_90
+92	val_92
+95	val_95
+95	val_95
+96	val_96
+97	val_97
+97	val_97
+98	val_98
+98	val_98
+PREHOOK: query: select * from src_5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_5
+#### A masked pattern was here ####
+POSTHOOK: query: select * from src_5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_5
+#### A masked pattern was here ####
+0	val_0
+0	val_0
+0	val_0
+10	val_10
+100	val_100
+100	val_100
+103	val_103
+103	val_103
+104	val_104
+104	val_104
+105	val_105
+11	val_11
+111	val_111
+113	val_113
+113	val_113
+114	val_114
+116	val_116
+118	val_118
+118	val_118
+119	val_119
+119	val_119
+119	val_119
+12	val_12
+12	val_12
+120	val_120
+120	val_120
+125	val_125
+125	val_125
+126	val_126
+128	val_128
+128	val_128
+128	val_128
+129	val_129
+129	val_129
+131	val_131
+133	val_133
+134	val_134
+134	val_134
+136	val_136
+137	val_137
+137	val_137
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+143	val_143
+145	val_145
+146	val_146
+146	val_146
+149	val_149
+149	val_149
+15	val_15
+15	val_15
+150	val_150
+152	val_152
+152	val_152
+153	val_153
+155	val_155
+156	val_156
+157	val_157
+158	val_158
+160	val_160
+162	val_162
+163	val_163
+164	val_164
+164	val_164
+165	val_165
+165	val_165
+166	val_166
+167	val_167
+167	val_167
+167	val_167
+168	val_168
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+17	val_17
+170	val_170
+172	val_172
+172	val_172
+174	val_174
+174	val_174
+175	val_175
+175	val_175
+176	val_176
+176	val_176
+177	val_177
+178	val_178
+179	val_179
+179	val_179
+18	val_18
+18	val_18
+180	val_180
+181	val_181
+183	val_183
+186	val_186
+187	val_187
+187	val_187
+187	val_187
+189	val_189
+19	val_19
+190	val_190
+191	val_191
+191	val_191
+192	val_192
+193	val_193
+193	val_193
+193	val_193
+194	val_194
+195	val_195
+195	val_195
+196	val_196
+197	val_197
+197	val_197
+199	val_199
+199	val_199
+199	val_199
+2	val_2
+Warning: Map Join MAPJOIN[55][bigTable=b] in task 'Stage-13:MAPRED' is a cross product
+Warning: Shuffle Join JOIN[31][tables = [b, sq_2_notin_nullcheck]] in Stage 'Stage-2:MAPRED' is a cross product
+PREHOOK: query: explain
+from src b 
+INSERT OVERWRITE TABLE src_4 
+  select * 
+  where b.key in 
+   (select a.key 
+    from src a 
+    where b.value = a.value and a.key > '9'
+   ) 
+INSERT OVERWRITE TABLE src_5 
+  select *  
+  where b.key not in  ( select key from src s1 where s1.key > '2') 
+  order by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+from src b 
+INSERT OVERWRITE TABLE src_4 
+  select * 
+  where b.key in 
+   (select a.key 
+    from src a 
+    where b.value = a.value and a.key > '9'
+   ) 
+INSERT OVERWRITE TABLE src_5 
+  select *  
+  where b.key not in  ( select key from src s1 where s1.key > '2') 
+  order by key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-10 is a root stage
+  Stage-14 depends on stages: Stage-10 , consists of Stage-17, Stage-2
+  Stage-17 has a backup stage: Stage-2
+  Stage-13 depends on stages: Stage-17
+  Stage-15 depends on stages: Stage-2, Stage-13
+  Stage-4 depends on stages: Stage-15
+  Stage-1 depends on stages: Stage-4
+  Stage-5 depends on stages: Stage-1
+  Stage-16 depends on stages: Stage-2, Stage-13
+  Stage-12 depends on stages: Stage-16
+  Stage-0 depends on stages: Stage-12
+  Stage-7 depends on stages: Stage-0
+  Stage-2
+
+STAGE PLANS:
+  Stage: Stage-10
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: s1
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: ((key > '2') and key is null) (type: boolean)
+              Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: count()
+                  mode: hash
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    sort order: 
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col0 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+          Filter Operator
+            predicate: (_col0 = 0) (type: boolean)
+            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                keys: 0 (type: bigint)
+                mode: hash
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-14
+    Conditional Operator
+
+  Stage: Stage-17
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $INTNAME 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $INTNAME 
+          TableScan
+            HashTable Sink Operator
+              keys:
+                0 
+                1 
+
+  Stage: Stage-13
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: b
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Map Join Operator
+              condition map:
+                   Left Semi Join 0 to 1
+              keys:
+                0 
+                1 
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-15
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        sq_2:s1 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        sq_2:s1 
+          TableScan
+            alias: s1
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key > '2') (type: boolean)
+              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                HashTable Sink Operator
+                  keys:
+                    0 _col0 (type: string)
+                    1 _col0 (type: string)
+
+  Stage: Stage-4
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Map Join Operator
+              condition map:
+                   Left Outer Join0 to 1
+              keys:
+                0 _col0 (type: string)
+                1 _col0 (type: string)
+              outputColumnNames: _col0, _col1, _col5
+              Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
+              Filter Operator
+                predicate: _col5 is null (type: boolean)
+                Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col0 (type: string), _col1 (type: string)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string)
+                    sort order: +
+                    Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col1 (type: string)
+      Local Work:
+        Map Reduce Local Work
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                name: default.src_5
+
+  Stage: Stage-1
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src_5
+
+  Stage: Stage-5
+    Stats-Aggr Operator
+
+  Stage: Stage-16
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        sq_1:a 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        sq_1:a 
+          TableScan
+            alias: a
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: ((key > '9') and value is not null) (type: boolean)
+              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: string), value (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  keys: _col0 (type: string), _col1 (type: string)
+                  mode: hash
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                  HashTable Sink Operator
+                    keys:
+                      0 key (type: string), value (type: string)
+                      1 _col0 (type: string), _col1 (type: string)
+
+  Stage: Stage-12
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Map Join Operator
+              condition map:
+                   Left Semi Join 0 to 1
+              keys:
+                0 key (type: string), value (type: string)
+                1 _col0 (type: string), _col1 (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.src_4
+      Local Work:
+        Map Reduce Local Work
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src_4
+
+  Stage: Stage-7
+    Stats-Aggr Operator
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: b
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Reduce Output Operator
+              sort order: 
+              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+              value expressions: key (type: string), value (type: string)
+            File Output Operator
+              compressed: false
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+          TableScan
+            Reduce Output Operator
+              sort order: 
+              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Left Semi Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+Warning: Map Join MAPJOIN[55][bigTable=b] in task 'Stage-13:MAPRED' is a cross product
+Warning: Shuffle Join JOIN[31][tables = [b, sq_2_notin_nullcheck]] in Stage 'Stage-2:MAPRED' is a cross product
+PREHOOK: query: from src b 
+INSERT OVERWRITE TABLE src_4 
+  select * 
+  where b.key in 
+   (select a.key 
+    from src a 
+    where b.value = a.value and a.key > '9'
+   ) 
+INSERT OVERWRITE TABLE src_5 
+  select *  
+  where b.key not in  ( select key from src s1 where s1.key > '2') 
+  order by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@src_4
+PREHOOK: Output: default@src_5
+POSTHOOK: query: from src b 
+INSERT OVERWRITE TABLE src_4 
+  select * 
+  where b.key in 
+   (select a.key 
+    from src a 
+    where b.value = a.value and a.key > '9'
+   ) 
+INSERT OVERWRITE TABLE src_5 
+  select *  
+  where b.key not in  ( select key from src s1 where s1.key > '2') 
+  order by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@src_4
+POSTHOOK: Output: default@src_5
+POSTHOOK: Lineage: src_4.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+RUN: Stage-10:MAPRED
+RUN: Stage-14:CONDITIONAL
+RUN: Stage-17:MAPREDLOCAL
+RUN: Stage-13:MAPRED
+RUN: Stage-15:MAPREDLOCAL
+RUN: Stage-16:MAPREDLOCAL
+RUN: Stage-4:MAPRED
+RUN: Stage-12:MAPRED
+RUN: Stage-1:MOVE
+RUN: Stage-0:MOVE
+RUN: Stage-5:STATS
+RUN: Stage-7:STATS
+PREHOOK: query: select * from src_4
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_4
+#### A masked pattern was here ####
+POSTHOOK: query: select * from src_4
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_4
+#### A masked pattern was here ####
+90	val_90
+90	val_90
+90	val_90
+92	val_92
+95	val_95
+95	val_95
+96	val_96
+97	val_97
+97	val_97
+98	val_98
+98	val_98
+PREHOOK: query: select * from src_5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src_5
+#### A masked pattern was here ####
+POSTHOOK: query: select * from src_5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_5
+#### A masked pattern was here ####
+0	val_0
+0	val_0
+0	val_0
+10	val_10
+100	val_100
+100	val_100
+103	val_103
+103	val_103
+104	val_104
+104	val_104
+105	val_105
+11	val_11
+111	val_111
+113	val_113
+113	val_113
+114	val_114
+116	val_116
+118	val_118
+118	val_118
+119	val_119
+119	val_119
+119	val_119
+12	val_12
+12	val_12
+120	val_120
+120	val_120
+125	val_125
+125	val_125
+126	val_126
+128	val_128
+128	val_128
+128	val_128
+129	val_129
+129	val_129
+131	val_131
+133	val_133
+134	val_134
+134	val_134
+136	val_136
+137	val_137
+137	val_137
+138	val_138
+138	val_138
+138	val_138
+138	val_138
+143	val_143
+145	val_145
+146	val_146
+146	val_146
+149	val_149
+149	val_149
+15	val_15
+15	val_15
+150	val_150
+152	val_152
+152	val_152
+153	val_153
+155	val_155
+156	val_156
+157	val_157
+158	val_158
+160	val_160
+162	val_162
+163	val_163
+164	val_164
+164	val_164
+165	val_165
+165	val_165
+166	val_166
+167	val_167
+167	val_167
+167	val_167
+168	val_168
+169	val_169
+169	val_169
+169	val_169
+169	val_169
+17	val_17
+170	val_170
+172	val_172
+172	val_172
+174	val_174
+174	val_174
+175	val_175
+175	val_175
+176	val_176
+176	val_176
+177	val_177
+178	val_178
+179	val_179
+179	val_179
+18	val_18
+18	val_18
+180	val_180
+181	val_181
+183	val_183
+186	val_186
+187	val_187
+187	val_187
+187	val_187
+189	val_189
+19	val_19
+190	val_190
+191	val_191
+191	val_191
+192	val_192
+193	val_193
+193	val_193
+193	val_193
+194	val_194
+195	val_195
+195	val_195
+196	val_196
+197	val_197
+197	val_197
+199	val_199
+199	val_199
+199	val_199
+2	val_2
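
The two EXPLAIN outputs above show the same pair of inserts under two join strategies. The Conditional Operator (Stage-14), the Map Reduce Local Work stages ending in HashTable Sink Operators (Stage-15/16/17), and the 'Stage-17 has a backup stage: Stage-2' dependency are the footprint of map-join auto-conversion: each small join input is built into an in-process hash table, and the original shuffle join (Stage-2) is retained as a fallback in case that build fails. A sketch, assuming the standard hive.auto.convert.join flag is what the test toggles between the two runs (the .q file itself is not shown here):

    -- first plan shape: shuffle joins only (Join Operator fed by
    -- Reduce Output Operators, no Conditional Operator, no backup stages)
    set hive.auto.convert.join=false;

    -- second plan shape: Map Join Operators, local hash-table builds,
    -- and the common join kept as a backup stage behind Stage-14
    set hive.auto.convert.join=true;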


[04/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/subquery_notin_having.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/subquery_notin_having.q.java1.7.out b/ql/src/test/results/clientpositive/subquery_notin_having.q.java1.7.out
deleted file mode 100644
index 793b8be..0000000
--- a/ql/src/test/results/clientpositive/subquery_notin_having.q.java1.7.out
+++ /dev/null
@@ -1,766 +0,0 @@
-Warning: Shuffle Join JOIN[21][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: -- non agg, non corr
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-explain
-select key, count(*) 
-from src 
-group by key
-having key not in  
-  ( select key  from src s1 
-    where s1.key > '12'
-  )
-PREHOOK: type: QUERY
-POSTHOOK: query: -- non agg, non corr
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-explain
-select key, count(*) 
-from src 
-group by key
-having key not in  
-  ( select key  from src s1 
-    where s1.key > '12'
-  )
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1, Stage-4
-  Stage-3 depends on stages: Stage-2
-  Stage-4 is a root stage
-  Stage-0 depends on stages: Stage-3
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: key (type: string)
-              outputColumnNames: key
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: count()
-                keys: key (type: string)
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: bigint)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col0 (type: string), _col1 (type: bigint)
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Inner Join 0 to 1
-          keys:
-            0 
-            1 
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string)
-              sort order: +
-              Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col1 (type: bigint)
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: (key > '12') (type: boolean)
-              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string)
-                outputColumnNames: _col0
-                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Left Outer Join0 to 1
-          keys:
-            0 _col0 (type: string)
-            1 _col0 (type: string)
-          outputColumnNames: _col0, _col1, _col3
-          Statistics: Num rows: 302 Data size: 3213 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: _col3 is null (type: boolean)
-            Statistics: Num rows: 151 Data size: 1606 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: bigint)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 151 Data size: 1606 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 151 Data size: 1606 Basic stats: COMPLETE Column stats: NONE
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-4
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              Statistics: Num rows: 500 Data size: 2000 Basic stats: COMPLETE Column stats: COMPLETE
-              Filter Operator
-                predicate: false (type: boolean)
-                Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
-                Group By Operator
-                  aggregations: count()
-                  mode: hash
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                  Reduce Output Operator
-                    sort order: 
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                    value expressions: _col0 (type: bigint)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: count(VALUE._col0)
-          mode: mergepartial
-          outputColumnNames: _col0
-          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-          Filter Operator
-            predicate: (_col0 = 0) (type: boolean)
-            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-            Select Operator
-              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-Warning: Shuffle Join JOIN[29][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: -- non agg, corr
-explain
-select b.p_mfgr, min(p_retailprice) 
-from part b 
-group by b.p_mfgr
-having b.p_mfgr not in 
-  (select p_mfgr 
-  from (select p_mfgr, min(p_retailprice) l, max(p_retailprice) r, avg(p_retailprice) a from part group by p_mfgr) a 
-  where min(p_retailprice) = l and r - l > 600
-  )
-PREHOOK: type: QUERY
-POSTHOOK: query: -- non agg, corr
-explain
-select b.p_mfgr, min(p_retailprice) 
-from part b 
-group by b.p_mfgr
-having b.p_mfgr not in 
-  (select p_mfgr 
-  from (select p_mfgr, min(p_retailprice) l, max(p_retailprice) r, avg(p_retailprice) a from part group by p_mfgr) a 
-  where min(p_retailprice) = l and r - l > 600
-  )
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1, Stage-5
-  Stage-3 depends on stages: Stage-2, Stage-6
-  Stage-4 is a root stage
-  Stage-5 depends on stages: Stage-4
-  Stage-6 is a root stage
-  Stage-0 depends on stages: Stage-3
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: p_mfgr (type: string), p_retailprice (type: double)
-              outputColumnNames: p_mfgr, p_retailprice
-              Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: min(p_retailprice)
-                keys: p_mfgr (type: string)
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: double)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: min(VALUE._col0)
-          keys: KEY._col0 (type: string)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col0 (type: string), _col1 (type: double)
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Inner Join 0 to 1
-          keys:
-            0 
-            1 
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: double)
-              sort order: ++
-              Map-reduce partition columns: _col0 (type: string), _col1 (type: double)
-              Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: double)
-              sort order: ++
-              Map-reduce partition columns: _col0 (type: string), _col1 (type: double)
-              Statistics: Num rows: 4 Data size: 484 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Left Outer Join0 to 1
-          keys:
-            0 _col0 (type: string), _col1 (type: double)
-            1 _col0 (type: string), _col1 (type: double)
-          outputColumnNames: _col0, _col1, _col3
-          Statistics: Num rows: 15 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: _col3 is null (type: boolean)
-            Statistics: Num rows: 7 Data size: 888 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: double)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 7 Data size: 888 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 7 Data size: 888 Basic stats: COMPLETE Column stats: NONE
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-4
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: p_mfgr (type: string), p_retailprice (type: double)
-              outputColumnNames: p_mfgr, p_retailprice
-              Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: min(p_retailprice), max(p_retailprice)
-                keys: p_mfgr (type: string)
-                mode: hash
-                outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: double), _col2 (type: double)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: min(VALUE._col0), max(VALUE._col1)
-          keys: KEY._col0 (type: string)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: (((_col2 - _col1) > 600.0) and (_col0 is null or _col1 is null)) (type: boolean)
-            Statistics: Num rows: 2 Data size: 242 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              Statistics: Num rows: 2 Data size: 242 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: count()
-                mode: hash
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-5
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col0 (type: bigint)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: count(VALUE._col0)
-          mode: mergepartial
-          outputColumnNames: _col0
-          Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: (_col0 = 0) (type: boolean)
-            Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-6
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: p_mfgr (type: string), p_retailprice (type: double)
-              outputColumnNames: p_mfgr, p_retailprice
-              Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: min(p_retailprice), max(p_retailprice)
-                keys: p_mfgr (type: string)
-                mode: hash
-                outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: double), _col2 (type: double)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: min(VALUE._col0), max(VALUE._col1)
-          keys: KEY._col0 (type: string)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: ((_col2 - _col1) > 600.0) (type: boolean)
-            Statistics: Num rows: 4 Data size: 484 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: double)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 4 Data size: 484 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-Warning: Shuffle Join JOIN[29][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: select b.p_mfgr, min(p_retailprice) 
-from part b 
-group by b.p_mfgr
-having b.p_mfgr not in 
-  (select p_mfgr 
-  from (select p_mfgr, min(p_retailprice) l, max(p_retailprice) r, avg(p_retailprice) a from part group by p_mfgr) a 
-  where min(p_retailprice) = l and r - l > 600
-  )
-PREHOOK: type: QUERY
-PREHOOK: Input: default@part
-#### A masked pattern was here ####
-POSTHOOK: query: select b.p_mfgr, min(p_retailprice) 
-from part b 
-group by b.p_mfgr
-having b.p_mfgr not in 
-  (select p_mfgr 
-  from (select p_mfgr, min(p_retailprice) l, max(p_retailprice) r, avg(p_retailprice) a from part group by p_mfgr) a 
-  where min(p_retailprice) = l and r - l > 600
-  )
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@part
-#### A masked pattern was here ####
-Manufacturer#1	1173.15
-Manufacturer#2	1690.68
-Warning: Shuffle Join JOIN[31][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: -- agg, non corr
-explain
-select b.p_mfgr, min(p_retailprice) 
-from part b 
-group by b.p_mfgr
-having b.p_mfgr not in 
-  (select p_mfgr 
-  from part a
-  group by p_mfgr
-  having max(p_retailprice) - min(p_retailprice) > 600
-  )
-PREHOOK: type: QUERY
-POSTHOOK: query: -- agg, non corr
-explain
-select b.p_mfgr, min(p_retailprice) 
-from part b 
-group by b.p_mfgr
-having b.p_mfgr not in 
-  (select p_mfgr 
-  from part a
-  group by p_mfgr
-  having max(p_retailprice) - min(p_retailprice) > 600
-  )
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1, Stage-5
-  Stage-3 depends on stages: Stage-2, Stage-6
-  Stage-4 is a root stage
-  Stage-5 depends on stages: Stage-4
-  Stage-6 is a root stage
-  Stage-0 depends on stages: Stage-3
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: p_mfgr (type: string), p_retailprice (type: double)
-              outputColumnNames: p_mfgr, p_retailprice
-              Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: min(p_retailprice)
-                keys: p_mfgr (type: string)
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: double)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: min(VALUE._col0)
-          keys: KEY._col0 (type: string)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col0 (type: string), _col1 (type: double)
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Inner Join 0 to 1
-          keys:
-            0 
-            1 
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string)
-              sort order: +
-              Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col1 (type: double)
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string)
-              sort order: +
-              Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 4 Data size: 484 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Left Outer Join0 to 1
-          keys:
-            0 _col0 (type: string)
-            1 _col0 (type: string)
-          outputColumnNames: _col0, _col1, _col3
-          Statistics: Num rows: 15 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: _col3 is null (type: boolean)
-            Statistics: Num rows: 7 Data size: 888 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: double)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 7 Data size: 888 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 7 Data size: 888 Basic stats: COMPLETE Column stats: NONE
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-4
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: p_mfgr is null (type: boolean)
-              Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: p_retailprice (type: double)
-                outputColumnNames: _col1
-                Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-                Group By Operator
-                  aggregations: max(_col1), min(_col1)
-                  keys: null (type: string)
-                  mode: hash
-                  outputColumnNames: _col0, _col1, _col2
-                  Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string)
-                    sort order: +
-                    Map-reduce partition columns: _col0 (type: string)
-                    Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: _col1 (type: double), _col2 (type: double)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: max(VALUE._col0), min(VALUE._col1)
-          keys: KEY._col0 (type: string)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 6 Data size: 726 Basic stats: COMPLETE Column stats: NONE
-          Select Operator
-            expressions: _col1 (type: double), _col2 (type: double)
-            outputColumnNames: _col1, _col2
-            Statistics: Num rows: 6 Data size: 726 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: ((_col1 - _col2) > 600.0) (type: boolean)
-              Statistics: Num rows: 2 Data size: 242 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                Statistics: Num rows: 2 Data size: 242 Basic stats: COMPLETE Column stats: NONE
-                Group By Operator
-                  aggregations: count()
-                  mode: hash
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-5
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col0 (type: bigint)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: count(VALUE._col0)
-          mode: mergepartial
-          outputColumnNames: _col0
-          Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: (_col0 = 0) (type: boolean)
-            Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-6
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: p_mfgr (type: string), p_retailprice (type: double)
-              outputColumnNames: p_mfgr, p_retailprice
-              Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: max(p_retailprice), min(p_retailprice)
-                keys: p_mfgr (type: string)
-                mode: hash
-                outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: double), _col2 (type: double)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: max(VALUE._col0), min(VALUE._col1)
-          keys: KEY._col0 (type: string)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: ((_col1 - _col2) > 600.0) (type: boolean)
-            Statistics: Num rows: 4 Data size: 484 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: _col0 (type: string)
-              outputColumnNames: _col0
-              Statistics: Num rows: 4 Data size: 484 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-Warning: Shuffle Join JOIN[31][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: select b.p_mfgr, min(p_retailprice) 
-from part b 
-group by b.p_mfgr
-having b.p_mfgr not in 
-  (select p_mfgr 
-  from part a
-  group by p_mfgr
-  having max(p_retailprice) - min(p_retailprice) > 600
-  )
-PREHOOK: type: QUERY
-PREHOOK: Input: default@part
-#### A masked pattern was here ####
-POSTHOOK: query: select b.p_mfgr, min(p_retailprice) 
-from part b 
-group by b.p_mfgr
-having b.p_mfgr not in 
-  (select p_mfgr 
-  from part a
-  group by p_mfgr
-  having max(p_retailprice) - min(p_retailprice) > 600
-  )
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@part
-#### A masked pattern was here ####
-Manufacturer#1	1173.15
-Manufacturer#2	1690.68

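A note on the plans being deleted above: the "Shuffle Join ... is a cross product" warnings come from how Hive decorrelates HAVING ... NOT IN (subquery). As a rough HiveQL sketch only -- the aliases t, nullcheck, and s are illustrative, not names Hive generates -- the Stage-2 cross join, the Stage-3 left outer join, and the count(*) guard in the correlated variant above correspond to something like:

select t.p_mfgr, t.mn
from (select p_mfgr, min(p_retailprice) mn
      from part
      group by p_mfgr) t
cross join (select count(*) cnt
            from (select p_mfgr, min(p_retailprice) mn,
                         max(p_retailprice) mx
                  from part
                  group by p_mfgr) a
            where a.mx - a.mn > 600
              and (a.p_mfgr is null or a.mn is null)) nullcheck
left outer join (select p_mfgr, min(p_retailprice) mn
                 from part
                 group by p_mfgr
                 having max(p_retailprice) - min(p_retailprice) > 600) s
  on t.p_mfgr = s.p_mfgr and t.mn = s.mn
where nullcheck.cnt = 0
  and s.p_mfgr is null

The left outer join plus the "s.p_mfgr is null" filter keeps only groups with no match in the subquery, while the "nullcheck.cnt = 0" guard preserves NOT IN's NULL semantics: if the subquery can produce a NULL, the predicate is unknown for every non-matching row and no rows qualify.
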
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/subquery_notin_having.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/subquery_notin_having.q.java1.8.out b/ql/src/test/results/clientpositive/subquery_notin_having.q.java1.8.out
deleted file mode 100644
index 4e227cd..0000000
--- a/ql/src/test/results/clientpositive/subquery_notin_having.q.java1.8.out
+++ /dev/null
@@ -1,762 +0,0 @@
-Warning: Shuffle Join JOIN[26][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: -- non agg, non corr
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-explain
-select key, count(*) 
-from src 
-group by key
-having key not in  
-  ( select key  from src s1 
-    where s1.key > '12'
-  )
-PREHOOK: type: QUERY
-POSTHOOK: query: -- non agg, non corr
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-explain
-select key, count(*) 
-from src 
-group by key
-having key not in  
-  ( select key  from src s1 
-    where s1.key > '12'
-  )
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1, Stage-4
-  Stage-3 depends on stages: Stage-2
-  Stage-4 is a root stage
-  Stage-0 depends on stages: Stage-3
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: key (type: string)
-              outputColumnNames: _col0
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: count()
-                keys: _col0 (type: string)
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: bigint)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: count(VALUE._col0)
-          keys: KEY._col0 (type: string)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col0 (type: string), _col1 (type: bigint)
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Inner Join 0 to 1
-          keys:
-            0 
-            1 
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string)
-              sort order: +
-              Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col1 (type: bigint)
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: (key > '12') (type: boolean)
-              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string)
-                outputColumnNames: _col0
-                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Left Outer Join0 to 1
-          keys:
-            0 _col0 (type: string)
-            1 _col0 (type: string)
-          outputColumnNames: _col0, _col1, _col3
-          Statistics: Num rows: 302 Data size: 3213 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: _col3 is null (type: boolean)
-            Statistics: Num rows: 151 Data size: 1606 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: bigint)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 151 Data size: 1606 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 151 Data size: 1606 Basic stats: COMPLETE Column stats: NONE
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-4
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: ((key > '12') and key is null) (type: boolean)
-              Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                Group By Operator
-                  aggregations: count()
-                  mode: hash
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    sort order: 
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: _col0 (type: bigint)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: count(VALUE._col0)
-          mode: mergepartial
-          outputColumnNames: _col0
-          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: (_col0 = 0) (type: boolean)
-            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-Warning: Shuffle Join JOIN[36][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: -- non agg, corr
-explain
-select b.p_mfgr, min(p_retailprice) 
-from part b 
-group by b.p_mfgr
-having b.p_mfgr not in 
-  (select p_mfgr 
-  from (select p_mfgr, min(p_retailprice) l, max(p_retailprice) r, avg(p_retailprice) a from part group by p_mfgr) a 
-  where min(p_retailprice) = l and r - l > 600
-  )
-PREHOOK: type: QUERY
-POSTHOOK: query: -- non agg, corr
-explain
-select b.p_mfgr, min(p_retailprice) 
-from part b 
-group by b.p_mfgr
-having b.p_mfgr not in 
-  (select p_mfgr 
-  from (select p_mfgr, min(p_retailprice) l, max(p_retailprice) r, avg(p_retailprice) a from part group by p_mfgr) a 
-  where min(p_retailprice) = l and r - l > 600
-  )
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1, Stage-5
-  Stage-3 depends on stages: Stage-2, Stage-6
-  Stage-4 is a root stage
-  Stage-5 depends on stages: Stage-4
-  Stage-6 is a root stage
-  Stage-0 depends on stages: Stage-3
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: p_mfgr (type: string), p_retailprice (type: double)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: min(_col1)
-                keys: _col0 (type: string)
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: double)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: min(VALUE._col0)
-          keys: KEY._col0 (type: string)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col0 (type: string), _col1 (type: double)
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Inner Join 0 to 1
-          keys:
-            0 
-            1 
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: double)
-              sort order: ++
-              Map-reduce partition columns: _col0 (type: string), _col1 (type: double)
-              Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: double)
-              sort order: ++
-              Map-reduce partition columns: _col0 (type: string), _col1 (type: double)
-              Statistics: Num rows: 4 Data size: 484 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Left Outer Join0 to 1
-          keys:
-            0 _col0 (type: string), _col1 (type: double)
-            1 _col0 (type: string), _col1 (type: double)
-          outputColumnNames: _col0, _col1, _col3
-          Statistics: Num rows: 15 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: _col3 is null (type: boolean)
-            Statistics: Num rows: 7 Data size: 888 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: double)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 7 Data size: 888 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                Statistics: Num rows: 7 Data size: 888 Basic stats: COMPLETE Column stats: NONE
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-4
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: p_mfgr (type: string), p_retailprice (type: double)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: min(_col1), max(_col1)
-                keys: _col0 (type: string)
-                mode: hash
-                outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: double), _col2 (type: double)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: min(VALUE._col0), max(VALUE._col1)
-          keys: KEY._col0 (type: string)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: (((_col2 - _col1) > 600.0) and (_col0 is null or _col1 is null)) (type: boolean)
-            Statistics: Num rows: 2 Data size: 242 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              Statistics: Num rows: 2 Data size: 242 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: count()
-                mode: hash
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-5
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col0 (type: bigint)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: count(VALUE._col0)
-          mode: mergepartial
-          outputColumnNames: _col0
-          Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: (_col0 = 0) (type: boolean)
-            Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-6
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: p_mfgr (type: string), p_retailprice (type: double)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: min(_col1), max(_col1)
-                keys: _col0 (type: string)
-                mode: hash
-                outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: double), _col2 (type: double)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: min(VALUE._col0), max(VALUE._col1)
-          keys: KEY._col0 (type: string)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: ((_col2 - _col1) > 600.0) (type: boolean)
-            Statistics: Num rows: 4 Data size: 484 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: _col0 (type: string), _col1 (type: double)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 4 Data size: 484 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-Warning: Shuffle Join JOIN[36][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: select b.p_mfgr, min(p_retailprice) 
-from part b 
-group by b.p_mfgr
-having b.p_mfgr not in 
-  (select p_mfgr 
-  from (select p_mfgr, min(p_retailprice) l, max(p_retailprice) r, avg(p_retailprice) a from part group by p_mfgr) a 
-  where min(p_retailprice) = l and r - l > 600
-  )
-PREHOOK: type: QUERY
-PREHOOK: Input: default@part
-#### A masked pattern was here ####
-POSTHOOK: query: select b.p_mfgr, min(p_retailprice) 
-from part b 
-group by b.p_mfgr
-having b.p_mfgr not in 
-  (select p_mfgr 
-  from (select p_mfgr, min(p_retailprice) l, max(p_retailprice) r, avg(p_retailprice) a from part group by p_mfgr) a 
-  where min(p_retailprice) = l and r - l > 600
-  )
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@part
-#### A masked pattern was here ####
-Manufacturer#1	1173.15
-Manufacturer#2	1690.68
-Warning: Shuffle Join JOIN[39][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Stage-3:MAPRED' is a cross product
-PREHOOK: query: -- agg, non corr
-explain
-select b.p_mfgr, min(p_retailprice) 
-from part b 
-group by b.p_mfgr
-having b.p_mfgr not in 
-  (select p_mfgr 
-  from part a
-  group by p_mfgr
-  having max(p_retailprice) - min(p_retailprice) > 600
-  )
-PREHOOK: type: QUERY
-POSTHOOK: query: -- agg, non corr
-explain
-select b.p_mfgr, min(p_retailprice) 
-from part b 
-group by b.p_mfgr
-having b.p_mfgr not in 
-  (select p_mfgr 
-  from part a
-  group by p_mfgr
-  having max(p_retailprice) - min(p_retailprice) > 600
-  )
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1, Stage-4
-  Stage-3 depends on stages: Stage-2, Stage-6
-  Stage-4 is a root stage
-  Stage-5 is a root stage
-  Stage-6 depends on stages: Stage-5
-  Stage-0 depends on stages: Stage-3
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: p_mfgr (type: string), p_retailprice (type: double)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: min(_col1)
-                keys: _col0 (type: string)
-                mode: hash
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: double)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: min(VALUE._col0)
-          keys: KEY._col0 (type: string)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string)
-              sort order: +
-              Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col1 (type: double)
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string)
-              sort order: +
-              Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 4 Data size: 484 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Left Outer Join0 to 1
-          keys:
-            0 _col0 (type: string)
-            1 _col0 (type: string)
-          outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: _col2 is null (type: boolean)
-            Statistics: Num rows: 7 Data size: 865 Basic stats: COMPLETE Column stats: NONE
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-3
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 7 Data size: 865 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col0 (type: string), _col1 (type: double)
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Inner Join 0 to 1
-          keys:
-            0 
-            1 
-          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 7 Data size: 951 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 7 Data size: 951 Basic stats: COMPLETE Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-4
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: p_mfgr (type: string), p_retailprice (type: double)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: max(_col1), min(_col1)
-                keys: _col0 (type: string)
-                mode: hash
-                outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: double), _col2 (type: double)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: max(VALUE._col0), min(VALUE._col1)
-          keys: KEY._col0 (type: string)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: ((_col1 - _col2) > 600.0) (type: boolean)
-            Statistics: Num rows: 4 Data size: 484 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: _col0 (type: string)
-              outputColumnNames: _col0
-              Statistics: Num rows: 4 Data size: 484 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-5
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: p_mfgr is null (type: boolean)
-              Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: null (type: string), p_retailprice (type: double)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-                Group By Operator
-                  aggregations: max(_col1), min(_col1)
-                  keys: _col0 (type: string)
-                  mode: hash
-                  outputColumnNames: _col0, _col1, _col2
-                  Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string)
-                    sort order: +
-                    Map-reduce partition columns: _col0 (type: string)
-                    Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: _col1 (type: double), _col2 (type: double)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: max(VALUE._col0), min(VALUE._col1)
-          keys: KEY._col0 (type: string)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 6 Data size: 726 Basic stats: COMPLETE Column stats: NONE
-          Select Operator
-            expressions: _col1 (type: double), _col2 (type: double)
-            outputColumnNames: _col1, _col2
-            Statistics: Num rows: 6 Data size: 726 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: ((_col1 - _col2) > 600.0) (type: boolean)
-              Statistics: Num rows: 2 Data size: 242 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                Statistics: Num rows: 2 Data size: 242 Basic stats: COMPLETE Column stats: NONE
-                Group By Operator
-                  aggregations: count()
-                  mode: hash
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-6
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col0 (type: bigint)
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: count(VALUE._col0)
-          mode: mergepartial
-          outputColumnNames: _col0
-          Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            predicate: (_col0 = 0) (type: boolean)
-            Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-Warning: Shuffle Join JOIN[39][tables = [$hdt$_0, $hdt$_1, $hdt$_2]] in Stage 'Stage-3:MAPRED' is a cross product
-PREHOOK: query: select b.p_mfgr, min(p_retailprice) 
-from part b 
-group by b.p_mfgr
-having b.p_mfgr not in 
-  (select p_mfgr 
-  from part a
-  group by p_mfgr
-  having max(p_retailprice) - min(p_retailprice) > 600
-  )
-PREHOOK: type: QUERY
-PREHOOK: Input: default@part
-#### A masked pattern was here ####
-POSTHOOK: query: select b.p_mfgr, min(p_retailprice) 
-from part b 
-group by b.p_mfgr
-having b.p_mfgr not in 
-  (select p_mfgr 
-  from part a
-  group by p_mfgr
-  having max(p_retailprice) - min(p_retailprice) > 600
-  )
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@part
-#### A masked pattern was here ####
-Manufacturer#2	1690.68
-Manufacturer#1	1173.15


[08/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
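
For context on the out files removed in this series: qfile tests whose EXPLAIN output differed between JDK 7 and JDK 8 appear to have carried a JAVA_VERSION_SPECIFIC_OUTPUT marker and kept one golden file per JDK, e.g. the subquery_notin_having.q.java1.8.out file deleted above. A minimal sketch of such a test, reconstructed from the PREHOOK echo in the deleted output (not the literal .q file):

-- non agg, non corr
-- JAVA_VERSION_SPECIFIC_OUTPUT

explain
select key, count(*)
from src
group by key
having key not in
  ( select key from src s1
    where s1.key > '12'
  )

After this change a single plain .q.out golden file remains per test.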
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.out b/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.out
index 217fe76..dfa6ea5 100644
--- a/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.out
+++ b/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.out
@@ -20,90 +20,6 @@ EXPLAIN EXTENDED
  SELECT a.key, a.value, b.key, b.value
  WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
 POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_FULLOUTERJOIN
-         TOK_TABREF
-            TOK_TABNAME
-               src
-            a
-         TOK_TABREF
-            TOK_TABNAME
-               srcpart
-            b
-         AND
-            =
-               .
-                  TOK_TABLE_OR_COL
-                     a
-                  key
-               .
-                  TOK_TABLE_OR_COL
-                     b
-                  key
-            =
-               .
-                  TOK_TABLE_OR_COL
-                     b
-                  ds
-               '2008-04-08'
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_DIR
-            TOK_TMP_FILE
-      TOK_SELECT
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  a
-               key
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  a
-               value
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  b
-               key
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  b
-               value
-      TOK_WHERE
-         AND
-            AND
-               AND
-                  >
-                     .
-                        TOK_TABLE_OR_COL
-                           a
-                        key
-                     10
-                  <
-                     .
-                        TOK_TABLE_OR_COL
-                           a
-                        key
-                     20
-               >
-                  .
-                     TOK_TABLE_OR_COL
-                        b
-                     key
-                  15
-            <
-               .
-                  TOK_TABLE_OR_COL
-                     b
-                  key
-               25
-
-
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
   Stage-0 depends on stages: Stage-1
@@ -112,7 +28,7 @@ STAGE PLANS:
   Stage: Stage-1
     Spark
       Edges:
-        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 1), Map 3 (PARTITION-LEVEL SORT, 1)
+        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 4), Map 3 (PARTITION-LEVEL SORT, 4)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -121,14 +37,19 @@ STAGE PLANS:
                   alias: a
                   Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   GatherStats: false
-                  Reduce Output Operator
-                    key expressions: key (type: string)
-                    sort order: +
-                    Map-reduce partition columns: key (type: string)
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
                     Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                    tag: 0
-                    value expressions: value (type: string)
-                    auto parallelism: false
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string)
+                      null sort order: a
+                      sort order: +
+                      Map-reduce partition columns: _col0 (type: string)
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                      tag: 0
+                      value expressions: _col1 (type: string)
+                      auto parallelism: false
             Path -> Alias:
 #### A masked pattern was here ####
             Path -> Partition:
@@ -138,7 +59,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE true
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -158,7 +79,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE true
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -184,14 +105,19 @@ STAGE PLANS:
                   alias: b
                   Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
                   GatherStats: false
-                  Reduce Output Operator
-                    key expressions: key (type: string)
-                    sort order: +
-                    Map-reduce partition columns: key (type: string)
+                  Select Operator
+                    expressions: key (type: string), value (type: string), ds (type: string)
+                    outputColumnNames: _col0, _col1, _col2
                     Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-                    tag: 1
-                    value expressions: value (type: string), ds (type: string)
-                    auto parallelism: false
+                    Reduce Output Operator
+                      key expressions: _col0 (type: string)
+                      null sort order: a
+                      sort order: +
+                      Map-reduce partition columns: _col0 (type: string)
+                      Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+                      tag: 1
+                      value expressions: _col1 (type: string), _col2 (type: string)
+                      auto parallelism: false
             Path -> Alias:
 #### A masked pattern was here ####
             Path -> Partition:
@@ -204,7 +130,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE true
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -250,7 +176,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE true
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -296,7 +222,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE true
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -342,7 +268,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE true
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -396,39 +322,36 @@ STAGE PLANS:
                   0 
                   1 {(VALUE._col1 = '2008-04-08')}
                 keys:
-                  0 key (type: string)
-                  1 key (type: string)
-                outputColumnNames: _col0, _col1, _col5, _col6
+                  0 _col0 (type: string)
+                  1 _col0 (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3
                 Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
                 Filter Operator
                   isSamplingPred: false
-                  predicate: ((((_col5 > 15) and (_col5 < 25)) and (_col0 > 10)) and (_col0 < 20)) (type: boolean)
+                  predicate: ((UDFToDouble(_col0) > 10.0) and (UDFToDouble(_col0) < 20.0) and (UDFToDouble(_col2) > 15.0) and (UDFToDouble(_col2) < 25.0)) (type: boolean)
                   Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string)
-                    outputColumnNames: _col0, _col1, _col2, _col3
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 0
+#### A masked pattern was here ####
+                    NumFilesPerFileSink: 1
                     Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 0
-#### A masked pattern was here ####
-                      NumFilesPerFileSink: 1
-                      Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                      table:
-                          input format: org.apache.hadoop.mapred.TextInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                          properties:
-                            columns _col0,_col1,_col2,_col3
-                            columns.types string:string:string:string
-                            escape.delim \
-                            hive.serialization.extend.additional.nesting.levels true
-                            serialization.format 1
-                            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      TotalFiles: 1
-                      GatherStats: false
-                      MultiFileSpray: false
+#### A masked pattern was here ####
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        properties:
+                          columns _col0,_col1,_col2,_col3
+                          columns.types string:string:string:string
+                          escape.delim \
+                          hive.serialization.extend.additional.nesting.levels true
+                          serialization.escape.crlf true
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    TotalFiles: 1
+                    GatherStats: false
+                    MultiFileSpray: false
 
   Stage: Stage-0
     Fetch Operator
@@ -496,90 +419,6 @@ POSTHOOK: query: EXPLAIN EXTENDED
  SELECT a.key, a.value, b.key, b.value
  WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
 POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_FULLOUTERJOIN
-         TOK_TABREF
-            TOK_TABNAME
-               src
-            a
-         TOK_TABREF
-            TOK_TABNAME
-               srcpart
-            b
-         =
-            .
-               TOK_TABLE_OR_COL
-                  a
-               key
-            .
-               TOK_TABLE_OR_COL
-                  b
-               key
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_DIR
-            TOK_TMP_FILE
-      TOK_SELECT
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  a
-               key
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  a
-               value
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  b
-               key
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  b
-               value
-      TOK_WHERE
-         AND
-            AND
-               AND
-                  AND
-                     >
-                        .
-                           TOK_TABLE_OR_COL
-                              a
-                           key
-                        10
-                     <
-                        .
-                           TOK_TABLE_OR_COL
-                              a
-                           key
-                        20
-                  >
-                     .
-                        TOK_TABLE_OR_COL
-                           b
-                        key
-                     15
-               <
-                  .
-                     TOK_TABLE_OR_COL
-                        b
-                     key
-                  25
-            =
-               .
-                  TOK_TABLE_OR_COL
-                     b
-                  ds
-               '2008-04-08'
-
-
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
   Stage-0 depends on stages: Stage-1
@@ -588,7 +427,7 @@ STAGE PLANS:
   Stage: Stage-1
     Spark
       Edges:
-        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 2), Map 3 (PARTITION-LEVEL SORT, 2)
+        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 4), Map 3 (PARTITION-LEVEL SORT, 4)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -597,14 +436,23 @@ STAGE PLANS:
                   alias: a
                   Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   GatherStats: false
-                  Reduce Output Operator
-                    key expressions: key (type: string)
-                    sort order: +
-                    Map-reduce partition columns: key (type: string)
-                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                    tag: 0
-                    value expressions: value (type: string)
-                    auto parallelism: false
+                  Filter Operator
+                    isSamplingPred: false
+                    predicate: ((UDFToDouble(key) > 15.0) and (UDFToDouble(key) < 25.0)) (type: boolean)
+                    Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        null sort order: a
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+                        tag: 0
+                        value expressions: _col1 (type: string)
+                        auto parallelism: false
             Path -> Alias:
 #### A masked pattern was here ####
             Path -> Partition:
@@ -614,7 +462,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE true
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -634,7 +482,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE true
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -658,16 +506,25 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: b
-                  Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
                   GatherStats: false
-                  Reduce Output Operator
-                    key expressions: key (type: string)
-                    sort order: +
-                    Map-reduce partition columns: key (type: string)
-                    Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-                    tag: 1
-                    value expressions: value (type: string), ds (type: string)
-                    auto parallelism: false
+                  Filter Operator
+                    isSamplingPred: false
+                    predicate: ((UDFToDouble(key) > 15.0) and (UDFToDouble(key) < 25.0)) (type: boolean)
+                    Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        null sort order: a
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
+                        tag: 1
+                        value expressions: _col1 (type: string)
+                        auto parallelism: false
             Path -> Alias:
 #### A masked pattern was here ####
             Path -> Partition:
@@ -680,7 +537,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE true
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -726,99 +583,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-#### A masked pattern was here ####
-                Partition
-                  base file name: hr=11
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-09
-                    hr 11
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-#### A masked pattern was here ####
-                Partition
-                  base file name: hr=12
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-09
-                    hr 12
-                  properties:
-                    COLUMN_STATS_ACCURATE true
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -858,48 +623,43 @@ STAGE PLANS:
             Truncated Path -> Alias:
               /srcpart/ds=2008-04-08/hr=11 [b]
               /srcpart/ds=2008-04-08/hr=12 [b]
-              /srcpart/ds=2008-04-09/hr=11 [b]
-              /srcpart/ds=2008-04-09/hr=12 [b]
         Reducer 2 
             Needs Tagging: true
             Reduce Operator Tree:
               Join Operator
                 condition map:
-                     Outer Join 0 to 1
+                     Right Outer Join0 to 1
                 keys:
-                  0 key (type: string)
-                  1 key (type: string)
-                outputColumnNames: _col0, _col1, _col5, _col6, _col7
-                Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
+                  0 _col0 (type: string)
+                  1 _col0 (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 122 Data size: 1296 Basic stats: COMPLETE Column stats: NONE
                 Filter Operator
                   isSamplingPred: false
-                  predicate: (((((_col5 > 15) and (_col5 < 25)) and (_col7 = '2008-04-08')) and (_col0 > 10)) and (_col0 < 20)) (type: boolean)
+                  predicate: ((UDFToDouble(_col0) > 10.0) and (UDFToDouble(_col0) < 20.0)) (type: boolean)
                   Statistics: Num rows: 13 Data size: 138 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string)
-                    outputColumnNames: _col0, _col1, _col2, _col3
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 0
+#### A masked pattern was here ####
+                    NumFilesPerFileSink: 1
                     Statistics: Num rows: 13 Data size: 138 Basic stats: COMPLETE Column stats: NONE
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 0
-#### A masked pattern was here ####
-                      NumFilesPerFileSink: 1
-                      Statistics: Num rows: 13 Data size: 138 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                      table:
-                          input format: org.apache.hadoop.mapred.TextInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                          properties:
-                            columns _col0,_col1,_col2,_col3
-                            columns.types string:string:string:string
-                            escape.delim \
-                            hive.serialization.extend.additional.nesting.levels true
-                            serialization.format 1
-                            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      TotalFiles: 1
-                      GatherStats: false
-                      MultiFileSpray: false
+#### A masked pattern was here ####
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        properties:
+                          columns _col0,_col1,_col2,_col3
+                          columns.types string:string:string:string
+                          escape.delim \
+                          hive.serialization.extend.additional.nesting.levels true
+                          serialization.escape.crlf true
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    TotalFiles: 1
+                    GatherStats: false
+                    MultiFileSpray: false
 
   Stage: Stage-0
     Fetch Operator
@@ -919,8 +679,6 @@ PREHOOK: Input: default@src
 PREHOOK: Input: default@srcpart
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
 #### A masked pattern was here ####
 POSTHOOK: query: FROM 
   src a
@@ -934,8 +692,6 @@ POSTHOOK: Input: default@src
 POSTHOOK: Input: default@srcpart
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
 #### A masked pattern was here ####
 17	val_17	17	val_17
 17	val_17	17	val_17
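
A note on the recurring COLUMN_STATS_ACCURATE hunks in the diff above: the flat value `true` is replaced by a JSON value that records basic table stats and per-column stats separately, e.g. {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}. As a minimal sketch of how such a property comes to exist and where it surfaces (the table and its data here are hypothetical, not part of this patch):

    -- hypothetical throwaway table; not part of the patch
    CREATE TABLE stats_demo (key STRING, value STRING);
    INSERT INTO TABLE stats_demo VALUES ('1', 'val_1');
    -- basic stats (numRows, rawDataSize, totalSize) are typically gathered on
    -- write when hive.stats.autogather=true; column stats need an explicit pass:
    ANALYZE TABLE stats_demo COMPUTE STATISTICS FOR COLUMNS;
    -- DESCRIBE FORMATTED should now show the JSON form of the property, and
    -- EXPLAIN EXTENDED plans over the table print it as in the hunks above:
    DESCRIBE FORMATTED stats_demo;
    EXPLAIN EXTENDED SELECT key, value FROM stats_demo WHERE key > '1';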

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/spark/subquery_multiinsert.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/subquery_multiinsert.q.java1.7.out b/ql/src/test/results/clientpositive/spark/subquery_multiinsert.q.java1.7.out
deleted file mode 100644
index b43ea5c..0000000
--- a/ql/src/test/results/clientpositive/spark/subquery_multiinsert.q.java1.7.out
+++ /dev/null
@@ -1,886 +0,0 @@
-PREHOOK: query: -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-CREATE TABLE src_4(
-  key STRING, 
-  value STRING
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@src_4
-POSTHOOK: query: -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-CREATE TABLE src_4(
-  key STRING, 
-  value STRING
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@src_4
-RUN: Stage-0:DDL
-PREHOOK: query: CREATE TABLE src_5( 
-  key STRING, 
-  value STRING
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@src_5
-POSTHOOK: query: CREATE TABLE src_5( 
-  key STRING, 
-  value STRING
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@src_5
-RUN: Stage-0:DDL
-Warning: Shuffle Join JOIN[31][tables = [sq_2_notin_nullcheck]] in Work 'Reducer 2' is a cross product
-PREHOOK: query: explain
-from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-PREHOOK: type: QUERY
-POSTHOOK: query: explain
-from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-2 is a root stage
-  Stage-1 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-4 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-2
-    Spark
-      Edges:
-        Reducer 2 <- Map 10 (PARTITION-LEVEL SORT, 1), Reducer 9 (PARTITION-LEVEL SORT, 1)
-        Reducer 3 <- Map 7 (PARTITION-LEVEL SORT, 2), Reducer 2 (PARTITION-LEVEL SORT, 2)
-        Reducer 5 <- Map 11 (PARTITION-LEVEL SORT, 2), Map 6 (PARTITION-LEVEL SORT, 2)
-        Reducer 9 <- Map 8 (GROUP, 1)
-        Reducer 4 <- Reducer 3 (SORT, 1)
-#### A masked pattern was here ####
-      Vertices:
-        Map 10 
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    sort order: 
-                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: key (type: string), value (type: string)
-        Map 11 
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: key (type: string), value (type: string)
-                    sort order: ++
-                    Map-reduce partition columns: key (type: string), value (type: string)
-                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: a
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: ((key > '9') and value is not null) (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Group By Operator
-                        keys: _col0 (type: string), _col1 (type: string)
-                        mode: hash
-                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                        Reduce Output Operator
-                          key expressions: _col0 (type: string), _col1 (type: string)
-                          sort order: ++
-                          Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
-                          Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: s1
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key > '2') (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: string)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-        Map 8 
-            Map Operator Tree:
-                TableScan
-                  alias: s1
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: ((key > '2') and key is null) (type: boolean)
-                    Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                      Group By Operator
-                        aggregations: count()
-                        mode: hash
-                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                        Reduce Output Operator
-                          sort order: 
-                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                          value expressions: _col0 (type: bigint)
-        Reducer 2 
-            Reduce Operator Tree:
-              Join Operator
-                condition map:
-                     Left Semi Join 0 to 1
-                keys:
-                  0 
-                  1 
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: string)
-        Reducer 3 
-            Reduce Operator Tree:
-              Join Operator
-                condition map:
-                     Left Outer Join0 to 1
-                keys:
-                  0 _col0 (type: string)
-                  1 _col0 (type: string)
-                outputColumnNames: _col0, _col1, _col5
-                Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-                Filter Operator
-                  predicate: _col5 is null (type: boolean)
-                  Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: _col0 (type: string), _col1 (type: string)
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: string)
-                      sort order: +
-                      Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                      value expressions: _col1 (type: string)
-        Reducer 4 
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.src_5
-        Reducer 5 
-            Reduce Operator Tree:
-              Join Operator
-                condition map:
-                     Left Semi Join 0 to 1
-                keys:
-                  0 key (type: string), value (type: string)
-                  1 _col0 (type: string), _col1 (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.src_4
-        Reducer 9 
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                Filter Operator
-                  predicate: (_col0 = 0) (type: boolean)
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      keys: 0 (type: bigint)
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-
-  Stage: Stage-1
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src_5
-
-  Stage: Stage-3
-    Stats-Aggr Operator
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src_4
-
-  Stage: Stage-4
-    Stats-Aggr Operator
-
-Warning: Shuffle Join JOIN[31][tables = [sq_2_notin_nullcheck]] in Work 'Reducer 2' is a cross product
-PREHOOK: query: from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@src_4
-PREHOOK: Output: default@src_5
-POSTHOOK: query: from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@src_4
-POSTHOOK: Output: default@src_5
-POSTHOOK: Lineage: src_4.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_4.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-RUN: Stage-2:MAPRED
-RUN: Stage-1:MOVE
-RUN: Stage-0:MOVE
-RUN: Stage-3:STATS
-RUN: Stage-4:STATS
-PREHOOK: query: select * from src_4
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_4
-#### A masked pattern was here ####
-POSTHOOK: query: select * from src_4
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_4
-#### A masked pattern was here ####
-90	val_90
-90	val_90
-90	val_90
-92	val_92
-95	val_95
-95	val_95
-96	val_96
-97	val_97
-97	val_97
-98	val_98
-98	val_98
-PREHOOK: query: select * from src_5
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_5
-#### A masked pattern was here ####
-POSTHOOK: query: select * from src_5
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_5
-#### A masked pattern was here ####
-0	val_0
-0	val_0
-0	val_0
-10	val_10
-100	val_100
-100	val_100
-103	val_103
-103	val_103
-104	val_104
-104	val_104
-105	val_105
-11	val_11
-111	val_111
-113	val_113
-113	val_113
-114	val_114
-116	val_116
-118	val_118
-118	val_118
-119	val_119
-119	val_119
-119	val_119
-12	val_12
-12	val_12
-120	val_120
-120	val_120
-125	val_125
-125	val_125
-126	val_126
-128	val_128
-128	val_128
-128	val_128
-129	val_129
-129	val_129
-131	val_131
-133	val_133
-134	val_134
-134	val_134
-136	val_136
-137	val_137
-137	val_137
-138	val_138
-138	val_138
-138	val_138
-138	val_138
-143	val_143
-145	val_145
-146	val_146
-146	val_146
-149	val_149
-149	val_149
-15	val_15
-15	val_15
-150	val_150
-152	val_152
-152	val_152
-153	val_153
-155	val_155
-156	val_156
-157	val_157
-158	val_158
-160	val_160
-162	val_162
-163	val_163
-164	val_164
-164	val_164
-165	val_165
-165	val_165
-166	val_166
-167	val_167
-167	val_167
-167	val_167
-168	val_168
-169	val_169
-169	val_169
-169	val_169
-169	val_169
-17	val_17
-170	val_170
-172	val_172
-172	val_172
-174	val_174
-174	val_174
-175	val_175
-175	val_175
-176	val_176
-176	val_176
-177	val_177
-178	val_178
-179	val_179
-179	val_179
-18	val_18
-18	val_18
-180	val_180
-181	val_181
-183	val_183
-186	val_186
-187	val_187
-187	val_187
-187	val_187
-189	val_189
-19	val_19
-190	val_190
-191	val_191
-191	val_191
-192	val_192
-193	val_193
-193	val_193
-193	val_193
-194	val_194
-195	val_195
-195	val_195
-196	val_196
-197	val_197
-197	val_197
-199	val_199
-199	val_199
-199	val_199
-2	val_2
-Warning: Map Join MAPJOIN[46][bigTable=b] in task 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: explain
-from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-PREHOOK: type: QUERY
-POSTHOOK: query: explain
-from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-5 is a root stage
-  Stage-2 depends on stages: Stage-5
-  Stage-1 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-4 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-5
-    Spark
-      Edges:
-        Reducer 6 <- Map 5 (GROUP, 1)
-#### A masked pattern was here ####
-      Vertices:
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: a
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: ((key > '9') and value is not null) (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Group By Operator
-                        keys: _col0 (type: string), _col1 (type: string)
-                        mode: hash
-                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                        Spark HashTable Sink Operator
-                          keys:
-                            0 key (type: string), value (type: string)
-                            1 _col0 (type: string), _col1 (type: string)
-            Local Work:
-              Map Reduce Local Work
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: s1
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key > '2') (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Spark HashTable Sink Operator
-                        keys:
-                          0 _col0 (type: string)
-                          1 _col0 (type: string)
-            Local Work:
-              Map Reduce Local Work
-        Map 5 
-            Map Operator Tree:
-                TableScan
-                  alias: s1
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: ((key > '2') and key is null) (type: boolean)
-                    Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                      Group By Operator
-                        aggregations: count()
-                        mode: hash
-                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                        Reduce Output Operator
-                          sort order: 
-                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                          value expressions: _col0 (type: bigint)
-        Reducer 6 
-            Local Work:
-              Map Reduce Local Work
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                Filter Operator
-                  predicate: (_col0 = 0) (type: boolean)
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      keys: 0 (type: bigint)
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                      Spark HashTable Sink Operator
-                        keys:
-                          0 
-                          1 
-
-  Stage: Stage-2
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (SORT, 1)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Map Join Operator
-                    condition map:
-                         Left Semi Join 0 to 1
-                    keys:
-                      0 
-                      1 
-                    outputColumnNames: _col0, _col1
-                    input vertices:
-                      1 Reducer 6
-                    Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                    Map Join Operator
-                      condition map:
-                           Left Outer Join0 to 1
-                      keys:
-                        0 _col0 (type: string)
-                        1 _col0 (type: string)
-                      outputColumnNames: _col0, _col1, _col5
-                      input vertices:
-                        1 Map 4
-                      Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-                      Filter Operator
-                        predicate: _col5 is null (type: boolean)
-                        Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                        Select Operator
-                          expressions: _col0 (type: string), _col1 (type: string)
-                          outputColumnNames: _col0, _col1
-                          Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                          Reduce Output Operator
-                            key expressions: _col0 (type: string)
-                            sort order: +
-                            Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                            value expressions: _col1 (type: string)
-                  Map Join Operator
-                    condition map:
-                         Left Semi Join 0 to 1
-                    keys:
-                      0 key (type: string), value (type: string)
-                      1 _col0 (type: string), _col1 (type: string)
-                    outputColumnNames: _col0, _col1
-                    input vertices:
-                      1 Map 3
-                    Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                    File Output Operator
-                      compressed: false
-                      Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                      table:
-                          input format: org.apache.hadoop.mapred.TextInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                          name: default.src_4
-            Local Work:
-              Map Reduce Local Work
-        Reducer 2 
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.src_5
-
-  Stage: Stage-1
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src_5
-
-  Stage: Stage-3
-    Stats-Aggr Operator
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src_4
-
-  Stage: Stage-4
-    Stats-Aggr Operator
-
-Warning: Map Join MAPJOIN[46][bigTable=b] in task 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@src_4
-PREHOOK: Output: default@src_5
-POSTHOOK: query: from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@src_4
-POSTHOOK: Output: default@src_5
-POSTHOOK: Lineage: src_4.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_4.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-RUN: Stage-5:MAPRED
-RUN: Stage-2:MAPRED
-RUN: Stage-1:MOVE
-RUN: Stage-0:MOVE
-RUN: Stage-3:STATS
-RUN: Stage-4:STATS
-PREHOOK: query: select * from src_4
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_4
-#### A masked pattern was here ####
-POSTHOOK: query: select * from src_4
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_4
-#### A masked pattern was here ####
-90	val_90
-90	val_90
-90	val_90
-92	val_92
-95	val_95
-95	val_95
-96	val_96
-97	val_97
-97	val_97
-98	val_98
-98	val_98
-PREHOOK: query: select * from src_5
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_5
-#### A masked pattern was here ####
-POSTHOOK: query: select * from src_5
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_5
-#### A masked pattern was here ####
-0	val_0
-0	val_0
-0	val_0
-10	val_10
-100	val_100
-100	val_100
-103	val_103
-103	val_103
-104	val_104
-104	val_104
-105	val_105
-11	val_11
-111	val_111
-113	val_113
-113	val_113
-114	val_114
-116	val_116
-118	val_118
-118	val_118
-119	val_119
-119	val_119
-119	val_119
-12	val_12
-12	val_12
-120	val_120
-120	val_120
-125	val_125
-125	val_125
-126	val_126
-128	val_128
-128	val_128
-128	val_128
-129	val_129
-129	val_129
-131	val_131
-133	val_133
-134	val_134
-134	val_134
-136	val_136
-137	val_137
-137	val_137
-138	val_138
-138	val_138
-138	val_138
-138	val_138
-143	val_143
-145	val_145
-146	val_146
-146	val_146
-149	val_149
-149	val_149
-15	val_15
-15	val_15
-150	val_150
-152	val_152
-152	val_152
-153	val_153
-155	val_155
-156	val_156
-157	val_157
-158	val_158
-160	val_160
-162	val_162
-163	val_163
-164	val_164
-164	val_164
-165	val_165
-165	val_165
-166	val_166
-167	val_167
-167	val_167
-167	val_167
-168	val_168
-169	val_169
-169	val_169
-169	val_169
-169	val_169
-17	val_17
-170	val_170
-172	val_172
-172	val_172
-174	val_174
-174	val_174
-175	val_175
-175	val_175
-176	val_176
-176	val_176
-177	val_177
-178	val_178
-179	val_179
-179	val_179
-18	val_18
-18	val_18
-180	val_180
-181	val_181
-183	val_183
-186	val_186
-187	val_187
-187	val_187
-187	val_187
-189	val_189
-19	val_19
-190	val_190
-191	val_191
-191	val_191
-192	val_192
-193	val_193
-193	val_193
-193	val_193
-194	val_194
-195	val_195
-195	val_195
-196	val_196
-197	val_197
-197	val_197
-199	val_199
-199	val_199
-199	val_199
-2	val_2


[11/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/outer_join_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/outer_join_ppr.q.out b/ql/src/test/results/clientpositive/outer_join_ppr.q.out
new file mode 100644
index 0000000..cf20851
--- /dev/null
+++ b/ql/src/test/results/clientpositive/outer_join_ppr.q.out
@@ -0,0 +1,683 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+EXPLAIN EXTENDED
+ FROM 
+  src a
+ FULL OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key AND b.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
+PREHOOK: type: QUERY
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+EXPLAIN EXTENDED
+ FROM 
+  src a
+ FULL OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key AND b.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: a
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col0 (type: string)
+                null sort order: a
+                sort order: +
+                Map-reduce partition columns: _col0 (type: string)
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                tag: 0
+                value expressions: _col1 (type: string)
+                auto parallelism: false
+          TableScan
+            alias: b
+            Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string), ds (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col0 (type: string)
+                null sort order: a
+                sort order: +
+                Map-reduce partition columns: _col0 (type: string)
+                Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+                tag: 1
+                value expressions: _col1 (type: string), _col2 (type: string)
+                auto parallelism: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numRows 500
+              rawDataSize 5312
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numRows 500
+                rawDataSize 5312
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=11
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=12
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=11
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-09
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=12
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-09
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+      Truncated Path -> Alias:
+        /src [$hdt$_0:a]
+        /srcpart/ds=2008-04-08/hr=11 [$hdt$_1:b]
+        /srcpart/ds=2008-04-08/hr=12 [$hdt$_1:b]
+        /srcpart/ds=2008-04-09/hr=11 [$hdt$_1:b]
+        /srcpart/ds=2008-04-09/hr=12 [$hdt$_1:b]
+      Needs Tagging: true
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Outer Join 0 to 1
+          filter mappings:
+            1 [0, 1]
+          filter predicates:
+            0 
+            1 {(VALUE._col1 = '2008-04-08')}
+          keys:
+            0 _col0 (type: string)
+            1 _col0 (type: string)
+          outputColumnNames: _col0, _col1, _col2, _col3
+          Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
+          Filter Operator
+            isSamplingPred: false
+            predicate: ((UDFToDouble(_col0) > 10.0) and (UDFToDouble(_col0) < 20.0) and (UDFToDouble(_col2) > 15.0) and (UDFToDouble(_col2) < 25.0)) (type: boolean)
+            Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+#### A masked pattern was here ####
+              NumFilesPerFileSink: 1
+              Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  properties:
+                    columns _col0,_col1,_col2,_col3
+                    columns.types string:string:string:string
+                    escape.delim \
+                    hive.serialization.extend.additional.nesting.levels true
+                    serialization.escape.crlf true
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: FROM 
+  src a
+ FULL OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key AND b.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: FROM 
+  src a
+ FULL OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key AND b.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+17	val_17	17	val_17
+17	val_17	17	val_17
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+19	val_19	19	val_19
+19	val_19	19	val_19
+PREHOOK: query: EXPLAIN EXTENDED
+ FROM 
+  src a
+ FULL OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED
+ FROM 
+  src a
+ FULL OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: a
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Filter Operator
+              isSamplingPred: false
+              predicate: ((UDFToDouble(key) > 15.0) and (UDFToDouble(key) < 25.0)) (type: boolean)
+              Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: string), value (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  null sort order: a
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+                  tag: 0
+                  value expressions: _col1 (type: string)
+                  auto parallelism: false
+          TableScan
+            alias: b
+            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Filter Operator
+              isSamplingPred: false
+              predicate: ((UDFToDouble(key) > 15.0) and (UDFToDouble(key) < 25.0)) (type: boolean)
+              Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: string), value (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  null sort order: a
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
+                  tag: 1
+                  value expressions: _col1 (type: string)
+                  auto parallelism: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numRows 500
+              rawDataSize 5312
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numRows 500
+                rawDataSize 5312
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=11
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=12
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+      Truncated Path -> Alias:
+        /src [$hdt$_0:a]
+        /srcpart/ds=2008-04-08/hr=11 [$hdt$_1:b]
+        /srcpart/ds=2008-04-08/hr=12 [$hdt$_1:b]
+      Needs Tagging: true
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Right Outer Join0 to 1
+          keys:
+            0 _col0 (type: string)
+            1 _col0 (type: string)
+          outputColumnNames: _col0, _col1, _col2, _col3
+          Statistics: Num rows: 122 Data size: 1296 Basic stats: COMPLETE Column stats: NONE
+          Filter Operator
+            isSamplingPred: false
+            predicate: ((UDFToDouble(_col0) > 10.0) and (UDFToDouble(_col0) < 20.0)) (type: boolean)
+            Statistics: Num rows: 13 Data size: 138 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+#### A masked pattern was here ####
+              NumFilesPerFileSink: 1
+              Statistics: Num rows: 13 Data size: 138 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  properties:
+                    columns _col0,_col1,_col2,_col3
+                    columns.types string:string:string:string
+                    escape.delim \
+                    hive.serialization.extend.additional.nesting.levels true
+                    serialization.escape.crlf true
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: FROM 
+  src a
+ FULL OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: FROM 
+  src a
+ FULL OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+17	val_17	17	val_17
+17	val_17	17	val_17
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+19	val_19	19	val_19
+19	val_19	19	val_19

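Note on the golden file above: the two EXPLAIN EXTENDED plans pin down the partition-pruning behavior under test. With b.ds = '2008-04-08' inside the ON clause of the FULL OUTER JOIN, no partition can be pruned: all four srcpart partitions appear under Truncated Path -> Alias, and the ds check survives only as a post-join filter predicate. With the same predicate moved to the WHERE clause, the plan rewrites the join as a Right Outer Join and scans only the two ds=2008-04-08 partitions. Both variants return the same twelve rows.
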
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/parquet_map_null.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/parquet_map_null.q.java1.7.out b/ql/src/test/results/clientpositive/parquet_map_null.q.java1.7.out
deleted file mode 100644
index 825e668..0000000
--- a/ql/src/test/results/clientpositive/parquet_map_null.q.java1.7.out
+++ /dev/null
@@ -1,70 +0,0 @@
-PREHOOK: query: -- This test attempts to write a parquet table from an avro table that contains map null values
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE IF EXISTS avro_table
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- This test attempts to write a parquet table from an avro table that contains map null values
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE IF EXISTS avro_table
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE IF EXISTS parquet_table
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE IF EXISTS parquet_table
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE avro_table (avreau_col_1 map<string,string>) STORED AS AVRO
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@avro_table
-POSTHOOK: query: CREATE TABLE avro_table (avreau_col_1 map<string,string>) STORED AS AVRO
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@avro_table
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/map_null_val.avro' OVERWRITE INTO TABLE avro_table
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@avro_table
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/map_null_val.avro' OVERWRITE INTO TABLE avro_table
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@avro_table
-PREHOOK: query: CREATE TABLE parquet_table STORED AS PARQUET AS SELECT * FROM avro_table
-PREHOOK: type: CREATETABLE_AS_SELECT
-PREHOOK: Input: default@avro_table
-PREHOOK: Output: database:default
-PREHOOK: Output: default@parquet_table
-POSTHOOK: query: CREATE TABLE parquet_table STORED AS PARQUET AS SELECT * FROM avro_table
-POSTHOOK: type: CREATETABLE_AS_SELECT
-POSTHOOK: Input: default@avro_table
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@parquet_table
-POSTHOOK: Lineage: parquet_table.avreau_col_1 SIMPLE [(avro_table)avro_table.FieldSchema(name:avreau_col_1, type:map<string,string>, comment:), ]
-PREHOOK: query: SELECT * FROM parquet_table
-PREHOOK: type: QUERY
-PREHOOK: Input: default@parquet_table
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM parquet_table
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@parquet_table
-#### A masked pattern was here ####
-{"key4":null,"key3":"val3"}
-{"key4":null,"key3":"val3"}
-{"key2":"val2","key1":null}
-{"key4":null,"key3":"val3"}
-{"key4":null,"key3":"val3"}
-PREHOOK: query: DROP TABLE avro_table
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@avro_table
-PREHOOK: Output: default@avro_table
-POSTHOOK: query: DROP TABLE avro_table
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@avro_table
-POSTHOOK: Output: default@avro_table
-PREHOOK: query: DROP TABLE parquet_table
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@parquet_table
-PREHOOK: Output: default@parquet_table
-POSTHOOK: query: DROP TABLE parquet_table
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@parquet_table
-POSTHOOK: Output: default@parquet_table

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/parquet_map_null.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/parquet_map_null.q.java1.8.out b/ql/src/test/results/clientpositive/parquet_map_null.q.java1.8.out
deleted file mode 100644
index 1462cc2..0000000
--- a/ql/src/test/results/clientpositive/parquet_map_null.q.java1.8.out
+++ /dev/null
@@ -1,70 +0,0 @@
-PREHOOK: query: -- This test attempts to write a parquet table from an avro table that contains map null values
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE IF EXISTS avro_table
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- This test attempts to write a parquet table from an avro table that contains map null values
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE IF EXISTS avro_table
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE IF EXISTS parquet_table
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE IF EXISTS parquet_table
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE avro_table (avreau_col_1 map<string,string>) STORED AS AVRO
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@avro_table
-POSTHOOK: query: CREATE TABLE avro_table (avreau_col_1 map<string,string>) STORED AS AVRO
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@avro_table
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/map_null_val.avro' OVERWRITE INTO TABLE avro_table
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@avro_table
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/map_null_val.avro' OVERWRITE INTO TABLE avro_table
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@avro_table
-PREHOOK: query: CREATE TABLE parquet_table STORED AS PARQUET AS SELECT * FROM avro_table
-PREHOOK: type: CREATETABLE_AS_SELECT
-PREHOOK: Input: default@avro_table
-PREHOOK: Output: database:default
-PREHOOK: Output: default@parquet_table
-POSTHOOK: query: CREATE TABLE parquet_table STORED AS PARQUET AS SELECT * FROM avro_table
-POSTHOOK: type: CREATETABLE_AS_SELECT
-POSTHOOK: Input: default@avro_table
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@parquet_table
-POSTHOOK: Lineage: parquet_table.avreau_col_1 SIMPLE [(avro_table)avro_table.FieldSchema(name:avreau_col_1, type:map<string,string>, comment:), ]
-PREHOOK: query: SELECT * FROM parquet_table
-PREHOOK: type: QUERY
-PREHOOK: Input: default@parquet_table
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM parquet_table
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@parquet_table
-#### A masked pattern was here ####
-{"key3":"val3","key4":null}
-{"key3":"val3","key4":null}
-{"key1":null,"key2":"val2"}
-{"key3":"val3","key4":null}
-{"key3":"val3","key4":null}
-PREHOOK: query: DROP TABLE avro_table
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@avro_table
-PREHOOK: Output: default@avro_table
-POSTHOOK: query: DROP TABLE avro_table
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@avro_table
-POSTHOOK: Output: default@avro_table
-PREHOOK: query: DROP TABLE parquet_table
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@parquet_table
-PREHOOK: Output: default@parquet_table
-POSTHOOK: query: DROP TABLE parquet_table
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@parquet_table
-POSTHOOK: Output: default@parquet_table

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/parquet_map_null.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/parquet_map_null.q.out b/ql/src/test/results/clientpositive/parquet_map_null.q.out
new file mode 100644
index 0000000..d1357c1
--- /dev/null
+++ b/ql/src/test/results/clientpositive/parquet_map_null.q.out
@@ -0,0 +1,68 @@
+PREHOOK: query: -- This test attempts to write a parquet table from an avro table that contains map null values
+
+DROP TABLE IF EXISTS avro_table
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: -- This test attempts to write a parquet table from an avro table that contains map null values
+
+DROP TABLE IF EXISTS avro_table
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE IF EXISTS parquet_table
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE IF EXISTS parquet_table
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE avro_table (avreau_col_1 map<string,string>) STORED AS AVRO
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@avro_table
+POSTHOOK: query: CREATE TABLE avro_table (avreau_col_1 map<string,string>) STORED AS AVRO
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@avro_table
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/map_null_val.avro' OVERWRITE INTO TABLE avro_table
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@avro_table
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/map_null_val.avro' OVERWRITE INTO TABLE avro_table
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@avro_table
+PREHOOK: query: CREATE TABLE parquet_table STORED AS PARQUET AS SELECT * FROM avro_table
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@avro_table
+PREHOOK: Output: database:default
+PREHOOK: Output: default@parquet_table
+POSTHOOK: query: CREATE TABLE parquet_table STORED AS PARQUET AS SELECT * FROM avro_table
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@avro_table
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@parquet_table
+POSTHOOK: Lineage: parquet_table.avreau_col_1 SIMPLE [(avro_table)avro_table.FieldSchema(name:avreau_col_1, type:map<string,string>, comment:), ]
+PREHOOK: query: SELECT * FROM parquet_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@parquet_table
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM parquet_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@parquet_table
+#### A masked pattern was here ####
+{"key3":"val3","key4":null}
+{"key3":"val3","key4":null}
+{"key1":null,"key2":"val2"}
+{"key3":"val3","key4":null}
+{"key3":"val3","key4":null}
+PREHOOK: query: DROP TABLE avro_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@avro_table
+PREHOOK: Output: default@avro_table
+POSTHOOK: query: DROP TABLE avro_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@avro_table
+POSTHOOK: Output: default@avro_table
+PREHOOK: query: DROP TABLE parquet_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@parquet_table
+PREHOOK: Output: default@parquet_table
+POSTHOOK: query: DROP TABLE parquet_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@parquet_table
+POSTHOOK: Output: default@parquet_table

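Note on the three parquet_map_null diffs above: the java1.7 and java1.8 golden files differ only in map entry order ({"key4":null,"key3":"val3"} versus {"key3":"val3","key4":null}), which is why HIVE-13549 can collapse them into a single parquet_map_null.q.out keeping the JDK8 ordering. The delta comes from HashMap iteration order being unspecified and changing between JDK7 and JDK8. A minimal sketch, assuming only a stock JDK, that reproduces the effect:

    import java.util.HashMap;
    import java.util.Map;

    public class MapOrderDemo {
      public static void main(String[] args) {
        Map<String, String> m = new HashMap<>();
        m.put("key4", null);
        m.put("key3", "val3");
        // HashMap iteration order is unspecified; the hash-spreading
        // function changed between JDK7 and JDK8, so the same inserts
        // can print {key4=null, key3=val3} on one JVM and
        // {key3=val3, key4=null} on another -- the exact delta between
        // the two deleted golden files above.
        System.out.println(m);
      }
    }
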
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/plan_json.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/plan_json.q.java1.7.out b/ql/src/test/results/clientpositive/plan_json.q.java1.7.out
deleted file mode 100644
index dda4adc..0000000
--- a/ql/src/test/results/clientpositive/plan_json.q.java1.7.out
+++ /dev/null
@@ -1,13 +0,0 @@
-PREHOOK: query: -- explain plan json:  the query gets the formatted json output of the query plan of the hive query
-
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-EXPLAIN FORMATTED SELECT count(1) FROM src
-PREHOOK: type: QUERY
-POSTHOOK: query: -- explain plan json:  the query gets the formatted json output of the query plan of the hive query
-
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-EXPLAIN FORMATTED SELECT count(1) FROM src
-POSTHOOK: type: QUERY
-{"STAGE DEPENDENCIES":{"Stage-1":{"ROOT STAGE":"TRUE"},"Stage-0":{"DEPENDENT STAGES":"Stage-1"}},"STAGE PLANS":{"Stage-1":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"src","Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE","children":{"Select Operator":{"Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE","children":{"Group By Operator":{"aggregations:":["count(1)"],"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","children":{"Reduce Output Operator":{"sort order:":"","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","value expressions:":"_col0 (type: bigint)"}}}}}}}}],"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column 
 stats: COMPLETE","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"}}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{}}}}}}

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/plan_json.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/plan_json.q.java1.8.out b/ql/src/test/results/clientpositive/plan_json.q.java1.8.out
deleted file mode 100644
index dda4adc..0000000
--- a/ql/src/test/results/clientpositive/plan_json.q.java1.8.out
+++ /dev/null
@@ -1,13 +0,0 @@
-PREHOOK: query: -- explain plan json:  the query gets the formatted json output of the query plan of the hive query
-
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-EXPLAIN FORMATTED SELECT count(1) FROM src
-PREHOOK: type: QUERY
-POSTHOOK: query: -- explain plan json:  the query gets the formatted json output of the query plan of the hive query
-
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-EXPLAIN FORMATTED SELECT count(1) FROM src
-POSTHOOK: type: QUERY
-{"STAGE DEPENDENCIES":{"Stage-1":{"ROOT STAGE":"TRUE"},"Stage-0":{"DEPENDENT STAGES":"Stage-1"}},"STAGE PLANS":{"Stage-1":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"src","Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE","children":{"Select Operator":{"Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE","children":{"Group By Operator":{"aggregations:":["count(1)"],"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","children":{"Reduce Output Operator":{"sort order:":"","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","value expressions:":"_col0 (type: bigint)"}}}}}}}}],"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column 
 stats: COMPLETE","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"}}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{}}}}}}

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/plan_json.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/plan_json.q.out b/ql/src/test/results/clientpositive/plan_json.q.out
new file mode 100644
index 0000000..98c6626
--- /dev/null
+++ b/ql/src/test/results/clientpositive/plan_json.q.out
@@ -0,0 +1,11 @@
+PREHOOK: query: -- explain plan json:  the query gets the formatted json output of the query plan of the hive query
+
+
+EXPLAIN FORMATTED SELECT count(1) FROM src
+PREHOOK: type: QUERY
+POSTHOOK: query: -- explain plan json:  the query gets the formatted json output of the query plan of the hive query
+
+
+EXPLAIN FORMATTED SELECT count(1) FROM src
+POSTHOOK: type: QUERY
+{"STAGE DEPENDENCIES":{"Stage-1":{"ROOT STAGE":"TRUE"},"Stage-0":{"DEPENDENT STAGES":"Stage-1"}},"STAGE PLANS":{"Stage-1":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"src","Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE","children":{"Select Operator":{"Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE","children":{"Group By Operator":{"aggregations:":["count(1)"],"mode:":"hash","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","children":{"Reduce Output Operator":{"sort order:":"","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","value expressions:":"_col0 (type: bigint)"}}}}}}}}],"Reduce Operator Tree:":{"Group By Operator":{"aggregations:":["count(VALUE._col0)"],"mode:":"mergepartial","outputColumnNames:":["_col0"],"Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column 
 stats: COMPLETE","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"}}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{}}}}}}

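Note on the plan_json diffs above: the java1.7 and java1.8 golden files were byte-identical (both at index dda4adc), so collapsing them into plan_json.q.out only drops the -- JAVA_VERSION_SPECIFIC_OUTPUT marker. To fetch the same single-line JSON plan from a live HiveServer2 rather than a golden file, a hedged sketch over standard Hive JDBC (the URL is a placeholder; assumes hive-jdbc on the classpath):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class ExplainJson {
      public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                 "jdbc:hive2://localhost:10000/default");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery(
                 "EXPLAIN FORMATTED SELECT count(1) FROM src")) {
          while (rs.next()) {
            // EXPLAIN output comes back as rows of text; FORMATTED
            // emits the whole plan as one JSON document.
            System.out.println(rs.getString(1));
          }
        }
      }
    }
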
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/spark/join0.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join0.q.java1.7.out b/ql/src/test/results/clientpositive/spark/join0.q.java1.7.out
deleted file mode 100644
index b3a58d0..0000000
--- a/ql/src/test/results/clientpositive/spark/join0.q.java1.7.out
+++ /dev/null
@@ -1,238 +0,0 @@
-Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Work 'Reducer 2' is a cross product
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
--- SORT_QUERY_RESULTS
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
--- SORT_QUERY_RESULTS
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 1), Map 4 (PARTITION-LEVEL SORT, 1)
-        Reducer 3 <- Reducer 2 (PARTITION-LEVEL SORT, 2)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key < 10) (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: string), _col1 (type: string)
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key < 10) (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: string), _col1 (type: string)
-        Reducer 2 
-            Reduce Operator Tree:
-              Join Operator
-                condition map:
-                     Inner Join 0 to 1
-                keys:
-                  0 
-                  1 
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
-                  sort order: ++++
-                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-        Reducer 3 
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Work 'Reducer 2' is a cross product
-PREHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-#### A masked pattern was here ####
-Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Work 'Reducer 2' is a cross product
-PREHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	2	val_2
-0	val_0	2	val_2
-0	val_0	2	val_2
-0	val_0	4	val_4
-0	val_0	4	val_4
-0	val_0	4	val_4
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	8	val_8
-0	val_0	8	val_8
-0	val_0	8	val_8
-0	val_0	9	val_9
-0	val_0	9	val_9
-0	val_0	9	val_9
-2	val_2	0	val_0
-2	val_2	0	val_0
-2	val_2	0	val_0
-2	val_2	2	val_2
-2	val_2	4	val_4
-2	val_2	5	val_5
-2	val_2	5	val_5
-2	val_2	5	val_5
-2	val_2	8	val_8
-2	val_2	9	val_9
-4	val_4	0	val_0
-4	val_4	0	val_0
-4	val_4	0	val_0
-4	val_4	2	val_2
-4	val_4	4	val_4
-4	val_4	5	val_5
-4	val_4	5	val_5
-4	val_4	5	val_5
-4	val_4	8	val_8
-4	val_4	9	val_9
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	2	val_2
-5	val_5	2	val_2
-5	val_5	2	val_2
-5	val_5	4	val_4
-5	val_5	4	val_4
-5	val_5	4	val_4
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	8	val_8
-5	val_5	8	val_8
-5	val_5	8	val_8
-5	val_5	9	val_9
-5	val_5	9	val_9
-5	val_5	9	val_9
-8	val_8	0	val_0
-8	val_8	0	val_0
-8	val_8	0	val_0
-8	val_8	2	val_2
-8	val_8	4	val_4
-8	val_8	5	val_5
-8	val_8	5	val_5
-8	val_8	5	val_5
-8	val_8	8	val_8
-8	val_8	9	val_9
-9	val_9	0	val_0
-9	val_9	0	val_0
-9	val_9	0	val_0
-9	val_9	2	val_2
-9	val_9	4	val_4
-9	val_9	5	val_5
-9	val_9	5	val_5
-9	val_9	5	val_5
-9	val_9	8	val_8
-9	val_9	9	val_9

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/spark/join0.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join0.q.java1.8.out b/ql/src/test/results/clientpositive/spark/join0.q.java1.8.out
deleted file mode 100644
index 7acd108..0000000
--- a/ql/src/test/results/clientpositive/spark/join0.q.java1.8.out
+++ /dev/null
@@ -1,238 +0,0 @@
-Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Work 'Reducer 2' is a cross product
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
--- SORT_QUERY_RESULTS
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
--- SORT_QUERY_RESULTS
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 1), Map 4 (PARTITION-LEVEL SORT, 1)
-        Reducer 3 <- Reducer 2 (PARTITION-LEVEL SORT, 2)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key < 10) (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: string), _col1 (type: string)
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key < 10) (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: string), _col1 (type: string)
-        Reducer 2 
-            Reduce Operator Tree:
-              Join Operator
-                condition map:
-                     Inner Join 0 to 1
-                keys:
-                  0 
-                  1 
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
-                  sort order: ++++
-                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-        Reducer 3 
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Work 'Reducer 2' is a cross product
-PREHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-#### A masked pattern was here ####
-Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Work 'Reducer 2' is a cross product
-PREHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	2	val_2
-0	val_0	2	val_2
-0	val_0	2	val_2
-0	val_0	4	val_4
-0	val_0	4	val_4
-0	val_0	4	val_4
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	8	val_8
-0	val_0	8	val_8
-0	val_0	8	val_8
-0	val_0	9	val_9
-0	val_0	9	val_9
-0	val_0	9	val_9
-2	val_2	0	val_0
-2	val_2	0	val_0
-2	val_2	0	val_0
-2	val_2	2	val_2
-2	val_2	4	val_4
-2	val_2	5	val_5
-2	val_2	5	val_5
-2	val_2	5	val_5
-2	val_2	8	val_8
-2	val_2	9	val_9
-4	val_4	0	val_0
-4	val_4	0	val_0
-4	val_4	0	val_0
-4	val_4	2	val_2
-4	val_4	4	val_4
-4	val_4	5	val_5
-4	val_4	5	val_5
-4	val_4	5	val_5
-4	val_4	8	val_8
-4	val_4	9	val_9
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	2	val_2
-5	val_5	2	val_2
-5	val_5	2	val_2
-5	val_5	4	val_4
-5	val_5	4	val_4
-5	val_5	4	val_4
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	8	val_8
-5	val_5	8	val_8
-5	val_5	8	val_8
-5	val_5	9	val_9
-5	val_5	9	val_9
-5	val_5	9	val_9
-8	val_8	0	val_0
-8	val_8	0	val_0
-8	val_8	0	val_0
-8	val_8	2	val_2
-8	val_8	4	val_4
-8	val_8	5	val_5
-8	val_8	5	val_5
-8	val_8	5	val_5
-8	val_8	8	val_8
-8	val_8	9	val_9
-9	val_9	0	val_0
-9	val_9	0	val_0
-9	val_9	0	val_0
-9	val_9	2	val_2
-9	val_9	4	val_4
-9	val_9	5	val_5
-9	val_9	5	val_5
-9	val_9	5	val_5
-9	val_9	8	val_8
-9	val_9	9	val_9
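
The Spark variant of the same plan differs from the MapReduce one mainly in its Edges section: each "Reducer N <- ... (PARTITION-LEVEL SORT, k)" line records the upstream vertex, the shuffle type, and the parallelism k that later hunks in this commit adjust. A hedged sketch of how such a plan is obtained interactively, assuming the hive.execution.engine setting used by the spark qtests:

set hive.execution.engine=spark;
-- EXPLAIN now prints the vertex graph (Map/Reducer vertices plus Edges)
-- instead of the MapReduce stage list.
EXPLAIN
SELECT src1.key, src2.key
FROM (SELECT * FROM src WHERE src.key < 10) src1
JOIN (SELECT * FROM src WHERE src.key < 10) src2;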

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/spark/join0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join0.q.out b/ql/src/test/results/clientpositive/spark/join0.q.out
index 56b154f..bc98bb4 100644
--- a/ql/src/test/results/clientpositive/spark/join0.q.out
+++ b/ql/src/test/results/clientpositive/spark/join0.q.out
@@ -1,5 +1,7 @@
 Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Work 'Reducer 2' is a cross product
-PREHOOK: query: EXPLAIN
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+EXPLAIN
 SELECT src1.key as k1, src1.value as v1, 
        src2.key as k2, src2.value as v2 FROM 
   (SELECT * FROM src WHERE src.key < 10) src1 
@@ -7,7 +9,9 @@ SELECT src1.key as k1, src1.value as v1,
   (SELECT * FROM src WHERE src.key < 10) src2
   SORT BY k1, v1, k2, v2
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+EXPLAIN
 SELECT src1.key as k1, src1.value as v1, 
        src2.key as k2, src2.value as v2 FROM 
   (SELECT * FROM src WHERE src.key < 10) src1 
@@ -24,7 +28,7 @@ STAGE PLANS:
     Spark
       Edges:
         Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 1), Map 4 (PARTITION-LEVEL SORT, 1)
-        Reducer 3 <- Reducer 2 (SORT, 1)
+        Reducer 3 <- Reducer 2 (PARTITION-LEVEL SORT, 4)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -64,9 +68,9 @@ STAGE PLANS:
               Join Operator
                 condition map:
                      Inner Join 0 to 1
-                condition expressions:
-                  0 {VALUE._col0} {VALUE._col1}
-                  1 {VALUE._col0} {VALUE._col1}
+                keys:
+                  0 
+                  1 
                 outputColumnNames: _col0, _col1, _col2, _col3
                 Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
@@ -83,8 +87,8 @@ STAGE PLANS:
                   compressed: false
                   Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
                   table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
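
The hunk above shows why the golden file changed rather than the query: a -- SORT_QUERY_RESULTS directive was added at the top of join0.q, and the remaining lines track updated plan printing (join keys instead of condition expressions, sequence-file shuffle formats, and a 4-way PARTITION-LEVEL SORT replacing the single-reducer SORT). A sketch of the directive in a .q file, per the qtest convention that the test driver sorts emitted rows before diffing them against the .q.out:

-- SORT_QUERY_RESULTS
-- With several reducers, the global order of SORT BY output is
-- nondeterministic, so the driver sorts rows before comparison.
SELECT key, value FROM src SORT BY key;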

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/spark/list_bucket_dml_10.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/list_bucket_dml_10.q.java1.7.out b/ql/src/test/results/clientpositive/spark/list_bucket_dml_10.q.java1.7.out
deleted file mode 100644
index 3040544..0000000
--- a/ql/src/test/results/clientpositive/spark/list_bucket_dml_10.q.java1.7.out
+++ /dev/null
@@ -1,252 +0,0 @@
-PREHOOK: query: -- run this test case in minimr to ensure it works in cluster
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key) on ('484','51','103')
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- run this test case in minimr to ensure it works in cluster
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key) on ('484','51','103')
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_static_part
-PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from src
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from src
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Spark
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  GatherStats: false
-                  Select Operator
-                    expressions: key (type: string), value (type: string)
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 1
-#### A masked pattern was here ####
-                      NumFilesPerFileSink: 1
-                      Static Partition Specification: ds=2008-04-08/hr=11/
-                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                      table:
-                          input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                          properties:
-                            bucket_count -1
-                            columns key,value
-                            columns.comments 
-                            columns.types string:string
-#### A masked pattern was here ####
-                            name default.list_bucketing_static_part
-                            partition_columns ds/hr
-                            partition_columns.types string:string
-                            serialization.ddl struct list_bucketing_static_part { string key, string value}
-                            serialization.format 1
-                            serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                          serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                          name: default.list_bucketing_static_part
-                      TotalFiles: 1
-                      GatherStats: true
-                      MultiFileSpray: false
-            Path -> Alias:
-#### A masked pattern was here ####
-            Path -> Partition:
-#### A masked pattern was here ####
-                Partition
-                  base file name: src
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.src
-                    numFiles 1
-                    numRows 500
-                    rawDataSize 5312
-                    serialization.ddl struct src { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.src
-                      numFiles 1
-                      numRows 500
-                      rawDataSize 5312
-                      serialization.ddl struct src { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      totalSize 5812
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.src
-                  name: default.src
-            Truncated Path -> Alias:
-              /src [src]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_static_part
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_static_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	4                   
-	numRows             	500                 
-	rawDataSize         	4812                
-	totalSize           	5520                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key]               	 
-Skewed Values:      	[[484], [51], [103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484, [103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103, [51]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=51}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   


[07/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
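
The .q.java1.7.out and .q.java1.8.out files deleted in this commit existed because the corresponding .q tests carried a -- JAVA_VERSION_SPECIFIC_OUTPUT marker, telling the test driver to select a golden file by JDK version. A sketch of the old and new test headers, assuming the qtest directive convention visible in the deleted files themselves:

-- Before: two golden files, selected by the running JDK.
-- JAVA_VERSION_SPECIFIC_OUTPUT
-- SORT_QUERY_RESULTS

-- After this commit: the marker is dropped and a single shared
-- .q.out serves both JDK 7 and JDK 8.
-- SORT_QUERY_RESULTS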
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/spark/subquery_multiinsert.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/subquery_multiinsert.q.java1.8.out b/ql/src/test/results/clientpositive/spark/subquery_multiinsert.q.java1.8.out
deleted file mode 100644
index 1bfdba2..0000000
--- a/ql/src/test/results/clientpositive/spark/subquery_multiinsert.q.java1.8.out
+++ /dev/null
@@ -1,890 +0,0 @@
-PREHOOK: query: -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-CREATE TABLE src_4(
-  key STRING, 
-  value STRING
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@src_4
-POSTHOOK: query: -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-CREATE TABLE src_4(
-  key STRING, 
-  value STRING
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@src_4
-RUN: Stage-0:DDL
-PREHOOK: query: CREATE TABLE src_5( 
-  key STRING, 
-  value STRING
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@src_5
-POSTHOOK: query: CREATE TABLE src_5( 
-  key STRING, 
-  value STRING
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@src_5
-RUN: Stage-0:DDL
-Warning: Shuffle Join JOIN[31][tables = [sq_2_notin_nullcheck]] in Work 'Reducer 2' is a cross product
-PREHOOK: query: explain
-from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-PREHOOK: type: QUERY
-POSTHOOK: query: explain
-from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-2 is a root stage
-  Stage-1 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-4 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-2
-    Spark
-      Edges:
-        Reducer 2 <- Map 10 (PARTITION-LEVEL SORT, 1), Reducer 9 (PARTITION-LEVEL SORT, 1)
-        Reducer 3 <- Map 7 (PARTITION-LEVEL SORT, 2), Reducer 2 (PARTITION-LEVEL SORT, 2)
-        Reducer 5 <- Map 11 (PARTITION-LEVEL SORT, 2), Map 6 (PARTITION-LEVEL SORT, 2)
-        Reducer 9 <- Map 8 (GROUP, 1)
-        Reducer 4 <- Reducer 3 (SORT, 1)
-#### A masked pattern was here ####
-      Vertices:
-        Map 10 
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    sort order: 
-                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: key (type: string), value (type: string)
-        Map 11 
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: key (type: string), value (type: string)
-                    sort order: ++
-                    Map-reduce partition columns: key (type: string), value (type: string)
-                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-        Map 6 
-            Map Operator Tree:
-                TableScan
-                  alias: a
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: ((key > '9') and value is not null) (type: boolean)
-                    Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                      Group By Operator
-                        keys: _col0 (type: string), _col1 (type: string)
-                        mode: hash
-                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                        Reduce Output Operator
-                          key expressions: _col0 (type: string), _col1 (type: string)
-                          sort order: ++
-                          Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
-                          Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-        Map 7 
-            Map Operator Tree:
-                TableScan
-                  alias: s1
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key > '2') (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: string)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-        Map 8 
-            Map Operator Tree:
-                TableScan
-                  alias: s1
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: ((key > '2') and key is null) (type: boolean)
-                    Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                      Group By Operator
-                        aggregations: count()
-                        mode: hash
-                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                        Reduce Output Operator
-                          sort order: 
-                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                          value expressions: _col0 (type: bigint)
-        Reducer 2 
-            Reduce Operator Tree:
-              Join Operator
-                condition map:
-                     Left Semi Join 0 to 1
-                keys:
-                  0 
-                  1 
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: string)
-        Reducer 3 
-            Reduce Operator Tree:
-              Join Operator
-                condition map:
-                     Left Outer Join0 to 1
-                keys:
-                  0 _col0 (type: string)
-                  1 _col0 (type: string)
-                outputColumnNames: _col0, _col1, _col5
-                Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-                Filter Operator
-                  predicate: _col5 is null (type: boolean)
-                  Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: _col0 (type: string), _col1 (type: string)
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: string)
-                      sort order: +
-                      Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                      value expressions: _col1 (type: string)
-        Reducer 4 
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.src_5
-        Reducer 5 
-            Reduce Operator Tree:
-              Join Operator
-                condition map:
-                     Left Semi Join 0 to 1
-                keys:
-                  0 key (type: string), value (type: string)
-                  1 _col0 (type: string), _col1 (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.src_4
-        Reducer 9 
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                Filter Operator
-                  predicate: (_col0 = 0) (type: boolean)
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: 0 (type: bigint)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      keys: _col0 (type: bigint)
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-
-  Stage: Stage-1
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src_5
-
-  Stage: Stage-3
-    Stats-Aggr Operator
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src_4
-
-  Stage: Stage-4
-    Stats-Aggr Operator
-
-Warning: Shuffle Join JOIN[31][tables = [sq_2_notin_nullcheck]] in Work 'Reducer 2' is a cross product
-PREHOOK: query: from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@src_4
-PREHOOK: Output: default@src_5
-POSTHOOK: query: from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@src_4
-POSTHOOK: Output: default@src_5
-POSTHOOK: Lineage: src_4.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_4.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-RUN: Stage-2:MAPRED
-RUN: Stage-1:MOVE
-RUN: Stage-0:MOVE
-RUN: Stage-3:STATS
-RUN: Stage-4:STATS
-PREHOOK: query: select * from src_4
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_4
-#### A masked pattern was here ####
-POSTHOOK: query: select * from src_4
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_4
-#### A masked pattern was here ####
-90	val_90
-90	val_90
-90	val_90
-92	val_92
-95	val_95
-95	val_95
-96	val_96
-97	val_97
-97	val_97
-98	val_98
-98	val_98
-PREHOOK: query: select * from src_5
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_5
-#### A masked pattern was here ####
-POSTHOOK: query: select * from src_5
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_5
-#### A masked pattern was here ####
-0	val_0
-0	val_0
-0	val_0
-10	val_10
-100	val_100
-100	val_100
-103	val_103
-103	val_103
-104	val_104
-104	val_104
-105	val_105
-11	val_11
-111	val_111
-113	val_113
-113	val_113
-114	val_114
-116	val_116
-118	val_118
-118	val_118
-119	val_119
-119	val_119
-119	val_119
-12	val_12
-12	val_12
-120	val_120
-120	val_120
-125	val_125
-125	val_125
-126	val_126
-128	val_128
-128	val_128
-128	val_128
-129	val_129
-129	val_129
-131	val_131
-133	val_133
-134	val_134
-134	val_134
-136	val_136
-137	val_137
-137	val_137
-138	val_138
-138	val_138
-138	val_138
-138	val_138
-143	val_143
-145	val_145
-146	val_146
-146	val_146
-149	val_149
-149	val_149
-15	val_15
-15	val_15
-150	val_150
-152	val_152
-152	val_152
-153	val_153
-155	val_155
-156	val_156
-157	val_157
-158	val_158
-160	val_160
-162	val_162
-163	val_163
-164	val_164
-164	val_164
-165	val_165
-165	val_165
-166	val_166
-167	val_167
-167	val_167
-167	val_167
-168	val_168
-169	val_169
-169	val_169
-169	val_169
-169	val_169
-17	val_17
-170	val_170
-172	val_172
-172	val_172
-174	val_174
-174	val_174
-175	val_175
-175	val_175
-176	val_176
-176	val_176
-177	val_177
-178	val_178
-179	val_179
-179	val_179
-18	val_18
-18	val_18
-180	val_180
-181	val_181
-183	val_183
-186	val_186
-187	val_187
-187	val_187
-187	val_187
-189	val_189
-19	val_19
-190	val_190
-191	val_191
-191	val_191
-192	val_192
-193	val_193
-193	val_193
-193	val_193
-194	val_194
-195	val_195
-195	val_195
-196	val_196
-197	val_197
-197	val_197
-199	val_199
-199	val_199
-199	val_199
-2	val_2
-Warning: Map Join MAPJOIN[46][bigTable=b] in task 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: explain
-from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-PREHOOK: type: QUERY
-POSTHOOK: query: explain
-from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-5 is a root stage
-  Stage-2 depends on stages: Stage-5
-  Stage-1 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-4 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-5
-    Spark
-      Edges:
-        Reducer 6 <- Map 5 (GROUP, 1)
-#### A masked pattern was here ####
-      Vertices:
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: a
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: ((key > '9') and value is not null) (type: boolean)
-                    Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                      Group By Operator
-                        keys: _col0 (type: string), _col1 (type: string)
-                        mode: hash
-                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                        Spark HashTable Sink Operator
-                          keys:
-                            0 key (type: string), value (type: string)
-                            1 _col0 (type: string), _col1 (type: string)
-            Local Work:
-              Map Reduce Local Work
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: s1
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key > '2') (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Spark HashTable Sink Operator
-                        keys:
-                          0 _col0 (type: string)
-                          1 _col0 (type: string)
-            Local Work:
-              Map Reduce Local Work
-        Map 5 
-            Map Operator Tree:
-                TableScan
-                  alias: s1
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: ((key > '2') and key is null) (type: boolean)
-                    Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                      Group By Operator
-                        aggregations: count()
-                        mode: hash
-                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                        Reduce Output Operator
-                          sort order: 
-                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                          value expressions: _col0 (type: bigint)
-        Reducer 6 
-            Local Work:
-              Map Reduce Local Work
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                Filter Operator
-                  predicate: (_col0 = 0) (type: boolean)
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: 0 (type: bigint)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      keys: _col0 (type: bigint)
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                      Spark HashTable Sink Operator
-                        keys:
-                          0 
-                          1 
-
-  Stage: Stage-2
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (SORT, 1)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Map Join Operator
-                    condition map:
-                         Left Semi Join 0 to 1
-                    keys:
-                      0 
-                      1 
-                    outputColumnNames: _col0, _col1
-                    input vertices:
-                      1 Reducer 6
-                    Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                    Map Join Operator
-                      condition map:
-                           Left Outer Join0 to 1
-                      keys:
-                        0 _col0 (type: string)
-                        1 _col0 (type: string)
-                      outputColumnNames: _col0, _col1, _col5
-                      input vertices:
-                        1 Map 4
-                      Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-                      Filter Operator
-                        predicate: _col5 is null (type: boolean)
-                        Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                        Select Operator
-                          expressions: _col0 (type: string), _col1 (type: string)
-                          outputColumnNames: _col0, _col1
-                          Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                          Reduce Output Operator
-                            key expressions: _col0 (type: string)
-                            sort order: +
-                            Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                            value expressions: _col1 (type: string)
-                  Map Join Operator
-                    condition map:
-                         Left Semi Join 0 to 1
-                    keys:
-                      0 key (type: string), value (type: string)
-                      1 _col0 (type: string), _col1 (type: string)
-                    outputColumnNames: _col0, _col1
-                    input vertices:
-                      1 Map 3
-                    Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                    File Output Operator
-                      compressed: false
-                      Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                      table:
-                          input format: org.apache.hadoop.mapred.TextInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                          name: default.src_4
-            Local Work:
-              Map Reduce Local Work
-        Reducer 2 
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 302 Data size: 3208 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.src_5
-
-  Stage: Stage-1
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src_5
-
-  Stage: Stage-3
-    Stats-Aggr Operator
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src_4
-
-  Stage: Stage-4
-    Stats-Aggr Operator
-
-Warning: Map Join MAPJOIN[46][bigTable=b] in task 'Stage-2:MAPRED' is a cross product
-PREHOOK: query: from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@src_4
-PREHOOK: Output: default@src_5
-POSTHOOK: query: from src b 
-INSERT OVERWRITE TABLE src_4 
-  select * 
-  where b.key in 
-   (select a.key 
-    from src a 
-    where b.value = a.value and a.key > '9'
-   ) 
-INSERT OVERWRITE TABLE src_5 
-  select *  
-  where b.key not in  ( select key from src s1 where s1.key > '2') 
-  order by key
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@src_4
-POSTHOOK: Output: default@src_5
-POSTHOOK: Lineage: src_4.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_4.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-RUN: Stage-5:MAPRED
-RUN: Stage-2:MAPRED
-RUN: Stage-1:MOVE
-RUN: Stage-0:MOVE
-RUN: Stage-3:STATS
-RUN: Stage-4:STATS
-PREHOOK: query: select * from src_4
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_4
-#### A masked pattern was here ####
-POSTHOOK: query: select * from src_4
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_4
-#### A masked pattern was here ####
-90	val_90
-90	val_90
-90	val_90
-92	val_92
-95	val_95
-95	val_95
-96	val_96
-97	val_97
-97	val_97
-98	val_98
-98	val_98
-PREHOOK: query: select * from src_5
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src_5
-#### A masked pattern was here ####
-POSTHOOK: query: select * from src_5
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src_5
-#### A masked pattern was here ####
-0	val_0
-0	val_0
-0	val_0
-10	val_10
-100	val_100
-100	val_100
-103	val_103
-103	val_103
-104	val_104
-104	val_104
-105	val_105
-11	val_11
-111	val_111
-113	val_113
-113	val_113
-114	val_114
-116	val_116
-118	val_118
-118	val_118
-119	val_119
-119	val_119
-119	val_119
-12	val_12
-12	val_12
-120	val_120
-120	val_120
-125	val_125
-125	val_125
-126	val_126
-128	val_128
-128	val_128
-128	val_128
-129	val_129
-129	val_129
-131	val_131
-133	val_133
-134	val_134
-134	val_134
-136	val_136
-137	val_137
-137	val_137
-138	val_138
-138	val_138
-138	val_138
-138	val_138
-143	val_143
-145	val_145
-146	val_146
-146	val_146
-149	val_149
-149	val_149
-15	val_15
-15	val_15
-150	val_150
-152	val_152
-152	val_152
-153	val_153
-155	val_155
-156	val_156
-157	val_157
-158	val_158
-160	val_160
-162	val_162
-163	val_163
-164	val_164
-164	val_164
-165	val_165
-165	val_165
-166	val_166
-167	val_167
-167	val_167
-167	val_167
-168	val_168
-169	val_169
-169	val_169
-169	val_169
-169	val_169
-17	val_17
-170	val_170
-172	val_172
-172	val_172
-174	val_174
-174	val_174
-175	val_175
-175	val_175
-176	val_176
-176	val_176
-177	val_177
-178	val_178
-179	val_179
-179	val_179
-18	val_18
-18	val_18
-180	val_180
-181	val_181
-183	val_183
-186	val_186
-187	val_187
-187	val_187
-187	val_187
-189	val_189
-19	val_19
-190	val_190
-191	val_191
-191	val_191
-192	val_192
-193	val_193
-193	val_193
-193	val_193
-194	val_194
-195	val_195
-195	val_195
-196	val_196
-197	val_197
-197	val_197
-199	val_199
-199	val_199
-199	val_199
-2	val_2
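
The sq_2_notin_nullcheck branch that triggers both warnings above implements SQL NOT IN semantics: if the subquery on the right of NOT IN produces even one NULL key, the predicate can never be true, so Hive joins against a one-row aggregate over that null check (the Map 8 filter "((key > '2') and key is null)" feeding count()). A hedged sketch of the check, assuming the standard qtest src table; the rewrite shown is conceptual, not the literal internal plan:

-- Query as written:
select * from src b
where b.key not in (select key from src s1 where s1.key > '2');

-- Conceptual guard Hive adds (the cross-product join partner): if this
-- count is nonzero, NOT IN must return no rows at all.
select count(*) from src s1 where s1.key > '2' and s1.key is null;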

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/spark/subquery_multiinsert.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/subquery_multiinsert.q.out b/ql/src/test/results/clientpositive/spark/subquery_multiinsert.q.out
index 04dd9b4..d6df85a 100644
--- a/ql/src/test/results/clientpositive/spark/subquery_multiinsert.q.out
+++ b/ql/src/test/results/clientpositive/spark/subquery_multiinsert.q.out
@@ -73,8 +73,8 @@ STAGE PLANS:
     Spark
       Edges:
         Reducer 2 <- Map 10 (PARTITION-LEVEL SORT, 1), Reducer 9 (PARTITION-LEVEL SORT, 1)
-        Reducer 3 <- Map 7 (PARTITION-LEVEL SORT, 1), Reducer 2 (PARTITION-LEVEL SORT, 1)
-        Reducer 5 <- Map 11 (PARTITION-LEVEL SORT, 1), Map 6 (PARTITION-LEVEL SORT, 1)
+        Reducer 3 <- Map 7 (PARTITION-LEVEL SORT, 4), Reducer 2 (PARTITION-LEVEL SORT, 4)
+        Reducer 5 <- Map 11 (PARTITION-LEVEL SORT, 4), Map 6 (PARTITION-LEVEL SORT, 4)
         Reducer 9 <- Map 8 (GROUP, 1)
         Reducer 4 <- Reducer 3 (SORT, 1)
 #### A masked pattern was here ####
@@ -105,21 +105,21 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((key > '9') and value is not null) (type: boolean)
-                    Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: key (type: string), value (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                       Group By Operator
                         keys: _col0 (type: string), _col1 (type: string)
                         mode: hash
                         outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                         Reduce Output Operator
                           key expressions: _col0 (type: string), _col1 (type: string)
                           sort order: ++
                           Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
-                          Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
         Map 7 
             Map Operator Tree:
                 TableScan
@@ -235,19 +235,17 @@ STAGE PLANS:
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                 Filter Operator
                   predicate: (_col0 = 0) (type: boolean)
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: 0 (type: bigint)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
-                      keys: _col0 (type: bigint)
+                      keys: 0 (type: bigint)
                       mode: hash
                       outputColumnNames: _col0
-                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         sort order: 
-                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
 
   Stage: Stage-1
     Move Operator
@@ -308,10 +306,10 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@src_4
 POSTHOOK: Output: default@src_5
-POSTHOOK: Lineage: src_4.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_4.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
 RUN: Stage-2:MAPRED
 RUN: Stage-1:MOVE
 RUN: Stage-0:MOVE
@@ -514,16 +512,16 @@ STAGE PLANS:
                   Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((key > '9') and value is not null) (type: boolean)
-                    Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: key (type: string), value (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                       Group By Operator
                         keys: _col0 (type: string), _col1 (type: string)
                         mode: hash
                         outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                         Spark HashTable Sink Operator
                           keys:
                             0 key (type: string), value (type: string)
@@ -578,16 +576,14 @@ STAGE PLANS:
                 Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                 Filter Operator
                   predicate: (_col0 = 0) (type: boolean)
-                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: 0 (type: bigint)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
-                      keys: _col0 (type: bigint)
+                      keys: 0 (type: bigint)
                       mode: hash
                       outputColumnNames: _col0
-                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
                       Spark HashTable Sink Operator
                         keys:
                           0 
@@ -730,10 +726,10 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@src_4
 POSTHOOK: Output: default@src_5
-POSTHOOK: Lineage: src_4.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_4.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.key EXPRESSION [(src)b.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: src_5.value EXPRESSION [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_4.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.key SIMPLE [(src)b.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_5.value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ]
 RUN: Stage-5:MAPRED
 RUN: Stage-2:MAPRED
 RUN: Stage-1:MOVE

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.java1.7.out b/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.java1.7.out
deleted file mode 100644
index 86b7544..0000000
--- a/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.java1.7.out
+++ /dev/null
@@ -1,217 +0,0 @@
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE over1korc
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE over1korc
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1k
-POSTHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1k
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@over1k
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@over1k
-PREHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1korc
-PREHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1korc
-POSTHOOK: Lineage: over1korc.b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1korc.bin SIMPLE [(over1k)over1k.FieldSchema(name:bin, type:binary, comment:null), ]
-POSTHOOK: Lineage: over1korc.bo SIMPLE [(over1k)over1k.FieldSchema(name:bo, type:boolean, comment:null), ]
-POSTHOOK: Lineage: over1korc.d SIMPLE [(over1k)over1k.FieldSchema(name:d, type:double, comment:null), ]
-POSTHOOK: Lineage: over1korc.dec SIMPLE [(over1k)over1k.FieldSchema(name:dec, type:decimal(4,2), comment:null), ]
-POSTHOOK: Lineage: over1korc.f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1korc.i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1korc.s SIMPLE [(over1k)over1k.FieldSchema(name:s, type:string, comment:null), ]
-POSTHOOK: Lineage: over1korc.si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1korc.t SIMPLE [(over1k)over1k.FieldSchema(name:t, type:tinyint, comment:null), ]
-POSTHOOK: Lineage: over1korc.ts SIMPLE [(over1k)over1k.FieldSchema(name:ts, type:timestamp, comment:null), ]
-PREHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (GROUP, 2)
-        Reducer 3 <- Reducer 2 (SORT, 1)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: over1korc
-                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: i (type: int)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      aggregations: avg(50), avg(50.0), avg(50)
-                      keys: _col0 (type: int)
-                      mode: hash
-                      outputColumnNames: _col0, _col1, _col2, _col3
-                      Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: int)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col1 (type: struct<count:bigint,sum:double,input:int>), _col2 (type: struct<count:bigint,sum:double,input:double>), _col3 (type: struct<count:bigint,sum:decimal(12,0),input:decimal(10,0)>)
-            Execution mode: vectorized
-        Reducer 2 
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: avg(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2)
-                keys: KEY._col0 (type: int)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: int)
-                  sort order: +
-                  Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                  TopN Hash Memory Usage: 0.1
-                  value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: decimal(14,4))
-        Reducer 3 
-            Execution mode: vectorized
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: decimal(14,4))
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                Limit
-                  Number of rows: 10
-                  Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: 10
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-65536	50.0	50.0	50.0000
-65537	50.0	50.0	50.0000
-65538	50.0	50.0	50.0000
-65539	50.0	50.0	50.0000
-65540	50.0	50.0	50.0000
-65541	50.0	50.0	50.0000
-65542	50.0	50.0	50.0000
-65543	50.0	50.0	50.0000
-65544	50.0	50.0	50.0000
-65545	50.0	50.0	50.0000

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.java1.8.out b/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.java1.8.out
deleted file mode 100644
index 69f4754..0000000
--- a/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.java1.8.out
+++ /dev/null
@@ -1,203 +0,0 @@
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE over1korc
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE over1korc
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1k
-POSTHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1k
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@over1k
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@over1k
-PREHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1korc
-PREHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1korc
-POSTHOOK: Lineage: over1korc.b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1korc.bin SIMPLE [(over1k)over1k.FieldSchema(name:bin, type:binary, comment:null), ]
-POSTHOOK: Lineage: over1korc.bo SIMPLE [(over1k)over1k.FieldSchema(name:bo, type:boolean, comment:null), ]
-POSTHOOK: Lineage: over1korc.d SIMPLE [(over1k)over1k.FieldSchema(name:d, type:double, comment:null), ]
-POSTHOOK: Lineage: over1korc.dec SIMPLE [(over1k)over1k.FieldSchema(name:dec, type:decimal(4,2), comment:null), ]
-POSTHOOK: Lineage: over1korc.f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1korc.i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1korc.s SIMPLE [(over1k)over1k.FieldSchema(name:s, type:string, comment:null), ]
-POSTHOOK: Lineage: over1korc.si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1korc.t SIMPLE [(over1k)over1k.FieldSchema(name:t, type:tinyint, comment:null), ]
-POSTHOOK: Lineage: over1korc.ts SIMPLE [(over1k)over1k.FieldSchema(name:ts, type:timestamp, comment:null), ]
-PREHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i LIMIT 10
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i LIMIT 10
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (GROUP, 2)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: over1korc
-                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: i (type: int)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      aggregations: avg(50), avg(50.0), avg(50)
-                      keys: _col0 (type: int)
-                      mode: hash
-                      outputColumnNames: _col0, _col1, _col2, _col3
-                      Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: int)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col1 (type: struct<count:bigint,sum:double,input:int>), _col2 (type: struct<count:bigint,sum:double,input:double>), _col3 (type: struct<count:bigint,sum:decimal(12,0),input:decimal(10,0)>)
-            Execution mode: vectorized
-        Reducer 2 
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: avg(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2)
-                keys: KEY._col0 (type: int)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                Limit
-                  Number of rows: 10
-                  Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: 10
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i LIMIT 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i LIMIT 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-65636	50.0	50.0	50
-65550	50.0	50.0	50
-65592	50.0	50.0	50
-65744	50.0	50.0	50
-65722	50.0	50.0	50
-65668	50.0	50.0	50
-65598	50.0	50.0	50
-65596	50.0	50.0	50
-65568	50.0	50.0	50
-65738	50.0	50.0	50

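(The two deletions above remove the JDK-specific golden files: vector_cast_constant.q.java1.7.out and vector_cast_constant.q.java1.8.out were parallel copies, kept — per their "-- JAVA_VERSION_SPECIFIC_OUTPUT" markers — only because the expected output differed across JDKs; note the java1.8 variant has no ORDER BY i and prints the decimal average as 50 rather than 50.0000. With a single deterministic expectation, the one vector_cast_constant.q.out updated in the next hunk suffices.)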
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out b/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out
index 63cdc24..0459d93 100644
--- a/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out
+++ b/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out
@@ -102,14 +102,14 @@ PREHOOK: query: EXPLAIN SELECT
   AVG(CAST(50 AS INT)) AS `avg_int_ok`,
   AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
   AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i LIMIT 10
+  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN SELECT 
   i,
   AVG(CAST(50 AS INT)) AS `avg_int_ok`,
   AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
   AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i LIMIT 10
+  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -119,7 +119,8 @@ STAGE PLANS:
   Stage: Stage-1
     Spark
       Edges:
-        Reducer 2 <- Map 1 (GROUP, 2)
+        Reducer 2 <- Map 1 (GROUP, 4)
+        Reducer 3 <- Reducer 2 (SORT, 1)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -129,11 +130,11 @@ STAGE PLANS:
                   Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: i (type: int)
-                    outputColumnNames: i
+                    outputColumnNames: _col0
                     Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
-                      aggregations: avg(50), avg(UDFToDouble(50)), avg(CAST( 50 AS decimal(10,0)))
-                      keys: i (type: int)
+                      aggregations: avg(50), avg(50.0), avg(50)
+                      keys: _col0 (type: int)
                       mode: hash
                       outputColumnNames: _col0, _col1, _col2, _col3
                       Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
@@ -152,6 +153,19 @@ STAGE PLANS:
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2, _col3
                 Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: int)
+                  sort order: +
+                  Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
+                  TopN Hash Memory Usage: 0.1
+                  value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: decimal(14,4))
+        Reducer 3 
+            Execution mode: vectorized
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: decimal(14,4))
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
                 Limit
                   Number of rows: 10
                   Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
@@ -159,8 +173,8 @@ STAGE PLANS:
                     compressed: false
                     Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
                     table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                         serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
@@ -174,7 +188,7 @@ PREHOOK: query: SELECT
   AVG(CAST(50 AS INT)) AS `avg_int_ok`,
   AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
   AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i LIMIT 10
+  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
 PREHOOK: type: QUERY
 PREHOOK: Input: default@over1korc
 #### A masked pattern was here ####
@@ -183,17 +197,17 @@ POSTHOOK: query: SELECT
   AVG(CAST(50 AS INT)) AS `avg_int_ok`,
   AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
   AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i LIMIT 10
+  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over1korc
 #### A masked pattern was here ####
-65598	50.0	50.0	50
-65694	50.0	50.0	50
-65678	50.0	50.0	50
-65684	50.0	50.0	50
-65596	50.0	50.0	50
-65692	50.0	50.0	50
-65630	50.0	50.0	50
-65674	50.0	50.0	50
-65628	50.0	50.0	50
-65776	50.0	50.0	50
+65536	50.0	50.0	50.0000
+65537	50.0	50.0	50.0000
+65538	50.0	50.0	50.0000
+65539	50.0	50.0	50.0000
+65540	50.0	50.0	50.0000
+65541	50.0	50.0	50.0000
+65542	50.0	50.0	50.0000
+65543	50.0	50.0	50.0000
+65544	50.0	50.0	50.0000
+65545	50.0	50.0	50.0000

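(Two coordinated changes make this expectation JDK-independent: the query now ends in ORDER BY i, so the ten rows surviving LIMIT 10 are the deterministic run 65536 through 65545 rather than whichever ten groups a given JVM's hashing surfaced first — hence the extra Reducer 3 SORT stage in the plan — and the AVG(CAST(50 AS DECIMAL)) column is now printed at its full decimal(14,4) scale, 50.0000 instead of 50.)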
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/stats_list_bucket.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/stats_list_bucket.q.java1.7.out b/ql/src/test/results/clientpositive/stats_list_bucket.q.java1.7.out
deleted file mode 100644
index a4908bc..0000000
--- a/ql/src/test/results/clientpositive/stats_list_bucket.q.java1.7.out
+++ /dev/null
@@ -1,191 +0,0 @@
-PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-drop table stats_list_bucket
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-drop table stats_list_bucket
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: drop table stats_list_bucket_1
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table stats_list_bucket_1
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table stats_list_bucket (
-  c1 string,
-  c2 string
-) partitioned by (ds string, hr string)
-skewed by (c1, c2) on  (('466','val_466'),('287','val_287'),('82','val_82'))
-stored as directories
-stored as rcfile
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@stats_list_bucket
-POSTHOOK: query: create table stats_list_bucket (
-  c1 string,
-  c2 string
-) partitioned by (ds string, hr string)
-skewed by (c1, c2) on  (('466','val_466'),('287','val_287'),('82','val_82'))
-stored as directories
-stored as rcfile
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@stats_list_bucket
-PREHOOK: query: -- Try partitioned table with list bucketing.
--- The stats should show 500 rows loaded, as many rows as the src table has.
-
-insert overwrite table stats_list_bucket partition (ds = '2008-04-08',  hr = '11')
-  select key, value from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@stats_list_bucket@ds=2008-04-08/hr=11
-POSTHOOK: query: -- Try partitioned table with list bucketing.
--- The stats should show 500 rows loaded, as many rows as the src table has.
-
-insert overwrite table stats_list_bucket partition (ds = '2008-04-08',  hr = '11')
-  select key, value from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@stats_list_bucket@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: stats_list_bucket PARTITION(ds=2008-04-08,hr=11).c1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: stats_list_bucket PARTITION(ds=2008-04-08,hr=11).c2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: desc formatted stats_list_bucket partition (ds = '2008-04-08',  hr = '11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@stats_list_bucket
-POSTHOOK: query: desc formatted stats_list_bucket partition (ds = '2008-04-08',  hr = '11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@stats_list_bucket
-# col_name            	data_type           	comment             
-	 	 
-c1                  	string              	                    
-c2                  	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	stats_list_bucket   	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	4                   
-	numRows             	500                 
-	rawDataSize         	4812                
-	totalSize           	5522                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[c1, c2]            	 
-Skewed Values:      	[[466, val_466], [287, val_287], [82, val_82]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[82, val_82]=/stats_list_bucket/ds=2008-04-08/hr=11/c1=82/c2=val_82, [466, val_466]=/stats_list_bucket/ds=2008-04-08/hr=11/c1=466/c2=val_466, [287, val_287]=/stats_list_bucket/ds=2008-04-08/hr=11/c1=287/c2=val_287}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: -- Also try non-partitioned table with list bucketing.
--- Stats should show the same number of rows.
-
-create table stats_list_bucket_1 (
-  c1 string,
-  c2 string
-)
-skewed by (c1, c2) on  (('466','val_466'),('287','val_287'),('82','val_82'))
-stored as directories
-stored as rcfile
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@stats_list_bucket_1
-POSTHOOK: query: -- Also try non-partitioned table with list bucketing.
--- Stats should show the same number of rows.
-
-create table stats_list_bucket_1 (
-  c1 string,
-  c2 string
-)
-skewed by (c1, c2) on  (('466','val_466'),('287','val_287'),('82','val_82'))
-stored as directories
-stored as rcfile
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@stats_list_bucket_1
-PREHOOK: query: insert overwrite table stats_list_bucket_1
-  select key, value from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@stats_list_bucket_1
-POSTHOOK: query: insert overwrite table stats_list_bucket_1
-  select key, value from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@stats_list_bucket_1
-POSTHOOK: Lineage: stats_list_bucket_1.c1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: stats_list_bucket_1.c2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: desc formatted stats_list_bucket_1
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@stats_list_bucket_1
-POSTHOOK: query: desc formatted stats_list_bucket_1
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@stats_list_bucket_1
-# col_name            	data_type           	comment             
-	 	 
-c1                  	string              	                    
-c2                  	string              	                    
-	 	 
-# Detailed Table Information	 	 
-Database:           	default             	 
-#### A masked pattern was here ####
-Retention:          	0                   	 
-#### A masked pattern was here ####
-Table Type:         	MANAGED_TABLE       	 
-Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	4                   
-	numRows             	500                 
-	rawDataSize         	4812                
-	totalSize           	5522                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[c1, c2]            	 
-Skewed Values:      	[[466, val_466], [287, val_287], [82, val_82]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[82, val_82]=/stats_list_bucket_1/c1=82/c2=val_82, [466, val_466]=/stats_list_bucket_1/c1=466/c2=val_466, [287, val_287]=/stats_list_bucket_1/c1=287/c2=val_287}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: drop table stats_list_bucket
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@stats_list_bucket
-PREHOOK: Output: default@stats_list_bucket
-POSTHOOK: query: drop table stats_list_bucket
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@stats_list_bucket
-POSTHOOK: Output: default@stats_list_bucket
-PREHOOK: query: drop table stats_list_bucket_1
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@stats_list_bucket_1
-PREHOOK: Output: default@stats_list_bucket_1
-POSTHOOK: query: drop table stats_list_bucket_1
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@stats_list_bucket_1
-POSTHOOK: Output: default@stats_list_bucket_1

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/stats_list_bucket.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/stats_list_bucket.q.java1.8.out b/ql/src/test/results/clientpositive/stats_list_bucket.q.java1.8.out
deleted file mode 100644
index 8688cee..0000000
--- a/ql/src/test/results/clientpositive/stats_list_bucket.q.java1.8.out
+++ /dev/null
@@ -1,193 +0,0 @@
-PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-drop table stats_list_bucket
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-drop table stats_list_bucket
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: drop table stats_list_bucket_1
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table stats_list_bucket_1
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table stats_list_bucket (
-  c1 string,
-  c2 string
-) partitioned by (ds string, hr string)
-skewed by (c1, c2) on  (('466','val_466'),('287','val_287'),('82','val_82'))
-stored as directories
-stored as rcfile
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@stats_list_bucket
-POSTHOOK: query: create table stats_list_bucket (
-  c1 string,
-  c2 string
-) partitioned by (ds string, hr string)
-skewed by (c1, c2) on  (('466','val_466'),('287','val_287'),('82','val_82'))
-stored as directories
-stored as rcfile
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@stats_list_bucket
-PREHOOK: query: -- Make sure we use hashed IDs during stats publishing.
--- Try partitioned table with list bucketing.
--- The stats should show 500 rows loaded, as many rows as the src table has.
-
-insert overwrite table stats_list_bucket partition (ds = '2008-04-08',  hr = '11')
-  select key, value from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@stats_list_bucket@ds=2008-04-08/hr=11
-POSTHOOK: query: -- Make sure we use hashed IDs during stats publishing.
--- Try partitioned table with list bucketing.
--- The stats should show 500 rows loaded, as many rows as the src table has.
-
-insert overwrite table stats_list_bucket partition (ds = '2008-04-08',  hr = '11')
-  select key, value from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@stats_list_bucket@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: stats_list_bucket PARTITION(ds=2008-04-08,hr=11).c1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: stats_list_bucket PARTITION(ds=2008-04-08,hr=11).c2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: desc formatted stats_list_bucket partition (ds = '2008-04-08',  hr = '11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@stats_list_bucket
-POSTHOOK: query: desc formatted stats_list_bucket partition (ds = '2008-04-08',  hr = '11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@stats_list_bucket
-# col_name            	data_type           	comment             
-	 	 
-c1                  	string              	                    
-c2                  	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	stats_list_bucket   	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	4                   
-	numRows             	500                 
-	rawDataSize         	4812                
-	totalSize           	5522                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[c1, c2]            	 
-Skewed Values:      	[[466, val_466], [287, val_287], [82, val_82]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[466, val_466]=/stats_list_bucket/ds=2008-04-08/hr=11/c1=466/c2=val_466, [287, val_287]=/stats_list_bucket/ds=2008-04-08/hr=11/c1=287/c2=val_287, [82, val_82]=/stats_list_bucket/ds=2008-04-08/hr=11/c1=82/c2=val_82}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: -- Also try non-partitioned table with list bucketing.
--- Stats should show the same number of rows.
-
-create table stats_list_bucket_1 (
-  c1 string,
-  c2 string
-)
-skewed by (c1, c2) on  (('466','val_466'),('287','val_287'),('82','val_82'))
-stored as directories
-stored as rcfile
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@stats_list_bucket_1
-POSTHOOK: query: -- Also try non-partitioned table with list bucketing.
--- Stats should show the same number of rows.
-
-create table stats_list_bucket_1 (
-  c1 string,
-  c2 string
-)
-skewed by (c1, c2) on  (('466','val_466'),('287','val_287'),('82','val_82'))
-stored as directories
-stored as rcfile
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@stats_list_bucket_1
-PREHOOK: query: insert overwrite table stats_list_bucket_1
-  select key, value from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@stats_list_bucket_1
-POSTHOOK: query: insert overwrite table stats_list_bucket_1
-  select key, value from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@stats_list_bucket_1
-POSTHOOK: Lineage: stats_list_bucket_1.c1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: stats_list_bucket_1.c2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: desc formatted stats_list_bucket_1
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@stats_list_bucket_1
-POSTHOOK: query: desc formatted stats_list_bucket_1
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@stats_list_bucket_1
-# col_name            	data_type           	comment             
-	 	 
-c1                  	string              	                    
-c2                  	string              	                    
-	 	 
-# Detailed Table Information	 	 
-Database:           	default             	 
-#### A masked pattern was here ####
-Retention:          	0                   	 
-#### A masked pattern was here ####
-Table Type:         	MANAGED_TABLE       	 
-Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	4                   
-	numRows             	500                 
-	rawDataSize         	4812                
-	totalSize           	5522                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[c1, c2]            	 
-Skewed Values:      	[[466, val_466], [287, val_287], [82, val_82]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[466, val_466]=/stats_list_bucket_1/c1=466/c2=val_466, [82, val_82]=/stats_list_bucket_1/c1=82/c2=val_82, [287, val_287]=/stats_list_bucket_1/c1=287/c2=val_287}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: drop table stats_list_bucket
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@stats_list_bucket
-PREHOOK: Output: default@stats_list_bucket
-POSTHOOK: query: drop table stats_list_bucket
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@stats_list_bucket
-POSTHOOK: Output: default@stats_list_bucket
-PREHOOK: query: drop table stats_list_bucket_1
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@stats_list_bucket_1
-PREHOOK: Output: default@stats_list_bucket_1
-POSTHOOK: query: drop table stats_list_bucket_1
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@stats_list_bucket_1
-POSTHOOK: Output: default@stats_list_bucket_1


[34/34] hive git commit: HIVE-13409: Fix JDK8 test failures related to COLUMN_STATS_ACCURATE (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
HIVE-13409: Fix JDK8 test failures related to COLUMN_STATS_ACCURATE (Mohit Sabharwal, reviewed by Sergio Pena)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/cdb872a1
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/cdb872a1
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/cdb872a1

Branch: refs/heads/master
Commit: cdb872a117c95e0203c1cae8ffe8add40cfd4f0f
Parents: 2254161
Author: Mohit Sabharwal <mo...@cloudera.com>
Authored: Tue May 24 09:30:32 2016 -0500
Committer: Sergio Pena <se...@cloudera.com>
Committed: Fri May 27 21:08:31 2016 -0500

----------------------------------------------------------------------
 .../test/results/clientpositive/bucket1.q.out   |  4 +-
 .../test/results/clientpositive/bucket2.q.out   |  4 +-
 .../test/results/clientpositive/bucket3.q.out   |  4 +-
 .../test/results/clientpositive/bucket4.q.out   |  4 +-
 .../test/results/clientpositive/bucket5.q.out   |  4 +-
 .../results/clientpositive/bucket_many.q.out    |  4 +-
 .../columnStatsUpdateForStatsOptimizer_1.q.out  |  8 +--
 .../columnStatsUpdateForStatsOptimizer_2.q.out  |  2 +-
 .../constantPropagateForSubQuery.q.out          |  8 +--
 ql/src/test/results/clientpositive/ctas.q.out   |  4 +-
 .../results/clientpositive/describe_table.q.out |  4 +-
 .../disable_merge_for_bucketing.q.out           |  4 +-
 .../extrapolate_part_stats_full.q.out           | 24 ++++-----
 .../extrapolate_part_stats_partial.q.out        | 32 +++++------
 .../extrapolate_part_stats_partial_ndv.q.out    | 16 +++---
 .../clientpositive/fouter_join_ppr.q.out        | 40 +++++++-------
 .../clientpositive/groupby_map_ppr.q.out        |  4 +-
 .../groupby_map_ppr_multi_distinct.q.out        |  4 +-
 .../results/clientpositive/groupby_ppr.q.out    |  4 +-
 .../groupby_ppr_multi_distinct.q.out            |  4 +-
 .../test/results/clientpositive/input23.q.out   |  2 +-
 .../test/results/clientpositive/input42.q.out   | 12 ++---
 .../results/clientpositive/input_part1.q.out    |  2 +-
 .../results/clientpositive/input_part2.q.out    |  4 +-
 .../results/clientpositive/input_part7.q.out    |  4 +-
 .../results/clientpositive/input_part9.q.out    |  4 +-
 ql/src/test/results/clientpositive/join17.q.out |  4 +-
 ql/src/test/results/clientpositive/join26.q.out |  2 +-
 ql/src/test/results/clientpositive/join32.q.out | 10 ++--
 .../clientpositive/join32_lessSize.q.out        | 46 ++++++++--------
 ql/src/test/results/clientpositive/join33.q.out | 10 ++--
 ql/src/test/results/clientpositive/join34.q.out |  8 +--
 ql/src/test/results/clientpositive/join35.q.out | 12 ++---
 ql/src/test/results/clientpositive/join9.q.out  |  6 +--
 .../results/clientpositive/join_map_ppr.q.out   |  4 +-
 .../clientpositive/list_bucket_dml_1.q.out      |  4 +-
 .../clientpositive/list_bucket_dml_14.q.out     |  4 +-
 .../clientpositive/list_bucket_dml_3.q.out      |  4 +-
 .../clientpositive/list_bucket_dml_7.q.out      |  8 +--
 .../results/clientpositive/load_dyn_part8.q.out |  8 +--
 .../clientpositive/louter_join_ppr.q.out        | 36 ++++++-------
 .../clientpositive/mapjoin_mapjoin.q.out        | 16 +++---
 .../offset_limit_global_optimizer.q.out         | 52 +++++++++---------
 .../clientpositive/optimize_nullscan.q.out      | 56 ++++++++++----------
 .../partition_coltype_literals.q.out            |  4 +-
 ql/src/test/results/clientpositive/pcr.q.out    | 10 ++--
 ql/src/test/results/clientpositive/pcs.q.out    | 38 ++++++-------
 .../clientpositive/ppd_join_filter.q.out        | 32 +++++------
 ql/src/test/results/clientpositive/ppd_vc.q.out | 20 +++----
 .../clientpositive/ppr_allchildsarenull.q.out   | 12 ++---
 .../clientpositive/rand_partitionpruner1.q.out  |  4 +-
 .../clientpositive/rand_partitionpruner2.q.out  |  4 +-
 .../clientpositive/rand_partitionpruner3.q.out  |  4 +-
 .../clientpositive/reduce_deduplicate.q.out     |  4 +-
 .../results/clientpositive/regexp_extract.q.out |  8 +--
 .../clientpositive/router_join_ppr.q.out        | 36 ++++++-------
 .../test/results/clientpositive/sample1.q.out   |  2 +-
 .../test/results/clientpositive/sample2.q.out   |  4 +-
 .../test/results/clientpositive/sample4.q.out   |  4 +-
 .../test/results/clientpositive/sample5.q.out   |  4 +-
 .../test/results/clientpositive/sample6.q.out   | 32 +++++------
 .../test/results/clientpositive/sample7.q.out   |  4 +-
 .../test/results/clientpositive/sample8.q.out   |  8 +--
 .../test/results/clientpositive/sample9.q.out   |  4 +-
 .../clientpositive/schema_evol_stats.q.out      |  8 +--
 .../clientpositive/serde_user_properties.q.out  | 12 ++---
 .../results/clientpositive/spark/bucket2.q.out  |  4 +-
 .../results/clientpositive/spark/bucket3.q.out  |  4 +-
 .../results/clientpositive/spark/bucket4.q.out  |  4 +-
 .../results/clientpositive/spark/ctas.q.out     |  4 +-
 .../spark/disable_merge_for_bucketing.q.out     |  4 +-
 .../clientpositive/spark/groupby_map_ppr.q.out  |  4 +-
 .../spark/groupby_map_ppr_multi_distinct.q.out  |  4 +-
 .../clientpositive/spark/groupby_ppr.q.out      |  4 +-
 .../spark/groupby_ppr_multi_distinct.q.out      |  4 +-
 .../clientpositive/spark/input_part2.q.out      |  4 +-
 .../results/clientpositive/spark/join17.q.out   |  8 +--
 .../results/clientpositive/spark/join26.q.out   | 10 ++--
 .../results/clientpositive/spark/join32.q.out   | 10 ++--
 .../clientpositive/spark/join32_lessSize.q.out  | 46 ++++++++--------
 .../results/clientpositive/spark/join33.q.out   | 10 ++--
 .../results/clientpositive/spark/join34.q.out   | 12 ++---
 .../results/clientpositive/spark/join35.q.out   | 12 ++---
 .../results/clientpositive/spark/join9.q.out    |  6 +--
 .../clientpositive/spark/join_map_ppr.q.out     | 12 ++---
 .../clientpositive/spark/load_dyn_part8.q.out   |  8 +--
 .../clientpositive/spark/louter_join_ppr.q.out  | 36 ++++++-------
 .../clientpositive/spark/mapjoin_mapjoin.q.out  | 16 +++---
 .../spark/optimize_nullscan.q.out               | 56 ++++++++++----------
 .../test/results/clientpositive/spark/pcr.q.out | 10 ++--
 .../clientpositive/spark/ppd_join_filter.q.out  | 32 +++++------
 .../clientpositive/spark/router_join_ppr.q.out  | 36 ++++++-------
 .../results/clientpositive/spark/sample1.q.out  |  2 +-
 .../results/clientpositive/spark/sample2.q.out  |  4 +-
 .../results/clientpositive/spark/sample4.q.out  |  4 +-
 .../results/clientpositive/spark/sample5.q.out  |  4 +-
 .../results/clientpositive/spark/sample6.q.out  | 32 +++++------
 .../results/clientpositive/spark/sample7.q.out  |  4 +-
 .../results/clientpositive/spark/sample8.q.out  | 10 ++--
 .../results/clientpositive/spark/stats0.q.out   |  8 +--
 .../clientpositive/spark/stats_only_null.q.out  |  4 +-
 .../spark/subquery_multiinsert.q.out            |  4 +-
 .../clientpositive/spark/transform_ppr1.q.out   |  8 +--
 .../clientpositive/spark/transform_ppr2.q.out   |  4 +-
 .../spark/vector_cast_constant.q.out            |  2 +-
 ql/src/test/results/clientpositive/stats0.q.out |  8 +--
 .../clientpositive/stats_invalidation.q.out     |  2 +-
 .../clientpositive/stats_only_null.q.out        |  4 +-
 .../results/clientpositive/tez/bucket3.q.out    |  4 +-
 .../results/clientpositive/tez/bucket4.q.out    |  4 +-
 .../test/results/clientpositive/tez/ctas.q.out  |  4 +-
 .../tez/disable_merge_for_bucketing.q.out       |  4 +-
 .../clientpositive/tez/mapjoin_mapjoin.q.out    | 16 +++---
 .../clientpositive/tez/optimize_nullscan.q.out  | 56 ++++++++++----------
 .../results/clientpositive/tez/sample1.q.out    |  2 +-
 .../clientpositive/tez/schema_evol_stats.q.out  |  8 +--
 .../clientpositive/tez/stats_only_null.q.out    |  4 +-
 .../clientpositive/tez/transform_ppr1.q.out     |  8 +--
 .../clientpositive/tez/transform_ppr2.q.out     |  4 +-
 .../results/clientpositive/transform_ppr1.q.out |  8 +--
 .../results/clientpositive/transform_ppr2.q.out |  4 +-
 .../results/clientpositive/udf_explode.q.out    |  8 +--
 .../results/clientpositive/udtf_explode.q.out   |  8 +--
 .../test/results/clientpositive/union_ppr.q.out |  4 +-
 124 files changed, 684 insertions(+), 684 deletions(-)
----------------------------------------------------------------------
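
The property reordering visible throughout the hunks below — COLUMN_STATS_ACCURATE changing from {"COLUMN_STATS":...,"BASIC_STATS":...} to {"BASIC_STATS":...,"COLUMN_STATS":...} — is the substance of this commit: the value is JSON rendered from a Java property map, and JDK8 iterates HashMap entries in a different order than JDK7, so the same map could print differently depending on which JDK ran the tests. The regenerated golden files assume a canonical order ("BASIC_STATS" before "COLUMN_STATS", i.e. sorted keys), which is the natural way to make such output deterministic. Below is a minimal sketch of that idea, assuming a flat property map whose values are already JSON fragments — an illustration only, not Hive's actual serializer; the class and method names are invented:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;

    public class StableJson {
        // Render a property map as JSON with keys in natural (sorted) order,
        // so the output does not depend on HashMap iteration order.
        static String toSortedJson(Map<String, String> props) {
            StringBuilder sb = new StringBuilder("{");
            boolean first = true;
            // TreeMap copies the entries and iterates them in sorted key order.
            for (Map.Entry<String, String> e : new TreeMap<>(props).entrySet()) {
                if (!first) sb.append(',');
                // Values are assumed to already be valid JSON fragments.
                sb.append('"').append(e.getKey()).append("\":").append(e.getValue());
                first = false;
            }
            return sb.append('}').toString();
        }

        public static void main(String[] args) {
            Map<String, String> stats = new HashMap<>();
            stats.put("COLUMN_STATS", "{\"key\":\"true\",\"value\":\"true\"}");
            stats.put("BASIC_STATS", "\"true\"");
            // Prints the canonical form used by the updated .q.out files:
            // {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
            System.out.println(toSortedJson(stats));
        }
    }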


http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/bucket1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket1.q.out b/ql/src/test/results/clientpositive/bucket1.q.out
index 78fb530..92ecd67 100644
--- a/ql/src/test/results/clientpositive/bucket1.q.out
+++ b/ql/src/test/results/clientpositive/bucket1.q.out
@@ -52,7 +52,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -72,7 +72,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/bucket2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket2.q.out b/ql/src/test/results/clientpositive/bucket2.q.out
index 297984e..b849ed3 100644
--- a/ql/src/test/results/clientpositive/bucket2.q.out
+++ b/ql/src/test/results/clientpositive/bucket2.q.out
@@ -52,7 +52,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -72,7 +72,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/bucket3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket3.q.out b/ql/src/test/results/clientpositive/bucket3.q.out
index 29afaea..fa8b0f9 100644
--- a/ql/src/test/results/clientpositive/bucket3.q.out
+++ b/ql/src/test/results/clientpositive/bucket3.q.out
@@ -52,7 +52,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -72,7 +72,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/bucket4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket4.q.out b/ql/src/test/results/clientpositive/bucket4.q.out
index 803a2bb..ec28d09 100644
--- a/ql/src/test/results/clientpositive/bucket4.q.out
+++ b/ql/src/test/results/clientpositive/bucket4.q.out
@@ -49,7 +49,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -69,7 +69,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/bucket5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket5.q.out b/ql/src/test/results/clientpositive/bucket5.q.out
index 2e37eef..bd1013a 100644
--- a/ql/src/test/results/clientpositive/bucket5.q.out
+++ b/ql/src/test/results/clientpositive/bucket5.q.out
@@ -94,7 +94,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -114,7 +114,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/bucket_many.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/bucket_many.q.out b/ql/src/test/results/clientpositive/bucket_many.q.out
index 9bd90b1..87954e0 100644
--- a/ql/src/test/results/clientpositive/bucket_many.q.out
+++ b/ql/src/test/results/clientpositive/bucket_many.q.out
@@ -48,7 +48,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -68,7 +68,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_1.q.out b/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_1.q.out
index d812193..00f3776 100644
--- a/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_1.q.out
+++ b/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_1.q.out
@@ -204,7 +204,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"year\":\"true\",\"month\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"year\":\"true\",\"month\":\"true\"}}
 	numFiles            	1                   
 	numRows             	3                   
 	rawDataSize         	21                  
@@ -429,7 +429,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"year\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"year\":\"true\"}}
 	numFiles            	2                   
 	numRows             	4                   
 	rawDataSize         	28                  
@@ -552,7 +552,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"month\":\"true\",\"year\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"month\":\"true\",\"year\":\"true\"}}
 	numFiles            	2                   
 	numRows             	4                   
 	rawDataSize         	28                  
@@ -737,7 +737,7 @@ Database:           	default
 Table:              	calendarp           	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"year\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"year\":\"true\"}}
 	numFiles            	1                   
 	numRows             	3                   
 	rawDataSize         	12                  

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out b/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out
index 179bc66..48e7a40 100644
--- a/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out
+++ b/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out
@@ -81,7 +81,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"year\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"year\":\"true\"}}
 	numFiles            	2                   
 	numRows             	3                   
 	rawDataSize         	24                  

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/constantPropagateForSubQuery.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/constantPropagateForSubQuery.q.out b/ql/src/test/results/clientpositive/constantPropagateForSubQuery.q.out
index 2aa8d77..f90cdb6 100644
--- a/ql/src/test/results/clientpositive/constantPropagateForSubQuery.q.out
+++ b/ql/src/test/results/clientpositive/constantPropagateForSubQuery.q.out
@@ -60,7 +60,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -80,7 +80,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -104,7 +104,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -124,7 +124,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/ctas.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/ctas.q.out b/ql/src/test/results/clientpositive/ctas.q.out
index 8d6c117..afd6d7c 100644
--- a/ql/src/test/results/clientpositive/ctas.q.out
+++ b/ql/src/test/results/clientpositive/ctas.q.out
@@ -731,7 +731,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -751,7 +751,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/describe_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/describe_table.q.out b/ql/src/test/results/clientpositive/describe_table.q.out
index 19664b0..ad3bfc1 100644
--- a/ql/src/test/results/clientpositive/describe_table.q.out
+++ b/ql/src/test/results/clientpositive/describe_table.q.out
@@ -230,7 +230,7 @@ Database:           	default
 Table:              	srcpart             	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	500                 
 	rawDataSize         	5312                
@@ -315,7 +315,7 @@ Database:           	default
 Table:              	srcpart             	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	500                 
 	rawDataSize         	5312                

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out b/ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out
index c9aed0d..ba7c640 100644
--- a/ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out
+++ b/ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out
@@ -48,7 +48,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -68,7 +68,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out b/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out
index fa2c77e..8f40040 100644
--- a/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out
+++ b/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out
@@ -104,7 +104,7 @@ STAGE PLANS:
             partition values:
               year 2000
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid,zip
               columns.comments 
@@ -147,7 +147,7 @@ STAGE PLANS:
             partition values:
               year 2001
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid,zip
               columns.comments 
@@ -217,7 +217,7 @@ STAGE PLANS:
             partition values:
               year 2000
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid,zip
               columns.comments 
@@ -260,7 +260,7 @@ STAGE PLANS:
             partition values:
               year 2001
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid,zip
               columns.comments 
@@ -400,7 +400,7 @@ STAGE PLANS:
               year 2000
               zip 94086
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid
               columns.comments 
@@ -444,7 +444,7 @@ STAGE PLANS:
               year 2001
               zip 94086
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid
               columns.comments 
@@ -488,7 +488,7 @@ STAGE PLANS:
               year 2000
               zip 94087
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid
               columns.comments 
@@ -532,7 +532,7 @@ STAGE PLANS:
               year 2001
               zip 94087
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid
               columns.comments 
@@ -599,7 +599,7 @@ STAGE PLANS:
               year 2000
               zip 94086
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid
               columns.comments 
@@ -643,7 +643,7 @@ STAGE PLANS:
               year 2001
               zip 94086
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid
               columns.comments 
@@ -687,7 +687,7 @@ STAGE PLANS:
               year 2000
               zip 94087
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid
               columns.comments 
@@ -731,7 +731,7 @@ STAGE PLANS:
               year 2001
               zip 94087
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid
               columns.comments 

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out b/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out
index 68652e9..0acfe90 100644
--- a/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out
+++ b/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out
@@ -164,7 +164,7 @@ STAGE PLANS:
             partition values:
               year 2001
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid,zip
               columns.comments 
@@ -207,7 +207,7 @@ STAGE PLANS:
             partition values:
               year 2002
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid,zip
               columns.comments 
@@ -363,7 +363,7 @@ STAGE PLANS:
             partition values:
               year 2001
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid,zip
               columns.comments 
@@ -406,7 +406,7 @@ STAGE PLANS:
             partition values:
               year 2002
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid,zip
               columns.comments 
@@ -535,7 +535,7 @@ STAGE PLANS:
             partition values:
               year 2000
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true"}}
               bucket_count -1
               columns state,locid,zip
               columns.comments 
@@ -578,7 +578,7 @@ STAGE PLANS:
             partition values:
               year 2001
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid,zip
               columns.comments 
@@ -621,7 +621,7 @@ STAGE PLANS:
             partition values:
               year 2002
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid,zip
               columns.comments 
@@ -664,7 +664,7 @@ STAGE PLANS:
             partition values:
               year 2003
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true"}}
               bucket_count -1
               columns state,locid,zip
               columns.comments 
@@ -730,7 +730,7 @@ STAGE PLANS:
             partition values:
               year 2000
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true"}}
               bucket_count -1
               columns state,locid,zip
               columns.comments 
@@ -773,7 +773,7 @@ STAGE PLANS:
             partition values:
               year 2001
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid,zip
               columns.comments 
@@ -816,7 +816,7 @@ STAGE PLANS:
             partition values:
               year 2002
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid,zip
               columns.comments 
@@ -859,7 +859,7 @@ STAGE PLANS:
             partition values:
               year 2003
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true"}}
               bucket_count -1
               columns state,locid,zip
               columns.comments 
@@ -1176,7 +1176,7 @@ STAGE PLANS:
               year 2001
               zip 94086
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid
               columns.comments 
@@ -1396,7 +1396,7 @@ STAGE PLANS:
               year 2002
               zip 94087
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid
               columns.comments 
@@ -1683,7 +1683,7 @@ STAGE PLANS:
               year 2001
               zip 94086
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid
               columns.comments 
@@ -1903,7 +1903,7 @@ STAGE PLANS:
               year 2002
               zip 94087
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true"}}
               bucket_count -1
               columns state,locid
               columns.comments 

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/extrapolate_part_stats_partial_ndv.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/extrapolate_part_stats_partial_ndv.q.out b/ql/src/test/results/clientpositive/extrapolate_part_stats_partial_ndv.q.out
index e82136f..70e4db3 100644
--- a/ql/src/test/results/clientpositive/extrapolate_part_stats_partial_ndv.q.out
+++ b/ql/src/test/results/clientpositive/extrapolate_part_stats_partial_ndv.q.out
@@ -232,7 +232,7 @@ STAGE PLANS:
             partition values:
               year 2001
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true","cnt":"true","zip":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true","cnt":"true","zip":"true"}}
               bucket_count -1
               columns state,locid,cnt,zip
               columns.comments 
@@ -275,7 +275,7 @@ STAGE PLANS:
             partition values:
               year 2002
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true","cnt":"true","zip":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true","cnt":"true","zip":"true"}}
               bucket_count -1
               columns state,locid,cnt,zip
               columns.comments 
@@ -476,7 +476,7 @@ STAGE PLANS:
             partition values:
               year 2000
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true","cnt":"true","zip":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true","cnt":"true","zip":"true"}}
               bucket_count -1
               columns state,locid,cnt,zip
               columns.comments 
@@ -519,7 +519,7 @@ STAGE PLANS:
             partition values:
               year 2001
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true","cnt":"true","zip":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true","cnt":"true","zip":"true"}}
               bucket_count -1
               columns state,locid,cnt,zip
               columns.comments 
@@ -562,7 +562,7 @@ STAGE PLANS:
             partition values:
               year 2002
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true","cnt":"true","zip":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true","cnt":"true","zip":"true"}}
               bucket_count -1
               columns state,locid,cnt,zip
               columns.comments 
@@ -605,7 +605,7 @@ STAGE PLANS:
             partition values:
               year 2003
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true","cnt":"true","zip":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true","cnt":"true","zip":"true"}}
               bucket_count -1
               columns state,locid,cnt,zip
               columns.comments 
@@ -993,7 +993,7 @@ STAGE PLANS:
               year 2001
               zip 94086
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true","cnt":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true","cnt":"true"}}
               bucket_count -1
               columns state,locid,cnt
               columns.comments 
@@ -1213,7 +1213,7 @@ STAGE PLANS:
               year 2002
               zip 94087
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"state":"true","locid":"true","cnt":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"state":"true","locid":"true","cnt":"true"}}
               bucket_count -1
               columns state,locid,cnt
               columns.comments 

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/fouter_join_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/fouter_join_ppr.q.out b/ql/src/test/results/clientpositive/fouter_join_ppr.q.out
index 45dbc36..048ab96 100644
--- a/ql/src/test/results/clientpositive/fouter_join_ppr.q.out
+++ b/ql/src/test/results/clientpositive/fouter_join_ppr.q.out
@@ -71,7 +71,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -91,7 +91,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -118,7 +118,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -164,7 +164,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -210,7 +210,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -256,7 +256,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -458,7 +458,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -478,7 +478,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -505,7 +505,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -551,7 +551,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -597,7 +597,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -643,7 +643,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -857,7 +857,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -877,7 +877,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -904,7 +904,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -950,7 +950,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1149,7 +1149,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1169,7 +1169,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -1196,7 +1196,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1242,7 +1242,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/groupby_map_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_map_ppr.q.out b/ql/src/test/results/clientpositive/groupby_map_ppr.q.out
index 84999f2..24bf7a6 100644
--- a/ql/src/test/results/clientpositive/groupby_map_ppr.q.out
+++ b/ql/src/test/results/clientpositive/groupby_map_ppr.q.out
@@ -68,7 +68,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -114,7 +114,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out b/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out
index 5cf8bb1..c3cb7fb 100644
--- a/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out
+++ b/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out
@@ -68,7 +68,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -114,7 +114,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/groupby_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_ppr.q.out b/ql/src/test/results/clientpositive/groupby_ppr.q.out
index a15b557..a4e9ff3 100644
--- a/ql/src/test/results/clientpositive/groupby_ppr.q.out
+++ b/ql/src/test/results/clientpositive/groupby_ppr.q.out
@@ -61,7 +61,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -107,7 +107,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out b/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out
index 117b2cd..33d1ed0 100644
--- a/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out
+++ b/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out
@@ -61,7 +61,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -107,7 +107,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/input23.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/input23.q.out b/ql/src/test/results/clientpositive/input23.q.out
index e03c9e7..25225d7 100644
--- a/ql/src/test/results/clientpositive/input23.q.out
+++ b/ql/src/test/results/clientpositive/input23.q.out
@@ -59,7 +59,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/input42.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/input42.q.out b/ql/src/test/results/clientpositive/input42.q.out
index 866468d..8e91af0 100644
--- a/ql/src/test/results/clientpositive/input42.q.out
+++ b/ql/src/test/results/clientpositive/input42.q.out
@@ -23,7 +23,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -67,7 +67,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1148,7 +1148,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1192,7 +1192,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1655,7 +1655,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1699,7 +1699,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/input_part1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/input_part1.q.out b/ql/src/test/results/clientpositive/input_part1.q.out
index d3efb0d..16c450b 100644
--- a/ql/src/test/results/clientpositive/input_part1.q.out
+++ b/ql/src/test/results/clientpositive/input_part1.q.out
@@ -83,7 +83,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/input_part2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/input_part2.q.out b/ql/src/test/results/clientpositive/input_part2.q.out
index 74db456..0c069a5 100644
--- a/ql/src/test/results/clientpositive/input_part2.q.out
+++ b/ql/src/test/results/clientpositive/input_part2.q.out
@@ -143,7 +143,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -189,7 +189,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/input_part7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/input_part7.q.out b/ql/src/test/results/clientpositive/input_part7.q.out
index 1429080..459e384 100644
--- a/ql/src/test/results/clientpositive/input_part7.q.out
+++ b/ql/src/test/results/clientpositive/input_part7.q.out
@@ -84,7 +84,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -130,7 +130,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/input_part9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/input_part9.q.out b/ql/src/test/results/clientpositive/input_part9.q.out
index 920096e..f73d0e1 100644
--- a/ql/src/test/results/clientpositive/input_part9.q.out
+++ b/ql/src/test/results/clientpositive/input_part9.q.out
@@ -23,7 +23,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -67,7 +67,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/join17.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join17.q.out b/ql/src/test/results/clientpositive/join17.q.out
index 2c03584..a827c67 100644
--- a/ql/src/test/results/clientpositive/join17.q.out
+++ b/ql/src/test/results/clientpositive/join17.q.out
@@ -78,7 +78,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -98,7 +98,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/join26.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join26.q.out b/ql/src/test/results/clientpositive/join26.q.out
index 86e51fb..781c0e5 100644
--- a/ql/src/test/results/clientpositive/join26.q.out
+++ b/ql/src/test/results/clientpositive/join26.q.out
@@ -146,7 +146,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/join32.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join32.q.out b/ql/src/test/results/clientpositive/join32.q.out
index 8653c2f..bebb007 100644
--- a/ql/src/test/results/clientpositive/join32.q.out
+++ b/ql/src/test/results/clientpositive/join32.q.out
@@ -159,7 +159,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -179,7 +179,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -203,7 +203,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -223,7 +223,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -250,7 +250,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/join32_lessSize.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join32_lessSize.q.out b/ql/src/test/results/clientpositive/join32_lessSize.q.out
index fd7bba7..357a84f 100644
--- a/ql/src/test/results/clientpositive/join32_lessSize.q.out
+++ b/ql/src/test/results/clientpositive/join32_lessSize.q.out
@@ -121,7 +121,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -141,7 +141,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -168,7 +168,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -314,7 +314,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -334,7 +334,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -605,7 +605,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -625,7 +625,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -734,7 +734,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -754,7 +754,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -881,7 +881,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -901,7 +901,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -1170,7 +1170,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1190,7 +1190,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -1214,7 +1214,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1234,7 +1234,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -1270,7 +1270,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1410,7 +1410,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1696,7 +1696,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1716,7 +1716,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -1740,7 +1740,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1760,7 +1760,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -1796,7 +1796,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1936,7 +1936,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'


[15/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_8.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_8.q.java1.8.out b/ql/src/test/results/clientpositive/list_bucket_dml_8.q.java1.8.out
deleted file mode 100644
index 9947c1a..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_8.q.java1.8.out
+++ /dev/null
@@ -1,712 +0,0 @@
-PREHOOK: query: -- list bucketing alter table ... concatenate: 
--- Use list bucketing DML to generate multiple files in partitions by turning off merge
--- dynamic partition. multiple skewed columns. merge.
--- The following explains the merge example used in this test case
--- DML will generate 2 partitions
--- ds=2008-04-08/hr=a1
--- ds=2008-04-08/hr=b1
--- without merge, each partition has more files
--- ds=2008-04-08/hr=a1 has 2 files
--- ds=2008-04-08/hr=b1 has 6 files
--- with merge each partition has fewer files
--- ds=2008-04-08/hr=a1 has 1 file
--- ds=2008-04-08/hr=b1 has 4 files
--- The following shows file size and name in each directory
--- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 155 000000_0
--- 155 000001_0
--- with merge
--- 254 000000_0
--- hr=b1/key=103/value=val_103:
--- without merge
--- 99 000000_0
--- 99 000001_0
--- with merge
--- 142 000001_0
--- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 5181 000000_0
--- 5181 000001_0
--- with merge
--- 5181 000000_0
--- 5181 000001_0
--- hr=b1/key=484/value=val_484
--- without merge
--- 87 000000_0
--- 87 000001_0
--- with merge
--- 118 000002_0 
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- create a skewed table
-create table list_bucketing_dynamic_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- list bucketing alter table ... concatenate: 
--- Use list bucketing DML to generate multiple files in partitions by turning off merge
--- dynamic partition. multiple skewed columns. merge.
--- The following explains the merge example used in this test case
--- DML will generate 2 partitions
--- ds=2008-04-08/hr=a1
--- ds=2008-04-08/hr=b1
--- without merge, each partition has more files
--- ds=2008-04-08/hr=a1 has 2 files
--- ds=2008-04-08/hr=b1 has 6 files
--- with merge each partition has fewer files
--- ds=2008-04-08/hr=a1 has 1 file
--- ds=2008-04-08/hr=b1 has 4 files
--- The following shows file size and name in each directory
--- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 155 000000_0
--- 155 000001_0
--- with merge
--- 254 000000_0
--- hr=b1/key=103/value=val_103:
--- without merge
--- 99 000000_0
--- 99 000001_0
--- with merge
--- 142 000001_0
--- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 5181 000000_0
--- 5181 000001_0
--- with merge
--- 5181 000000_0
--- 5181 000001_0
--- hr=b1/key=484/value=val_484
--- without merge
--- 87 000000_0
--- 87 000001_0
--- with merge
--- 118 000002_0 
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- create a skewed table
-create table list_bucketing_dynamic_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_dynamic_part
-PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            srcpart
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_TAB
-            TOK_TABNAME
-               list_bucketing_dynamic_part
-            TOK_PARTSPEC
-               TOK_PARTVAL
-                  ds
-                  '2008-04-08'
-               TOK_PARTVAL
-                  hr
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               key
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               value
-         TOK_SELEXPR
-            TOK_FUNCTION
-               if
-               ==
-                  %
-                     TOK_TABLE_OR_COL
-                        key
-                     100
-                  0
-               'a1'
-               'b1'
-      TOK_WHERE
-         =
-            TOK_TABLE_OR_COL
-               ds
-            '2008-04-08'
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: srcpart
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string), if(((UDFToDouble(key) % 100.0) = 0.0), 'a1', 'b1') (type: string)
-              outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/
-                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_dynamic_part
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_dynamic_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [$hdt$_0:srcpart]
-        /srcpart/ds=2008-04-08/hr=12 [$hdt$_0:srcpart]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_dynamic_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
-POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_dynamic_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_dynamic_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-ds=2008-04-08/hr=a1
-ds=2008-04-08/hr=b1
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, a1]    	 
-Database:           	default             	 
-Table:              	list_bucketing_dynamic_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	2                   
-	numRows             	16                  
-	rawDataSize         	136                 
-	totalSize           	310                 
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, b1]    	 
-Database:           	default             	 
-Table:              	list_bucketing_dynamic_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	6                   
-	numRows             	984                 
-	rawDataSize         	9488                
-	totalSize           	10734               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484, [103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: -- concatenate the partition and it will merge files
-alter table list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1') concatenate
-PREHOOK: type: ALTER_PARTITION_MERGE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-POSTHOOK: query: -- concatenate the partition and it will merge files
-alter table list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1') concatenate
-POSTHOOK: type: ALTER_PARTITION_MERGE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, b1]    	 
-Database:           	default             	 
-Table:              	list_bucketing_dynamic_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	3                   
-	numRows             	0                   
-	rawDataSize         	0                   
-	totalSize           	10586               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484, [103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-1000
-PREHOOK: query: select count(*) from list_bucketing_dynamic_part
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from list_bucketing_dynamic_part
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-1000
-PREHOOK: query: explain extended
-select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            list_bucketing_dynamic_part
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_DIR
-            TOK_TMP_FILE
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_ALLCOLREF
-      TOK_WHERE
-         and
-            =
-               TOK_TABLE_OR_COL
-                  key
-               '484'
-            =
-               TOK_TABLE_OR_COL
-                  value
-               'val_484'
-
-
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Partition Description:
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr a1
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_dynamic_part
-              numFiles 2
-              numRows 16
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 136
-              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 310
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_dynamic_part
-            name: default.list_bucketing_dynamic_part
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr b1
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_dynamic_part
-              numFiles 3
-              numRows 0
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 0
-              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 10586
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_dynamic_part
-            name: default.list_bucketing_dynamic_part
-      Processor Tree:
-        TableScan
-          alias: list_bucketing_dynamic_part
-          Statistics: Num rows: 16 Data size: 136 Basic stats: PARTIAL Column stats: NONE
-          GatherStats: false
-          Filter Operator
-            isSamplingPred: false
-            predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
-            Statistics: Num rows: 4 Data size: 34 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 4 Data size: 34 Basic stats: COMPLETE Column stats: NONE
-              ListSink
-
-PREHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-484	val_484	2008-04-08	b1
-484	val_484	2008-04-08	b1
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484' order by hr
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484' order by hr
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	12
-PREHOOK: query: -- clean up
-drop table list_bucketing_dynamic_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Output: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- clean up
-drop table list_bucketing_dynamic_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Output: default@list_bucketing_dynamic_part
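
The file deleted above is one of the JDK-version-specific golden files that HIVE-13549 retires: because JDK7 and JDK8 ordered this output differently, the test harness had to keep a .q.java1.7.out / .q.java1.8.out pair for queries tagged JAVA_VERSION_SPECIFIC_OUTPUT and pick one at runtime; once the output is deterministic, a single .q.out is enough. A hypothetical sketch of that kind of lookup (the class and method names are illustrative, not Hive's actual QTestUtil code):

    import java.io.File;

    // Hypothetical sketch of how a test driver might resolve the expected
    // ("golden") output file while version-specific files still existed.
    public class GoldenFileSketch {
        static File resolveExpected(File resultsDir, String queryName) {
            // java.specification.version is "1.7" on JDK7 and "1.8" on JDK8.
            String javaVersion = System.getProperty("java.specification.version");
            File versioned = new File(resultsDir,
                queryName + ".q.java" + javaVersion + ".out");
            if (versioned.exists()) {
                return versioned;  // JDK-specific golden file, if one was checked in
            }
            return new File(resultsDir, queryName + ".q.out");  // the common case
        }

        public static void main(String[] args) {
            System.out.println(resolveExpected(
                new File("ql/src/test/results/clientpositive"), "list_bucket_dml_8"));
        }
    }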

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_8.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_8.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_8.q.out
new file mode 100644
index 0000000..ee36d3f
--- /dev/null
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_8.q.out
@@ -0,0 +1,639 @@
+PREHOOK: query: -- list bucketing alter table ... concatenate: 
+-- Use list bucketing DML to generate multiple files in partitions by turning off merge
+-- dynamic partition. multiple skewed columns. merge.
+-- The following explains the merge example used in this test case
+-- DML will generate 2 partitions
+-- ds=2008-04-08/hr=a1
+-- ds=2008-04-08/hr=b1
+-- without merge, each partition has more files
+-- ds=2008-04-08/hr=a1 has 2 files
+-- ds=2008-04-08/hr=b1 has 6 files
+-- with merge each partition has fewer files
+-- ds=2008-04-08/hr=a1 has 1 file
+-- ds=2008-04-08/hr=b1 has 4 files
+-- The following shows file size and name in each directory
+-- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+-- without merge
+-- 155 000000_0
+-- 155 000001_0
+-- with merge
+-- 254 000000_0
+-- hr=b1/key=103/value=val_103:
+-- without merge
+-- 99 000000_0
+-- 99 000001_0
+-- with merge
+-- 142 000001_0
+-- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+-- without merge
+-- 5181 000000_0
+-- 5181 000001_0
+-- with merge
+-- 5181 000000_0
+-- 5181 000001_0
+-- hr=b1/key=484/value=val_484
+-- without merge
+-- 87 000000_0
+-- 87 000001_0
+-- with merge
+-- 118 000002_0 
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+
+-- create a skewed table
+create table list_bucketing_dynamic_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
+    stored as DIRECTORIES
+    STORED AS RCFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@list_bucketing_dynamic_part
+POSTHOOK: query: -- list bucketing alter table ... concatenate: 
+-- Use list bucketing DML to generate multiple files in partitions by turning off merge
+-- dynamic partition. multiple skewed columns. merge.
+-- The following explains the merge example used in this test case
+-- DML will generate 2 partitions
+-- ds=2008-04-08/hr=a1
+-- ds=2008-04-08/hr=b1
+-- without merge, each partition has more files
+-- ds=2008-04-08/hr=a1 has 2 files
+-- ds=2008-04-08/hr=b1 has 6 files
+-- with merge each partition has fewer files
+-- ds=2008-04-08/hr=a1 has 1 file
+-- ds=2008-04-08/hr=b1 has 4 files
+-- The following shows file size and name in each directory
+-- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+-- without merge
+-- 155 000000_0
+-- 155 000001_0
+-- with merge
+-- 254 000000_0
+-- hr=b1/key=103/value=val_103:
+-- without merge
+-- 99 000000_0
+-- 99 000001_0
+-- with merge
+-- 142 000001_0
+-- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+-- without merge
+-- 5181 000000_0
+-- 5181 000001_0
+-- with merge
+-- 5181 000000_0
+-- 5181 000001_0
+-- hr=b1/key=484/value=val_484
+-- without merge
+-- 87 000000_0
+-- 87 000001_0
+-- with merge
+-- 118 000002_0 
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+
+-- create a skewed table
+create table list_bucketing_dynamic_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
+    stored as DIRECTORIES
+    STORED AS RCFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@list_bucketing_dynamic_part
+PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: srcpart
+            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string), if(((UDFToDouble(key) % 100.0) = 0.0), 'a1', 'b1') (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Static Partition Specification: ds=2008-04-08/
+                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns key,value
+                      columns.comments 
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.list_bucketing_dynamic_part
+                      partition_columns ds/hr
+                      partition_columns.types string:string
+                      serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.list_bucketing_dynamic_part
+                TotalFiles: 1
+                GatherStats: true
+                MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=11
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=12
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+      Truncated Path -> Alias:
+        /srcpart/ds=2008-04-08/hr=11 [srcpart]
+        /srcpart/ds=2008-04-08/hr=12 [srcpart]
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_dynamic_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_dynamic_part
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
+POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
+POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- check DML result
+show partitions list_bucketing_dynamic_part
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: query: -- check DML result
+show partitions list_bucketing_dynamic_part
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+ds=2008-04-08/hr=a1
+ds=2008-04-08/hr=b1
+PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, a1]    	 
+Database:           	default             	 
+Table:              	list_bucketing_dynamic_part	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	2                   
+	numRows             	16                  
+	rawDataSize         	136                 
+	totalSize           	310                 
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key, value]        	 
+Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, b1]    	 
+Database:           	default             	 
+Table:              	list_bucketing_dynamic_part	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	6                   
+	numRows             	984                 
+	rawDataSize         	9488                
+	totalSize           	10734               
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key, value]        	 
+Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484, [103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: -- concatenate the partition and it will merge files
+alter table list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1') concatenate
+PREHOOK: type: ALTER_PARTITION_MERGE
+PREHOOK: Input: default@list_bucketing_dynamic_part
+PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
+POSTHOOK: query: -- concatenate the partition and it will merge files
+alter table list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1') concatenate
+POSTHOOK: type: ALTER_PARTITION_MERGE
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
+PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, b1]    	 
+Database:           	default             	 
+Table:              	list_bucketing_dynamic_part	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	numFiles            	3                   
+	numRows             	984                 
+	rawDataSize         	9488                
+	totalSize           	10586               
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key, value]        	 
+Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484, [103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+1000
+PREHOOK: query: select count(*) from list_bucketing_dynamic_part
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_dynamic_part
+PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
+PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from list_bucketing_dynamic_part
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
+POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
+#### A masked pattern was here ####
+1000
+PREHOOK: query: explain extended
+select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Partition Description:
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr a1
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_dynamic_part
+              numFiles 2
+              numRows 16
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 136
+              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              totalSize 310
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_dynamic_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_dynamic_part
+            name: default.list_bucketing_dynamic_part
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr b1
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_dynamic_part
+              numFiles 3
+              numRows 984
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 9488
+              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              totalSize 10586
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_dynamic_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_dynamic_part
+            name: default.list_bucketing_dynamic_part
+      Processor Tree:
+        TableScan
+          alias: list_bucketing_dynamic_part
+          Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
+          GatherStats: false
+          Filter Operator
+            isSamplingPred: false
+            predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
+            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
+              ListSink
+
+PREHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_dynamic_part
+PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
+PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
+POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
+#### A masked pattern was here ####
+484	val_484	2008-04-08	b1
+484	val_484	2008-04-08	b1
+PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484' order by hr
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484' order by hr
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+484	val_484	2008-04-08	11
+484	val_484	2008-04-08	12
+PREHOOK: query: -- clean up
+drop table list_bucketing_dynamic_part
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@list_bucketing_dynamic_part
+PREHOOK: Output: default@list_bucketing_dynamic_part
+POSTHOOK: query: -- clean up
+drop table list_bucketing_dynamic_part
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: Output: default@list_bucketing_dynamic_part
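
----------------------------------------------------------------------
A note on how these .q.out files are used: each listing above is a golden
file. The test harness replays the corresponding .q script, masks volatile
fragments (temp paths and the like, hence "#### A masked pattern was here
####"), and compares the driver output line-by-line against the recorded
file, so even purely cosmetic drift such as JSON key order registers as a
failure. The sketch below shows that comparison style in outline only;
runQueryFile and the paths are hypothetical stand-ins, not Hive's actual
QTestUtil API.

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;

// Hypothetical golden-file check in the style of Hive's qfile tests:
// replay a query file, mask nondeterministic fragments, then compare the
// result with the recorded .q.out. Any formatting drift -- including
// JSON key ordering -- fails the test.
public class GoldenFileCheck {

  // Stand-in for executing the .q script against a test driver.
  static List<String> runQueryFile(Path qFile) {
    throw new UnsupportedOperationException("execute " + qFile + " here");
  }

  // Mirror of the "#### A masked pattern was here ####" convention:
  // replace volatile substrings before diffing.
  static String mask(String line) {
    return line.replaceAll("/tmp/\\S+", "#### A masked pattern was here ####");
  }

  public static void main(String[] args) throws Exception {
    Path qFile = Paths.get("ql/src/test/queries/clientpositive/foo.q");
    Path expected = Paths.get("ql/src/test/results/clientpositive/foo.q.out");

    List<String> actual = runQueryFile(qFile).stream()
        .map(GoldenFileCheck::mask)
        .collect(Collectors.toList());
    List<String> golden = Files.readAllLines(expected);

    if (!actual.equals(golden)) {
      throw new AssertionError("output differs from " + expected
          + "; regenerate the .q.out or make the output deterministic");
    }
    System.out.println("golden file matches: " + expected);
  }
}
----------------------------------------------------------------------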


[29/34] hive git commit: HIVE-13860: Fix more json related JDK8 test failures (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
HIVE-13860: Fix more json related JDK8 test failures (Mohit Sabharwal, reviewed by Sergio Pena)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f38a42e5
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f38a42e5
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f38a42e5

Branch: refs/heads/master
Commit: f38a42e52e28618035c05a15c349594ae814748d
Parents: cdb872a
Author: Mohit Sabharwal <mo...@cloudera.com>
Authored: Fri May 27 10:35:16 2016 -0500
Committer: Sergio Pena <se...@cloudera.com>
Committed: Fri May 27 21:08:31 2016 -0500

----------------------------------------------------------------------
 .../clientpositive/autoColumnStats_1.q.out      | 28 +++---
 .../clientpositive/autoColumnStats_2.q.out      | 24 ++---
 .../clientpositive/autoColumnStats_3.q.out      | 10 +-
 .../clientpositive/autoColumnStats_4.q.out      |  2 +-
 .../clientpositive/autoColumnStats_5.q.out      |  8 +-
 .../clientpositive/autoColumnStats_8.q.out      |  8 +-
 .../clientpositive/autoColumnStats_9.q.out      |  2 +-
 .../clientpositive/binary_output_format.q.out   |  4 +-
 .../results/clientpositive/json_serde1.q.out    |  4 +-
 .../results/clientpositive/orc_create.q.out     | 12 +--
 .../clientpositive/orc_int_type_promotion.q.out |  6 +-
 .../results/clientpositive/perf/query85.q.out   |  2 +-
 .../results/clientpositive/perf/query89.q.out   |  2 +-
 .../results/clientpositive/perf/query91.q.out   |  2 +-
 .../results/clientpositive/spark/bucket5.q.out  |  8 +-
 .../results/clientpositive/spark/join0.q.out    |  2 +-
 .../clientpositive/spark/outer_join_ppr.q.out   |  4 +-
 .../spark/reduce_deduplicate.q.out              |  4 +-
 .../clientpositive/spark/union_ppr.q.out        |  8 +-
 .../clientpositive/stats_list_bucket.q.out      |  2 +-
 .../results/clientpositive/tez/bucket2.q.out    |  4 +-
 .../clientpositive/udaf_collect_set_2.q.out     | 96 ++++++++++----------
 .../results/clientpositive/udf_sort_array.q.out |  2 +-
 .../clientpositive/vector_complex_all.q.out     |  6 +-
 .../results/clientpositive/vector_udf1.q.out    |  2 +-
 25 files changed, 126 insertions(+), 126 deletions(-)
----------------------------------------------------------------------
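
----------------------------------------------------------------------
The reorderings below all trace back to the same root cause: java.util.HashMap
gives no iteration-order guarantee, and its order changed between JDK 7 and
JDK 8, so any JSON rendered straight from a HashMap (the COLUMN_STATS_ACCURATE
parameter, map-typed column values in json_serde1 and orc_create) prints its
keys in a JDK-dependent order. Where the expectation was simply re-recorded,
the new .q.out carries the JDK 8 order; where the serialized form is under
Hive's control, the updated expectation is the alphabetically sorted form,
e.g. {"BASIC_STATS":"true","COLUMN_STATS":{...}}. The sketch below only
illustrates the mechanism with a toy renderer; it is not the actual patch,
and it flattens COLUMN_STATS to a plain string.

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

// Illustrates the JDK 8 failure mode: JSON rendered from a HashMap inherits
// its unspecified iteration order, so golden files recorded on one JDK can
// fail on another. Rendering from a sorted map yields stable text.
public class DeterministicJson {

  // Toy JSON rendering of a string->string map (illustration only; it does
  // no escaping beyond quoting and is not Hive's serializer).
  static String toJson(Map<String, String> m) {
    StringBuilder sb = new StringBuilder("{");
    boolean first = true;
    for (Map.Entry<String, String> e : m.entrySet()) {
      if (!first) {
        sb.append(",");
      }
      first = false;
      sb.append("\"").append(e.getKey()).append("\":\"")
        .append(e.getValue()).append("\"");
    }
    return sb.append("}").toString();
  }

  public static void main(String[] args) {
    // HashMap: iteration order is unspecified and differs across JDKs.
    Map<String, String> unordered = new HashMap<>();
    unordered.put("COLUMN_STATS", "...");
    unordered.put("BASIC_STATS", "true");
    System.out.println("HashMap order (JDK-dependent): " + toJson(unordered));

    // TreeMap: iteration follows key order, so the rendered JSON is the
    // same on every JDK: {"BASIC_STATS":"true","COLUMN_STATS":"..."}
    System.out.println("TreeMap order (stable):        "
        + toJson(new TreeMap<>(unordered)));
  }
}
----------------------------------------------------------------------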


http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/autoColumnStats_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/autoColumnStats_1.q.out b/ql/src/test/results/clientpositive/autoColumnStats_1.q.out
index e290e52..4cf6df1 100644
--- a/ql/src/test/results/clientpositive/autoColumnStats_1.q.out
+++ b/ql/src/test/results/clientpositive/autoColumnStats_1.q.out
@@ -60,7 +60,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	500                 
 	rawDataSize         	5312                
@@ -137,7 +137,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	500                 
 	rawDataSize         	5312                
@@ -172,7 +172,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	500                 
 	rawDataSize         	5312                
@@ -257,7 +257,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	500                 
 	rawDataSize         	5312                
@@ -292,7 +292,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	500                 
 	rawDataSize         	5312                
@@ -351,7 +351,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	508                 
 	rawDataSize         	5400                
@@ -564,7 +564,7 @@ Database:           	default
 Table:              	alter5              	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"col1\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col1\":\"true\"}}
 	numFiles            	1                   
 	numRows             	500                 
 	rawDataSize         	1406                
@@ -648,7 +648,7 @@ Database:           	default
 Table:              	src_stat_part       	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	5                   
 	rawDataSize         	38                  
@@ -699,7 +699,7 @@ Database:           	default
 Table:              	src_stat_part       	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	25                  
 	rawDataSize         	191                 
@@ -838,7 +838,7 @@ Database:           	default
 Table:              	tab_part            	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	4                   
 	numRows             	500                 
 	rawDataSize         	5312                
@@ -899,7 +899,7 @@ Database:           	default
 Table:              	tab                 	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	2                   
 	numRows             	242                 
 	rawDataSize         	2566                
@@ -1023,7 +1023,7 @@ Database:           	default
 Table:              	nzhang_part14       	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	2                   
 	rawDataSize         	6                   
@@ -1086,7 +1086,7 @@ Database:           	default
 Table:              	nzhang_part14       	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	1000                
 	rawDataSize         	10624               
@@ -1167,7 +1167,7 @@ Database:           	default
 Table:              	nzhang_part14       	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	1000                
 	rawDataSize         	10624               

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/autoColumnStats_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/autoColumnStats_2.q.out b/ql/src/test/results/clientpositive/autoColumnStats_2.q.out
index a76bf5f..791e6ae 100644
--- a/ql/src/test/results/clientpositive/autoColumnStats_2.q.out
+++ b/ql/src/test/results/clientpositive/autoColumnStats_2.q.out
@@ -60,7 +60,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	500                 
 	rawDataSize         	5312                
@@ -173,7 +173,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	500                 
 	rawDataSize         	5312                
@@ -208,7 +208,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	2                   
 	numRows             	1000                
 	rawDataSize         	10624               
@@ -341,7 +341,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	508                 
 	rawDataSize         	5400                
@@ -561,7 +561,7 @@ Database:           	default
 Table:              	alter5              	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"col1\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col1\":\"true\"}}
 	numFiles            	1                   
 	numRows             	500                 
 	rawDataSize         	1406                
@@ -769,7 +769,7 @@ Database:           	default
 Table:              	src_stat_part       	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	5                   
 	rawDataSize         	38                  
@@ -820,7 +820,7 @@ Database:           	default
 Table:              	src_stat_part       	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	25                  
 	rawDataSize         	191                 
@@ -959,7 +959,7 @@ Database:           	default
 Table:              	tab_part            	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	4                   
 	numRows             	500                 
 	rawDataSize         	5312                
@@ -1020,7 +1020,7 @@ Database:           	default
 Table:              	tab                 	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	2                   
 	numRows             	242                 
 	rawDataSize         	2566                
@@ -1144,7 +1144,7 @@ Database:           	default
 Table:              	nzhang_part14       	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	2                   
 	rawDataSize         	6                   
@@ -1207,7 +1207,7 @@ Database:           	default
 Table:              	nzhang_part14       	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	1000                
 	rawDataSize         	10624               
@@ -1288,7 +1288,7 @@ Database:           	default
 Table:              	nzhang_part14       	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	1000                
 	rawDataSize         	10624               

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/autoColumnStats_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/autoColumnStats_3.q.out b/ql/src/test/results/clientpositive/autoColumnStats_3.q.out
index ee41910..dca158b 100644
--- a/ql/src/test/results/clientpositive/autoColumnStats_3.q.out
+++ b/ql/src/test/results/clientpositive/autoColumnStats_3.q.out
@@ -36,7 +36,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\"}}
 	numFiles            	0                   
 	numRows             	0                   
 	rawDataSize         	0                   
@@ -81,7 +81,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	500                 
 	rawDataSize         	5312                
@@ -251,7 +251,7 @@ Database:           	default
 Table:              	nzhang_part14       	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	2                   
 	rawDataSize         	6                   
@@ -361,7 +361,7 @@ Database:           	default
 Table:              	nzhang_part14       	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	2                   
 	numRows             	4                   
 	rawDataSize         	12                  
@@ -401,7 +401,7 @@ Database:           	default
 Table:              	nzhang_part14       	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	2                   
 	numRows             	4                   
 	rawDataSize         	16                  

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/autoColumnStats_4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/autoColumnStats_4.q.out b/ql/src/test/results/clientpositive/autoColumnStats_4.q.out
index 676a27a..50d988d 100644
--- a/ql/src/test/results/clientpositive/autoColumnStats_4.q.out
+++ b/ql/src/test/results/clientpositive/autoColumnStats_4.q.out
@@ -196,7 +196,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\"}}
 	numFiles            	2                   
 	numRows             	10                  
 	rawDataSize         	0                   

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/autoColumnStats_5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/autoColumnStats_5.q.out b/ql/src/test/results/clientpositive/autoColumnStats_5.q.out
index 04ed3ce..c2153fb 100644
--- a/ql/src/test/results/clientpositive/autoColumnStats_5.q.out
+++ b/ql/src/test/results/clientpositive/autoColumnStats_5.q.out
@@ -185,7 +185,7 @@ Database:           	default
 Table:              	partitioned1        	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\"}}
 	numFiles            	1                   
 	numRows             	4                   
 	rawDataSize         	40                  
@@ -245,7 +245,7 @@ Database:           	default
 Table:              	partitioned1        	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\"}}
 	numFiles            	1                   
 	numRows             	4                   
 	rawDataSize         	40                  
@@ -431,7 +431,7 @@ Database:           	default
 Table:              	partitioned1        	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"}}
 	numFiles            	1                   
 	numRows             	4                   
 	rawDataSize         	56                  
@@ -625,7 +625,7 @@ Database:           	default
 Table:              	partitioned1        	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"}}
 	numFiles            	2                   
 	numRows             	6                   
 	rawDataSize         	78                  

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/autoColumnStats_8.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/autoColumnStats_8.q.out b/ql/src/test/results/clientpositive/autoColumnStats_8.q.out
index 5b74d2d..5d9e5ab 100644
--- a/ql/src/test/results/clientpositive/autoColumnStats_8.q.out
+++ b/ql/src/test/results/clientpositive/autoColumnStats_8.q.out
@@ -197,7 +197,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -243,7 +243,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -289,7 +289,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -335,7 +335,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/autoColumnStats_9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/autoColumnStats_9.q.out b/ql/src/test/results/clientpositive/autoColumnStats_9.q.out
index 4a7b2b7..da8b19c 100644
--- a/ql/src/test/results/clientpositive/autoColumnStats_9.q.out
+++ b/ql/src/test/results/clientpositive/autoColumnStats_9.q.out
@@ -231,7 +231,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	137                 
 	numRows             	855                 
 	rawDataSize         	9143                

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/binary_output_format.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/binary_output_format.q.out b/ql/src/test/results/clientpositive/binary_output_format.q.out
index 51328e2..f3c624c 100644
--- a/ql/src/test/results/clientpositive/binary_output_format.q.out
+++ b/ql/src/test/results/clientpositive/binary_output_format.q.out
@@ -129,7 +129,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -149,7 +149,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/json_serde1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/json_serde1.q.out b/ql/src/test/results/clientpositive/json_serde1.q.out
index 6235aff..e14d674 100644
--- a/ql/src/test/results/clientpositive/json_serde1.q.out
+++ b/ql/src/test/results/clientpositive/json_serde1.q.out
@@ -93,8 +93,8 @@ POSTHOOK: query: select * from json_serde1_2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@json_serde1_2
 #### A masked pattern was here ####
-[3,2,1]	{1:"2001-01-01",2:null}	{"c1":123456,"c2":"hello","c3":["aa","bb","cc"],"c4":{"xyz":456,"abc":123},"c5":{"c5_1":"bye","c5_2":88}}
-[3,2,1]	{1:"2001-01-01",2:null}	{"c1":123456,"c2":"hello","c3":["aa","bb","cc"],"c4":{"xyz":456,"abc":123},"c5":{"c5_1":"bye","c5_2":88}}
+[3,2,1]	{1:"2001-01-01",2:null}	{"c1":123456,"c2":"hello","c3":["aa","bb","cc"],"c4":{"abc":123,"xyz":456},"c5":{"c5_1":"bye","c5_2":88}}
+[3,2,1]	{1:"2001-01-01",2:null}	{"c1":123456,"c2":"hello","c3":["aa","bb","cc"],"c4":{"abc":123,"xyz":456},"c5":{"c5_1":"bye","c5_2":88}}
 PREHOOK: query: drop table json_serde1_1
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@json_serde1_1

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/orc_create.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_create.q.out b/ql/src/test/results/clientpositive/orc_create.q.out
index 34ab00d..40d127c 100644
--- a/ql/src/test/results/clientpositive/orc_create.q.out
+++ b/ql/src/test/results/clientpositive/orc_create.q.out
@@ -380,9 +380,9 @@ POSTHOOK: query: SELECT * from orc_create_complex
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_create_complex
 #### A masked pattern was here ####
-line1	{"key13":"value13","key12":"value12","key11":"value11"}	["a","b","c"]	{"A":"one","B":"two"}
-line2	{"key21":"value21","key23":"value23","key22":"value22"}	["d","e","f"]	{"A":"three","B":"four"}
-line3	{"key33":"value33","key31":"value31","key32":"value32"}	["g","h","i"]	{"A":"five","B":"six"}
+line1	{"key13":"value13","key11":"value11","key12":"value12"}	["a","b","c"]	{"A":"one","B":"two"}
+line2	{"key21":"value21","key22":"value22","key23":"value23"}	["d","e","f"]	{"A":"three","B":"four"}
+line3	{"key31":"value31","key32":"value32","key33":"value33"}	["g","h","i"]	{"A":"five","B":"six"}
 PREHOOK: query: SELECT str from orc_create_complex
 PREHOOK: type: QUERY
 PREHOOK: Input: default@orc_create_complex
@@ -402,9 +402,9 @@ POSTHOOK: query: SELECT mp from orc_create_complex
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_create_complex
 #### A masked pattern was here ####
-{"key13":"value13","key12":"value12","key11":"value11"}
-{"key21":"value21","key23":"value23","key22":"value22"}
-{"key33":"value33","key31":"value31","key32":"value32"}
+{"key13":"value13","key11":"value11","key12":"value12"}
+{"key21":"value21","key22":"value22","key23":"value23"}
+{"key31":"value31","key32":"value32","key33":"value33"}
 PREHOOK: query: SELECT lst from orc_create_complex
 PREHOOK: type: QUERY
 PREHOOK: Input: default@orc_create_complex

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/orc_int_type_promotion.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/orc_int_type_promotion.q.out b/ql/src/test/results/clientpositive/orc_int_type_promotion.q.out
index 3b2e962..d3837a3 100644
--- a/ql/src/test/results/clientpositive/orc_int_type_promotion.q.out
+++ b/ql/src/test/results/clientpositive/orc_int_type_promotion.q.out
@@ -127,7 +127,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypes_orc
 #### A masked pattern was here ####
 true	10	100	1000	10000	4.0	20.0	4.222	1969-12-31 15:59:58.174	1970-01-01	string	hello	hello	{"k2":"v2","k1":"v1"}	[100,200]	{"c1":null,"c2":" \"foo\"}"}
-false	20	200	2000	20000	8.0	40.0	2.222	1970-12-31 15:59:58.174	1971-01-01	abcd	world	world	{"k4":"v4","k3":"v3"}	[200,300]	{"c1":null,"c2":" \"bar\"}"}
+false	20	200	2000	20000	8.0	40.0	2.222	1970-12-31 15:59:58.174	1971-01-01	abcd	world	world	{"k3":"v3","k4":"v4"}	[200,300]	{"c1":null,"c2":" \"bar\"}"}
 PREHOOK: query: alter table alltypes_orc change si si int
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@alltypes_orc
@@ -145,7 +145,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypes_orc
 #### A masked pattern was here ####
 true	10	100	1000	10000	4.0	20.0	4.222	1969-12-31 15:59:58.174	1970-01-01	string	hello	hello	{"k2":"v2","k1":"v1"}	[100,200]	{"c1":null,"c2":" \"foo\"}"}
-false	20	200	2000	20000	8.0	40.0	2.222	1970-12-31 15:59:58.174	1971-01-01	abcd	world	world	{"k4":"v4","k3":"v3"}	[200,300]	{"c1":null,"c2":" \"bar\"}"}
+false	20	200	2000	20000	8.0	40.0	2.222	1970-12-31 15:59:58.174	1971-01-01	abcd	world	world	{"k3":"v3","k4":"v4"}	[200,300]	{"c1":null,"c2":" \"bar\"}"}
 PREHOOK: query: alter table alltypes_orc change si si bigint
 PREHOOK: type: ALTERTABLE_RENAMECOL
 PREHOOK: Input: default@alltypes_orc
@@ -171,7 +171,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypes_orc
 #### A masked pattern was here ####
 true	10	100	1000	10000	4.0	20.0	4.222	1969-12-31 15:59:58.174	1970-01-01	string	hello	hello	{"k2":"v2","k1":"v1"}	[100,200]	{"c1":null,"c2":" \"foo\"}"}
-false	20	200	2000	20000	8.0	40.0	2.222	1970-12-31 15:59:58.174	1971-01-01	abcd	world	world	{"k4":"v4","k3":"v3"}	[200,300]	{"c1":null,"c2":" \"bar\"}"}
+false	20	200	2000	20000	8.0	40.0	2.222	1970-12-31 15:59:58.174	1971-01-01	abcd	world	world	{"k3":"v3","k4":"v4"}	[200,300]	{"c1":null,"c2":" \"bar\"}"}
 PREHOOK: query: explain select ti, si, i, bi from alltypes_orc
 PREHOOK: type: QUERY
 POSTHOOK: query: explain select ti, si, i, bi from alltypes_orc

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/perf/query85.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query85.q.out b/ql/src/test/results/clientpositive/perf/query85.q.out
index 0ae13e3..ca23bbb 100644
--- a/ql/src/test/results/clientpositive/perf/query85.q.out
+++ b/ql/src/test/results/clientpositive/perf/query85.q.out
@@ -114,7 +114,7 @@ Stage-0
                                                         Select Operator [SEL_17] (rows=1583 width=204)
                                                           Output:["_col0","_col1","_col2"]
                                                           Filter Operator [FIL_100] (rows=1583 width=204)
-                                                            predicate:(((cd_education_status = '4 yr Degree') or (cd_education_status = 'Primary') or (cd_education_status = 'Advanced Degree')) and ((cd_marital_status = 'M') or (cd_marital_status = 'D') or (cd_marital_status = 'U')) and cd_demo_sk is not null and cd_education_status is not null and cd_marital_status is not null)
+                                                            predicate:(((cd_education_status = '4 yr Degree') or (cd_education_status = 'Primary') or (cd_education_status = 'Advanced Degree')) and ((cd_marital_status = 'M') or (cd_marital_status = 'D') or (cd_marital_status = 'U')) and cd_demo_sk is not null and cd_marital_status is not null and cd_education_status is not null)
                                                             TableScan [TS_15] (rows=1583 width=204)
                                                               default@customer_demographics,cd1,Tbl:COMPLETE,Col:NONE,Output:["cd_demo_sk","cd_marital_status","cd_education_status"]
                                                     <-Reducer 11 [SIMPLE_EDGE]

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/perf/query89.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query89.q.out b/ql/src/test/results/clientpositive/perf/query89.q.out
index 8e6cc49..c80f06c 100644
--- a/ql/src/test/results/clientpositive/perf/query89.q.out
+++ b/ql/src/test/results/clientpositive/perf/query89.q.out
@@ -143,7 +143,7 @@ Stage-0
                                                 Select Operator [SEL_5] (rows=231000 width=1436)
                                                   Output:["_col0","_col1","_col2","_col3"]
                                                   Filter Operator [FIL_48] (rows=231000 width=1436)
-                                                    predicate:(((i_category) IN ('Home', 'Books', 'Electronics') or (i_category) IN ('Shoes', 'Jewelry', 'Men')) and ((i_class) IN ('wallpaper', 'parenting', 'musical') or (i_class) IN ('womens', 'birdal', 'pants')) and (((i_category) IN ('Home', 'Books', 'Electronics') and (i_class) IN ('wallpaper', 'parenting', 'musical')) or ((i_category) IN ('Shoes', 'Jewelry', 'Men') and (i_class) IN ('womens', 'birdal', 'pants'))) and i_item_sk is not null)
+                                                    predicate:(((i_class) IN ('wallpaper', 'parenting', 'musical') or (i_class) IN ('womens', 'birdal', 'pants')) and ((i_category) IN ('Home', 'Books', 'Electronics') or (i_category) IN ('Shoes', 'Jewelry', 'Men')) and (((i_category) IN ('Home', 'Books', 'Electronics') and (i_class) IN ('wallpaper', 'parenting', 'musical')) or ((i_category) IN ('Shoes', 'Jewelry', 'Men') and (i_class) IN ('womens', 'birdal', 'pants'))) and i_item_sk is not null)
                                                     TableScan [TS_3] (rows=462000 width=1436)
                                                       default@item,item,Tbl:COMPLETE,Col:NONE,Output:["i_item_sk","i_brand","i_class","i_category"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/perf/query91.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/query91.q.out b/ql/src/test/results/clientpositive/perf/query91.q.out
index fa9165f..81f37c5 100644
--- a/ql/src/test/results/clientpositive/perf/query91.q.out
+++ b/ql/src/test/results/clientpositive/perf/query91.q.out
@@ -43,7 +43,7 @@ Stage-0
                           Select Operator [SEL_27] (rows=790 width=204)
                             Output:["_col0","_col1","_col2"]
                             Filter Operator [FIL_79] (rows=790 width=204)
-                              predicate:(((cd_marital_status = 'M') or (cd_marital_status = 'W')) and ((cd_education_status = 'Unknown') or (cd_education_status = 'Advanced Degree')) and (((cd_marital_status = 'M') and (cd_education_status = 'Unknown')) or ((cd_marital_status = 'W') and (cd_education_status = 'Advanced Degree'))) and cd_demo_sk is not null)
+                              predicate:(((cd_education_status = 'Unknown') or (cd_education_status = 'Advanced Degree')) and ((cd_marital_status = 'M') or (cd_marital_status = 'W')) and (((cd_marital_status = 'M') and (cd_education_status = 'Unknown')) or ((cd_marital_status = 'W') and (cd_education_status = 'Advanced Degree'))) and cd_demo_sk is not null)
                               TableScan [TS_25] (rows=1583 width=204)
                                 default@customer_demographics,customer_demographics,Tbl:COMPLETE,Col:NONE,Output:["cd_demo_sk","cd_marital_status","cd_education_status"]
                       <-Reducer 4 [SIMPLE_EDGE]

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/spark/bucket5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket5.q.out b/ql/src/test/results/clientpositive/spark/bucket5.q.out
index a78fae0..b5d8890 100644
--- a/ql/src/test/results/clientpositive/spark/bucket5.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket5.q.out
@@ -73,7 +73,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -93,7 +93,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -141,7 +141,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -161,7 +161,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/spark/join0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join0.q.out b/ql/src/test/results/clientpositive/spark/join0.q.out
index bc98bb4..3398ae7 100644
--- a/ql/src/test/results/clientpositive/spark/join0.q.out
+++ b/ql/src/test/results/clientpositive/spark/join0.q.out
@@ -28,7 +28,7 @@ STAGE PLANS:
     Spark
       Edges:
         Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 1), Map 4 (PARTITION-LEVEL SORT, 1)
-        Reducer 3 <- Reducer 2 (PARTITION-LEVEL SORT, 4)
+        Reducer 3 <- Reducer 2 (PARTITION-LEVEL SORT, 2)
 #### A masked pattern was here ####
       Vertices:
         Map 1 

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.out b/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.out
index dfa6ea5..360abc9 100644
--- a/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.out
+++ b/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.out
@@ -28,7 +28,7 @@ STAGE PLANS:
   Stage: Stage-1
     Spark
       Edges:
-        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 4), Map 3 (PARTITION-LEVEL SORT, 4)
+        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 2), Map 3 (PARTITION-LEVEL SORT, 2)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -427,7 +427,7 @@ STAGE PLANS:
   Stage: Stage-1
     Spark
       Edges:
-        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 4), Map 3 (PARTITION-LEVEL SORT, 4)
+        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 2), Map 3 (PARTITION-LEVEL SORT, 2)
 #### A masked pattern was here ####
       Vertices:
         Map 1 

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/spark/reduce_deduplicate.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/reduce_deduplicate.q.out b/ql/src/test/results/clientpositive/spark/reduce_deduplicate.q.out
index b20e8fe..1fc9d28 100644
--- a/ql/src/test/results/clientpositive/spark/reduce_deduplicate.q.out
+++ b/ql/src/test/results/clientpositive/spark/reduce_deduplicate.q.out
@@ -54,7 +54,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -74,7 +74,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/spark/union_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/union_ppr.q.out b/ql/src/test/results/clientpositive/spark/union_ppr.q.out
index 8f7b1f2..01747c6 100644
--- a/ql/src/test/results/clientpositive/spark/union_ppr.q.out
+++ b/ql/src/test/results/clientpositive/spark/union_ppr.q.out
@@ -68,7 +68,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -114,7 +114,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -191,7 +191,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -237,7 +237,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
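
Note: every COLUMN_STATS_ACCURATE hunk in this patch flips the same way: the property's JSON keys come out in alphabetical order ("BASIC_STATS" before "COLUMN_STATS") instead of raw HashMap insertion/hash order. The sketch below is a minimal, hypothetical illustration of that effect — plain java.util, not the actual Hive serializer, and the class/method names (SortedJson, toSortedJson) are invented for this note. It renders a property map through a sorted view so the emitted JSON no longer depends on which JDK built the map:

import java.util.HashMap;
import java.util.Map;
import java.util.StringJoiner;
import java.util.TreeMap;

public class SortedJson {
    // Render a property map as JSON with keys in sorted (alphabetical) order.
    // Values are assumed to be pre-rendered JSON fragments (a simplification);
    // the point is only that key order comes from TreeMap, not HashMap.
    static String toSortedJson(Map<String, String> props) {
        StringJoiner json = new StringJoiner(",", "{", "}");
        for (Map.Entry<String, String> e : new TreeMap<>(props).entrySet()) {
            json.add("\"" + e.getKey() + "\":" + e.getValue());
        }
        return json.toString();
    }

    public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        props.put("COLUMN_STATS", "{\"key\":\"true\",\"value\":\"true\"}");
        props.put("BASIC_STATS", "\"true\"");
        // Prints {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
        // on any JDK, matching the updated golden files above.
        System.out.println(toSortedJson(props));
    }
}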

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/stats_list_bucket.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/stats_list_bucket.q.out b/ql/src/test/results/clientpositive/stats_list_bucket.q.out
index c34c414..c66da97 100644
--- a/ql/src/test/results/clientpositive/stats_list_bucket.q.out
+++ b/ql/src/test/results/clientpositive/stats_list_bucket.q.out
@@ -168,7 +168,7 @@ Stored As SubDirectories:	Yes
 Skewed Columns:     	[c1, c2]            	 
 Skewed Values:      	[[466, val_466], [287, val_287], [82, val_82]]	 
 #### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[466, val_466]=/stats_list_bucket_1/c1=466/c2=val_466, [287, val_287]=/stats_list_bucket_1/c1=287/c2=val_287, [82, val_82]=/stats_list_bucket_1/c1=82/c2=val_82}	 
+Skewed Value to Truncated Path:	{[466, val_466]=/stats_list_bucket_1/c1=466/c2=val_466, [82, val_82]=/stats_list_bucket_1/c1=82/c2=val_82, [287, val_287]=/stats_list_bucket_1/c1=287/c2=val_287}	 
 Storage Desc Params:	 	 
 	serialization.format	1                   
 PREHOOK: query: drop table stats_list_bucket

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/tez/bucket2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/bucket2.q.out b/ql/src/test/results/clientpositive/tez/bucket2.q.out
index 800edf3..e78a1a7 100644
--- a/ql/src/test/results/clientpositive/tez/bucket2.q.out
+++ b/ql/src/test/results/clientpositive/tez/bucket2.q.out
@@ -59,7 +59,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -79,7 +79,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/udaf_collect_set_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udaf_collect_set_2.q.out b/ql/src/test/results/clientpositive/udaf_collect_set_2.q.out
index 536234f..7425988 100644
--- a/ql/src/test/results/clientpositive/udaf_collect_set_2.q.out
+++ b/ql/src/test/results/clientpositive/udaf_collect_set_2.q.out
@@ -232,9 +232,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"name":"Chris","date":"2013-06-21","sub":{"\"bread\"":15.2,"\"juice\"":21.45}},{"name":"Chris","date":"2014-10-11","sub":{"\"grape\"":1200.5,"\"rice\"":29.36}}]
-2	[{"name":"John","date":"2013-08-10","sub":{"\"beef\"":210.57,"\"yogurt\"":126.57}},{"name":"John","date":"2014-06-25","sub":{"\"chocolate\"":3.65,"\"water\"":420.36}},{"name":"John","date":"2015-01-15","sub":{"\"milk\"":27.45}}]
-3	[{"name":"Martin","date":"2014-05-11","sub":{"\"orange\"":41.35,"\"apple\"":30.5}},{"name":"Martin","date":"2014-12-12","sub":{"\"icecream\"":210.03,"\"coffee":500.0,"\"banana\"":100.56}}]
+1	[{"name":"Chris","date":"2013-06-21","sub":{"\"juice\"":21.45,"\"bread\"":15.2}},{"name":"Chris","date":"2014-10-11","sub":{"\"grape\"":1200.5,"\"rice\"":29.36}}]
+2	[{"name":"John","date":"2013-08-10","sub":{"\"yogurt\"":126.57,"\"beef\"":210.57}},{"name":"John","date":"2014-06-25","sub":{"\"chocolate\"":3.65,"\"water\"":420.36}},{"name":"John","date":"2015-01-15","sub":{"\"milk\"":27.45}}]
+3	[{"name":"Martin","date":"2014-05-11","sub":{"\"apple\"":30.5,"\"orange\"":41.35}},{"name":"Martin","date":"2014-12-12","sub":{"\"icecream\"":210.03,"\"banana\"":100.56,"\"coffee":500.0}}]
 PREHOOK: query: SELECT c.id, sort_array(collect_list(named_struct("name", c.name, "date", o.date, "sub", o.sub)))
 FROM customers c
 INNER JOIN nested_orders o
@@ -251,9 +251,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"name":"Chris","date":"2013-06-21","sub":{"\"bread\"":15.2,"\"juice\"":21.45}},{"name":"Chris","date":"2013-06-21","sub":{"\"bread\"":15.2,"\"juice\"":21.45}},{"name":"Chris","date":"2014-10-11","sub":{"\"grape\"":1200.5,"\"rice\"":29.36}}]
-2	[{"name":"John","date":"2013-08-10","sub":{"\"beef\"":210.57,"\"yogurt\"":126.57}},{"name":"John","date":"2014-06-25","sub":{"\"chocolate\"":3.65,"\"water\"":420.36}},{"name":"John","date":"2015-01-15","sub":{"\"milk\"":27.45}}]
-3	[{"name":"Martin","date":"2014-05-11","sub":{"\"orange\"":41.35,"\"apple\"":30.5}},{"name":"Martin","date":"2014-12-12","sub":{"\"icecream\"":210.03,"\"coffee":500.0,"\"banana\"":100.56}}]
+1	[{"name":"Chris","date":"2013-06-21","sub":{"\"juice\"":21.45,"\"bread\"":15.2}},{"name":"Chris","date":"2013-06-21","sub":{"\"juice\"":21.45,"\"bread\"":15.2}},{"name":"Chris","date":"2014-10-11","sub":{"\"grape\"":1200.5,"\"rice\"":29.36}}]
+2	[{"name":"John","date":"2013-08-10","sub":{"\"yogurt\"":126.57,"\"beef\"":210.57}},{"name":"John","date":"2014-06-25","sub":{"\"chocolate\"":3.65,"\"water\"":420.36}},{"name":"John","date":"2015-01-15","sub":{"\"milk\"":27.45}}]
+3	[{"name":"Martin","date":"2014-05-11","sub":{"\"apple\"":30.5,"\"orange\"":41.35}},{"name":"Martin","date":"2014-12-12","sub":{"\"icecream\"":210.03,"\"banana\"":100.56,"\"coffee":500.0}}]
 PREHOOK: query: SELECT c.id, sort_array(collect_set(struct(c.name, o.date, o.sub)))
 FROM customers c
 INNER JOIN nested_orders o
@@ -270,9 +270,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"col1":"Chris","col2":"2013-06-21","col3":{"\"bread\"":15.2,"\"juice\"":21.45}},{"col1":"Chris","col2":"2014-10-11","col3":{"\"grape\"":1200.5,"\"rice\"":29.36}}]
-2	[{"col1":"John","col2":"2013-08-10","col3":{"\"beef\"":210.57,"\"yogurt\"":126.57}},{"col1":"John","col2":"2014-06-25","col3":{"\"chocolate\"":3.65,"\"water\"":420.36}},{"col1":"John","col2":"2015-01-15","col3":{"\"milk\"":27.45}}]
-3	[{"col1":"Martin","col2":"2014-05-11","col3":{"\"orange\"":41.35,"\"apple\"":30.5}},{"col1":"Martin","col2":"2014-12-12","col3":{"\"icecream\"":210.03,"\"coffee":500.0,"\"banana\"":100.56}}]
+1	[{"col1":"Chris","col2":"2013-06-21","col3":{"\"juice\"":21.45,"\"bread\"":15.2}},{"col1":"Chris","col2":"2014-10-11","col3":{"\"grape\"":1200.5,"\"rice\"":29.36}}]
+2	[{"col1":"John","col2":"2013-08-10","col3":{"\"yogurt\"":126.57,"\"beef\"":210.57}},{"col1":"John","col2":"2014-06-25","col3":{"\"chocolate\"":3.65,"\"water\"":420.36}},{"col1":"John","col2":"2015-01-15","col3":{"\"milk\"":27.45}}]
+3	[{"col1":"Martin","col2":"2014-05-11","col3":{"\"apple\"":30.5,"\"orange\"":41.35}},{"col1":"Martin","col2":"2014-12-12","col3":{"\"icecream\"":210.03,"\"banana\"":100.56,"\"coffee":500.0}}]
 PREHOOK: query: SELECT c.id, sort_array(collect_list(struct(c.name, o.date, o.sub)))
 FROM customers c
 INNER JOIN nested_orders o
@@ -289,9 +289,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"col1":"Chris","col2":"2013-06-21","col3":{"\"bread\"":15.2,"\"juice\"":21.45}},{"col1":"Chris","col2":"2013-06-21","col3":{"\"bread\"":15.2,"\"juice\"":21.45}},{"col1":"Chris","col2":"2014-10-11","col3":{"\"grape\"":1200.5,"\"rice\"":29.36}}]
-2	[{"col1":"John","col2":"2013-08-10","col3":{"\"beef\"":210.57,"\"yogurt\"":126.57}},{"col1":"John","col2":"2014-06-25","col3":{"\"chocolate\"":3.65,"\"water\"":420.36}},{"col1":"John","col2":"2015-01-15","col3":{"\"milk\"":27.45}}]
-3	[{"col1":"Martin","col2":"2014-05-11","col3":{"\"orange\"":41.35,"\"apple\"":30.5}},{"col1":"Martin","col2":"2014-12-12","col3":{"\"icecream\"":210.03,"\"coffee":500.0,"\"banana\"":100.56}}]
+1	[{"col1":"Chris","col2":"2013-06-21","col3":{"\"juice\"":21.45,"\"bread\"":15.2}},{"col1":"Chris","col2":"2013-06-21","col3":{"\"juice\"":21.45,"\"bread\"":15.2}},{"col1":"Chris","col2":"2014-10-11","col3":{"\"grape\"":1200.5,"\"rice\"":29.36}}]
+2	[{"col1":"John","col2":"2013-08-10","col3":{"\"yogurt\"":126.57,"\"beef\"":210.57}},{"col1":"John","col2":"2014-06-25","col3":{"\"chocolate\"":3.65,"\"water\"":420.36}},{"col1":"John","col2":"2015-01-15","col3":{"\"milk\"":27.45}}]
+3	[{"col1":"Martin","col2":"2014-05-11","col3":{"\"apple\"":30.5,"\"orange\"":41.35}},{"col1":"Martin","col2":"2014-12-12","col3":{"\"icecream\"":210.03,"\"banana\"":100.56,"\"coffee":500.0}}]
 PREHOOK: query: -- 1.3 when field is list
 
 SELECT c.id, sort_array(collect_set(named_struct("name", c.name, "date", o.date, "sub", map_values(o.sub))))
@@ -312,9 +312,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"name":"Chris","date":"2013-06-21","sub":[15.2,21.45]},{"name":"Chris","date":"2014-10-11","sub":[1200.5,29.36]}]
-2	[{"name":"John","date":"2013-08-10","sub":[210.57,126.57]},{"name":"John","date":"2014-06-25","sub":[3.65,420.36]},{"name":"John","date":"2015-01-15","sub":[27.45]}]
-3	[{"name":"Martin","date":"2014-05-11","sub":[41.35,30.5]},{"name":"Martin","date":"2014-12-12","sub":[210.03,500.0,100.56]}]
+1	[{"name":"Chris","date":"2013-06-21","sub":[21.45,15.2]},{"name":"Chris","date":"2014-10-11","sub":[1200.5,29.36]}]
+2	[{"name":"John","date":"2013-08-10","sub":[126.57,210.57]},{"name":"John","date":"2014-06-25","sub":[3.65,420.36]},{"name":"John","date":"2015-01-15","sub":[27.45]}]
+3	[{"name":"Martin","date":"2014-05-11","sub":[30.5,41.35]},{"name":"Martin","date":"2014-12-12","sub":[210.03,100.56,500.0]}]
 PREHOOK: query: SELECT c.id, sort_array(collect_list(named_struct("name", c.name, "date", o.date, "sub", map_values(o.sub))))
 FROM customers c
 INNER JOIN nested_orders o
@@ -331,9 +331,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"name":"Chris","date":"2013-06-21","sub":[15.2,21.45]},{"name":"Chris","date":"2013-06-21","sub":[15.2,21.45]},{"name":"Chris","date":"2014-10-11","sub":[1200.5,29.36]}]
-2	[{"name":"John","date":"2013-08-10","sub":[210.57,126.57]},{"name":"John","date":"2014-06-25","sub":[3.65,420.36]},{"name":"John","date":"2015-01-15","sub":[27.45]}]
-3	[{"name":"Martin","date":"2014-05-11","sub":[41.35,30.5]},{"name":"Martin","date":"2014-12-12","sub":[210.03,500.0,100.56]}]
+1	[{"name":"Chris","date":"2013-06-21","sub":[21.45,15.2]},{"name":"Chris","date":"2013-06-21","sub":[21.45,15.2]},{"name":"Chris","date":"2014-10-11","sub":[1200.5,29.36]}]
+2	[{"name":"John","date":"2013-08-10","sub":[126.57,210.57]},{"name":"John","date":"2014-06-25","sub":[3.65,420.36]},{"name":"John","date":"2015-01-15","sub":[27.45]}]
+3	[{"name":"Martin","date":"2014-05-11","sub":[30.5,41.35]},{"name":"Martin","date":"2014-12-12","sub":[210.03,100.56,500.0]}]
 PREHOOK: query: SELECT c.id, sort_array(collect_set(struct(c.name, o.date, map_values(o.sub))))
 FROM customers c
 INNER JOIN nested_orders o
@@ -350,9 +350,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"col1":"Chris","col2":"2013-06-21","col3":[15.2,21.45]},{"col1":"Chris","col2":"2014-10-11","col3":[1200.5,29.36]}]
-2	[{"col1":"John","col2":"2013-08-10","col3":[210.57,126.57]},{"col1":"John","col2":"2014-06-25","col3":[3.65,420.36]},{"col1":"John","col2":"2015-01-15","col3":[27.45]}]
-3	[{"col1":"Martin","col2":"2014-05-11","col3":[41.35,30.5]},{"col1":"Martin","col2":"2014-12-12","col3":[210.03,500.0,100.56]}]
+1	[{"col1":"Chris","col2":"2013-06-21","col3":[21.45,15.2]},{"col1":"Chris","col2":"2014-10-11","col3":[1200.5,29.36]}]
+2	[{"col1":"John","col2":"2013-08-10","col3":[126.57,210.57]},{"col1":"John","col2":"2014-06-25","col3":[3.65,420.36]},{"col1":"John","col2":"2015-01-15","col3":[27.45]}]
+3	[{"col1":"Martin","col2":"2014-05-11","col3":[30.5,41.35]},{"col1":"Martin","col2":"2014-12-12","col3":[210.03,100.56,500.0]}]
 PREHOOK: query: SELECT c.id, sort_array(collect_list(struct(c.name, o.date, map_values(o.sub))))
 FROM customers c
 INNER JOIN nested_orders o
@@ -369,9 +369,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"col1":"Chris","col2":"2013-06-21","col3":[15.2,21.45]},{"col1":"Chris","col2":"2013-06-21","col3":[15.2,21.45]},{"col1":"Chris","col2":"2014-10-11","col3":[1200.5,29.36]}]
-2	[{"col1":"John","col2":"2013-08-10","col3":[210.57,126.57]},{"col1":"John","col2":"2014-06-25","col3":[3.65,420.36]},{"col1":"John","col2":"2015-01-15","col3":[27.45]}]
-3	[{"col1":"Martin","col2":"2014-05-11","col3":[41.35,30.5]},{"col1":"Martin","col2":"2014-12-12","col3":[210.03,500.0,100.56]}]
+1	[{"col1":"Chris","col2":"2013-06-21","col3":[21.45,15.2]},{"col1":"Chris","col2":"2013-06-21","col3":[21.45,15.2]},{"col1":"Chris","col2":"2014-10-11","col3":[1200.5,29.36]}]
+2	[{"col1":"John","col2":"2013-08-10","col3":[126.57,210.57]},{"col1":"John","col2":"2014-06-25","col3":[3.65,420.36]},{"col1":"John","col2":"2015-01-15","col3":[27.45]}]
+3	[{"col1":"Martin","col2":"2014-05-11","col3":[30.5,41.35]},{"col1":"Martin","col2":"2014-12-12","col3":[210.03,100.56,500.0]}]
 PREHOOK: query: -- 2. test array
 
 -- 2.1 when field is primitive
@@ -480,9 +480,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[[{"\"bread\"":15.2,"\"juice\"":21.45}],[{"\"grape\"":1200.5,"\"rice\"":29.36}]]
-2	[[{"\"milk\"":27.45}],[{"\"beef\"":210.57,"\"yogurt\"":126.57}],[{"\"chocolate\"":3.65,"\"water\"":420.36}]]
-3	[[{"\"orange\"":41.35,"\"apple\"":30.5}],[{"\"icecream\"":210.03,"\"coffee":500.0,"\"banana\"":100.56}]]
+1	[[{"\"juice\"":21.45,"\"bread\"":15.2}],[{"\"grape\"":1200.5,"\"rice\"":29.36}]]
+2	[[{"\"milk\"":27.45}],[{"\"yogurt\"":126.57,"\"beef\"":210.57}],[{"\"chocolate\"":3.65,"\"water\"":420.36}]]
+3	[[{"\"apple\"":30.5,"\"orange\"":41.35}],[{"\"icecream\"":210.03,"\"banana\"":100.56,"\"coffee":500.0}]]
 PREHOOK: query: SELECT c.id, sort_array(collect_list(array(o.sub)))
 FROM customers c
 INNER JOIN nested_orders o
@@ -499,9 +499,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[[{"\"bread\"":15.2,"\"juice\"":21.45}],[{"\"bread\"":15.2,"\"juice\"":21.45}],[{"\"grape\"":1200.5,"\"rice\"":29.36}]]
-2	[[{"\"milk\"":27.45}],[{"\"beef\"":210.57,"\"yogurt\"":126.57}],[{"\"chocolate\"":3.65,"\"water\"":420.36}]]
-3	[[{"\"orange\"":41.35,"\"apple\"":30.5}],[{"\"icecream\"":210.03,"\"coffee":500.0,"\"banana\"":100.56}]]
+1	[[{"\"juice\"":21.45,"\"bread\"":15.2}],[{"\"juice\"":21.45,"\"bread\"":15.2}],[{"\"grape\"":1200.5,"\"rice\"":29.36}]]
+2	[[{"\"milk\"":27.45}],[{"\"yogurt\"":126.57,"\"beef\"":210.57}],[{"\"chocolate\"":3.65,"\"water\"":420.36}]]
+3	[[{"\"apple\"":30.5,"\"orange\"":41.35}],[{"\"icecream\"":210.03,"\"banana\"":100.56,"\"coffee":500.0}]]
 PREHOOK: query: -- 2.3 when field is list
 
 SELECT c.id, sort_array(collect_set(array(map_values(o.sub))))
@@ -522,9 +522,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[[[15.2,21.45]],[[1200.5,29.36]]]
-2	[[[3.65,420.36]],[[27.45]],[[210.57,126.57]]]
-3	[[[41.35,30.5]],[[210.03,500.0,100.56]]]
+1	[[[21.45,15.2]],[[1200.5,29.36]]]
+2	[[[3.65,420.36]],[[27.45]],[[126.57,210.57]]]
+3	[[[30.5,41.35]],[[210.03,100.56,500.0]]]
 PREHOOK: query: SELECT c.id, sort_array(collect_list(array(map_values(o.sub))))
 FROM customers c
 INNER JOIN nested_orders o
@@ -541,9 +541,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[[[15.2,21.45]],[[15.2,21.45]],[[1200.5,29.36]]]
-2	[[[3.65,420.36]],[[27.45]],[[210.57,126.57]]]
-3	[[[41.35,30.5]],[[210.03,500.0,100.56]]]
+1	[[[21.45,15.2]],[[21.45,15.2]],[[1200.5,29.36]]]
+2	[[[3.65,420.36]],[[27.45]],[[126.57,210.57]]]
+3	[[[30.5,41.35]],[[210.03,100.56,500.0]]]
 PREHOOK: query: -- 3. test map
 
 -- 3.1 when field is primitive
@@ -652,9 +652,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"sub":{"\"bread\"":15.2,"\"juice\"":21.45}},{"sub":{"\"grape\"":1200.5,"\"rice\"":29.36}}]
-2	[{"sub":{"\"milk\"":27.45}},{"sub":{"\"beef\"":210.57,"\"yogurt\"":126.57}},{"sub":{"\"chocolate\"":3.65,"\"water\"":420.36}}]
-3	[{"sub":{"\"orange\"":41.35,"\"apple\"":30.5}},{"sub":{"\"icecream\"":210.03,"\"coffee":500.0,"\"banana\"":100.56}}]
+1	[{"sub":{"\"juice\"":21.45,"\"bread\"":15.2}},{"sub":{"\"grape\"":1200.5,"\"rice\"":29.36}}]
+2	[{"sub":{"\"milk\"":27.45}},{"sub":{"\"yogurt\"":126.57,"\"beef\"":210.57}},{"sub":{"\"chocolate\"":3.65,"\"water\"":420.36}}]
+3	[{"sub":{"\"apple\"":30.5,"\"orange\"":41.35}},{"sub":{"\"icecream\"":210.03,"\"banana\"":100.56,"\"coffee":500.0}}]
 PREHOOK: query: SELECT c.id, sort_array(collect_list(map("sub", o.sub)))
 FROM customers c
 INNER JOIN nested_orders o
@@ -671,9 +671,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"sub":{"\"bread\"":15.2,"\"juice\"":21.45}},{"sub":{"\"bread\"":15.2,"\"juice\"":21.45}},{"sub":{"\"grape\"":1200.5,"\"rice\"":29.36}}]
-2	[{"sub":{"\"milk\"":27.45}},{"sub":{"\"beef\"":210.57,"\"yogurt\"":126.57}},{"sub":{"\"chocolate\"":3.65,"\"water\"":420.36}}]
-3	[{"sub":{"\"orange\"":41.35,"\"apple\"":30.5}},{"sub":{"\"icecream\"":210.03,"\"coffee":500.0,"\"banana\"":100.56}}]
+1	[{"sub":{"\"juice\"":21.45,"\"bread\"":15.2}},{"sub":{"\"juice\"":21.45,"\"bread\"":15.2}},{"sub":{"\"grape\"":1200.5,"\"rice\"":29.36}}]
+2	[{"sub":{"\"milk\"":27.45}},{"sub":{"\"yogurt\"":126.57,"\"beef\"":210.57}},{"sub":{"\"chocolate\"":3.65,"\"water\"":420.36}}]
+3	[{"sub":{"\"apple\"":30.5,"\"orange\"":41.35}},{"sub":{"\"icecream\"":210.03,"\"banana\"":100.56,"\"coffee":500.0}}]
 PREHOOK: query: -- 3.3 when field is list
 
 SELECT c.id, sort_array(collect_set(map("sub", map_values(o.sub))))
@@ -694,9 +694,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"sub":[15.2,21.45]},{"sub":[1200.5,29.36]}]
-2	[{"sub":[3.65,420.36]},{"sub":[27.45]},{"sub":[210.57,126.57]}]
-3	[{"sub":[41.35,30.5]},{"sub":[210.03,500.0,100.56]}]
+1	[{"sub":[21.45,15.2]},{"sub":[1200.5,29.36]}]
+2	[{"sub":[3.65,420.36]},{"sub":[27.45]},{"sub":[126.57,210.57]}]
+3	[{"sub":[30.5,41.35]},{"sub":[210.03,100.56,500.0]}]
 PREHOOK: query: SELECT c.id, sort_array(collect_list(map("sub", map_values(o.sub))))
 FROM customers c
 INNER JOIN nested_orders o
@@ -713,9 +713,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@customers
 POSTHOOK: Input: default@nested_orders
 #### A masked pattern was here ####
-1	[{"sub":[15.2,21.45]},{"sub":[15.2,21.45]},{"sub":[1200.5,29.36]}]
-2	[{"sub":[3.65,420.36]},{"sub":[27.45]},{"sub":[210.57,126.57]}]
-3	[{"sub":[41.35,30.5]},{"sub":[210.03,500.0,100.56]}]
+1	[{"sub":[21.45,15.2]},{"sub":[21.45,15.2]},{"sub":[1200.5,29.36]}]
+2	[{"sub":[3.65,420.36]},{"sub":[27.45]},{"sub":[126.57,210.57]}]
+3	[{"sub":[30.5,41.35]},{"sub":[210.03,100.56,500.0]}]
 PREHOOK: query: -- clean up
 
 DROP TABLE customer

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/udf_sort_array.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf_sort_array.q.out b/ql/src/test/results/clientpositive/udf_sort_array.q.out
index 523b297..a29763a 100644
--- a/ql/src/test/results/clientpositive/udf_sort_array.q.out
+++ b/ql/src/test/results/clientpositive/udf_sort_array.q.out
@@ -119,7 +119,7 @@ SELECT sort_array(array(map("b", 2, "a", 9, "c", 7), map("c", 3, "b", 5, "a", 1)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
-[{"b":5,"a":1,"c":3},{"b":8,"a":1,"c":6},{"b":2,"a":9,"c":7}]
+[{"a":1,"b":5,"c":3},{"a":1,"b":8,"c":6},{"a":9,"b":2,"c":7}]
 PREHOOK: query: -- Test it against data in a table.
 CREATE TABLE dest1 (
 	tinyints ARRAY<TINYINT>,
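
Note: the result rows in the hunks above differ only in the iteration order of map-typed values, which is the signature of output printed straight from a java.util.HashMap: its iteration order is unspecified and changed between JDK7 and JDK8, so any golden file that bakes it in fails on the other JDK. A small self-contained demonstration (plain java.util, nothing Hive-specific; the class name MapOrderDemo is invented for this note):

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class MapOrderDemo {
    public static void main(String[] args) {
        Map<String, Integer> m = new HashMap<>();
        m.put("b", 2);
        m.put("a", 9);
        m.put("c", 7);
        // HashMap iteration order is unspecified and has varied across
        // JDK releases, so this line is not stable test output.
        System.out.println(m);
        // A sorted view is deterministic: always {a=9, b=2, c=7}.
        System.out.println(new TreeMap<>(m));
    }
}

Pinning the order with a sorted view (or comparing sorted output, as these sort_array tests do) is what makes the regenerated expected files JDK-independent.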

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/vector_complex_all.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_complex_all.q.out b/ql/src/test/results/clientpositive/vector_complex_all.q.out
index 2ae7c1b..a54a371 100644
--- a/ql/src/test/results/clientpositive/vector_complex_all.q.out
+++ b/ql/src/test/results/clientpositive/vector_complex_all.q.out
@@ -108,9 +108,9 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@orc_create_complex
 #### A masked pattern was here ####
 orc_create_complex.str	orc_create_complex.mp	orc_create_complex.lst	orc_create_complex.strct
-line1	{"key13":"value13","key12":"value12","key11":"value11"}	["a","b","c"]	{"a":"one","b":"two"}
-line2	{"key21":"value21","key23":"value23","key22":"value22"}	["d","e","f"]	{"a":"three","b":"four"}
-line3	{"key33":"value33","key31":"value31","key32":"value32"}	["g","h","i"]	{"a":"five","b":"six"}
+line1	{"key13":"value13","key11":"value11","key12":"value12"}	["a","b","c"]	{"a":"one","b":"two"}
+line2	{"key21":"value21","key22":"value22","key23":"value23"}	["d","e","f"]	{"a":"three","b":"four"}
+line3	{"key31":"value31","key32":"value32","key33":"value33"}	["g","h","i"]	{"a":"five","b":"six"}
 PREHOOK: query: -- However, since this query is not referencing the complex fields, it should vectorize.
 EXPLAIN
 SELECT COUNT(*) FROM orc_create_complex

http://git-wip-us.apache.org/repos/asf/hive/blob/f38a42e5/ql/src/test/results/clientpositive/vector_udf1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_udf1.q.out b/ql/src/test/results/clientpositive/vector_udf1.q.out
index 232d78e..92fa06f 100644
--- a/ql/src/test/results/clientpositive/vector_udf1.q.out
+++ b/ql/src/test/results/clientpositive/vector_udf1.q.out
@@ -1285,7 +1285,7 @@ from varchar_udf_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@varchar_udf_1
 #### A masked pattern was here ####
-{"b":"2","a":"1","c":"3"}	{"b":"2","a":"1","c":"3"}
+{"a":"1","b":"2","c":"3"}	{"a":"1","b":"2","c":"3"}
 PREHOOK: query: explain
 select
   substr(c2, 1, 3),


[02/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/varchar_udf1.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/varchar_udf1.q.java1.7.out b/ql/src/test/results/clientpositive/varchar_udf1.q.java1.7.out
deleted file mode 100644
index 459d93b..0000000
--- a/ql/src/test/results/clientpositive/varchar_udf1.q.java1.7.out
+++ /dev/null
@@ -1,457 +0,0 @@
-PREHOOK: query: drop table varchar_udf_1
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table varchar_udf_1
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table varchar_udf_1 (c1 string, c2 string, c3 varchar(10), c4 varchar(20))
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@varchar_udf_1
-POSTHOOK: query: create table varchar_udf_1 (c1 string, c2 string, c3 varchar(10), c4 varchar(20))
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@varchar_udf_1
-PREHOOK: query: insert overwrite table varchar_udf_1
-  select key, value, key, value from src where key = '238' limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@varchar_udf_1
-POSTHOOK: query: insert overwrite table varchar_udf_1
-  select key, value, key, value from src where key = '238' limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@varchar_udf_1
-POSTHOOK: Lineage: varchar_udf_1.c1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: varchar_udf_1.c2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: varchar_udf_1.c3 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: varchar_udf_1.c4 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- UDFs with varchar support
-select 
-  concat(c1, c2),
-  concat(c3, c4),
-  concat(c1, c2) = concat(c3, c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- UDFs with varchar support
-select 
-  concat(c1, c2),
-  concat(c3, c4),
-  concat(c1, c2) = concat(c3, c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-238val_238	238val_238	true
-PREHOOK: query: select
-  upper(c2),
-  upper(c4),
-  upper(c2) = upper(c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  upper(c2),
-  upper(c4),
-  upper(c2) = upper(c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-VAL_238	VAL_238	true
-PREHOOK: query: select
-  lower(c2),
-  lower(c4),
-  lower(c2) = lower(c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  lower(c2),
-  lower(c4),
-  lower(c2) = lower(c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-val_238	val_238	true
-PREHOOK: query: -- Scalar UDFs
-select
-  ascii(c2),
-  ascii(c4),
-  ascii(c2) = ascii(c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: -- Scalar UDFs
-select
-  ascii(c2),
-  ascii(c4),
-  ascii(c2) = ascii(c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-118	118	true
-PREHOOK: query: select 
-  concat_ws('|', c1, c2),
-  concat_ws('|', c3, c4),
-  concat_ws('|', c1, c2) = concat_ws('|', c3, c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select 
-  concat_ws('|', c1, c2),
-  concat_ws('|', c3, c4),
-  concat_ws('|', c1, c2) = concat_ws('|', c3, c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-238|val_238	238|val_238	true
-PREHOOK: query: select
-  decode(encode(c2, 'US-ASCII'), 'US-ASCII'),
-  decode(encode(c4, 'US-ASCII'), 'US-ASCII'),
-  decode(encode(c2, 'US-ASCII'), 'US-ASCII') = decode(encode(c4, 'US-ASCII'), 'US-ASCII')
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  decode(encode(c2, 'US-ASCII'), 'US-ASCII'),
-  decode(encode(c4, 'US-ASCII'), 'US-ASCII'),
-  decode(encode(c2, 'US-ASCII'), 'US-ASCII') = decode(encode(c4, 'US-ASCII'), 'US-ASCII')
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-val_238	val_238	true
-PREHOOK: query: select
-  instr(c2, '_'),
-  instr(c4, '_'),
-  instr(c2, '_') = instr(c4, '_')
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  instr(c2, '_'),
-  instr(c4, '_'),
-  instr(c2, '_') = instr(c4, '_')
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-4	4	true
-PREHOOK: query: select
-  length(c2),
-  length(c4),
-  length(c2) = length(c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  length(c2),
-  length(c4),
-  length(c2) = length(c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-7	7	true
-PREHOOK: query: select
-  locate('a', 'abcdabcd', 3),
-  locate(cast('a' as varchar(1)), cast('abcdabcd' as varchar(10)), 3),
-  locate('a', 'abcdabcd', 3) = locate(cast('a' as varchar(1)), cast('abcdabcd' as varchar(10)), 3)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  locate('a', 'abcdabcd', 3),
-  locate(cast('a' as varchar(1)), cast('abcdabcd' as varchar(10)), 3),
-  locate('a', 'abcdabcd', 3) = locate(cast('a' as varchar(1)), cast('abcdabcd' as varchar(10)), 3)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-5	5	true
-PREHOOK: query: select
-  lpad(c2, 15, ' '),
-  lpad(c4, 15, ' '),
-  lpad(c2, 15, ' ') = lpad(c4, 15, ' ')
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  lpad(c2, 15, ' '),
-  lpad(c4, 15, ' '),
-  lpad(c2, 15, ' ') = lpad(c4, 15, ' ')
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-        val_238	        val_238	true
-PREHOOK: query: select
-  ltrim(c2),
-  ltrim(c4),
-  ltrim(c2) = ltrim(c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  ltrim(c2),
-  ltrim(c4),
-  ltrim(c2) = ltrim(c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-val_238	val_238	true
-PREHOOK: query: select
-  c2 regexp 'val',
-  c4 regexp 'val',
-  (c2 regexp 'val') = (c4 regexp 'val')
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  c2 regexp 'val',
-  c4 regexp 'val',
-  (c2 regexp 'val') = (c4 regexp 'val')
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-true	true	true
-PREHOOK: query: select
-  regexp_extract(c2, 'val_([0-9]+)', 1),
-  regexp_extract(c4, 'val_([0-9]+)', 1),
-  regexp_extract(c2, 'val_([0-9]+)', 1) = regexp_extract(c4, 'val_([0-9]+)', 1)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  regexp_extract(c2, 'val_([0-9]+)', 1),
-  regexp_extract(c4, 'val_([0-9]+)', 1),
-  regexp_extract(c2, 'val_([0-9]+)', 1) = regexp_extract(c4, 'val_([0-9]+)', 1)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-238	238	true
-PREHOOK: query: select
-  regexp_replace(c2, 'val', 'replaced'),
-  regexp_replace(c4, 'val', 'replaced'),
-  regexp_replace(c2, 'val', 'replaced') = regexp_replace(c4, 'val', 'replaced')
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  regexp_replace(c2, 'val', 'replaced'),
-  regexp_replace(c4, 'val', 'replaced'),
-  regexp_replace(c2, 'val', 'replaced') = regexp_replace(c4, 'val', 'replaced')
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-replaced_238	replaced_238	true
-PREHOOK: query: select
-  reverse(c2),
-  reverse(c4),
-  reverse(c2) = reverse(c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  reverse(c2),
-  reverse(c4),
-  reverse(c2) = reverse(c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-832_lav	832_lav	true
-PREHOOK: query: select
-  rpad(c2, 15, ' '),
-  rpad(c4, 15, ' '),
-  rpad(c2, 15, ' ') = rpad(c4, 15, ' ')
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  rpad(c2, 15, ' '),
-  rpad(c4, 15, ' '),
-  rpad(c2, 15, ' ') = rpad(c4, 15, ' ')
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-val_238        	val_238        	true
-PREHOOK: query: select
-  rtrim(c2),
-  rtrim(c4),
-  rtrim(c2) = rtrim(c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  rtrim(c2),
-  rtrim(c4),
-  rtrim(c2) = rtrim(c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-val_238	val_238	true
-PREHOOK: query: select
-  sentences('See spot run.  See jane run.'),
-  sentences(cast('See spot run.  See jane run.' as varchar(50)))
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  sentences('See spot run.  See jane run.'),
-  sentences(cast('See spot run.  See jane run.' as varchar(50)))
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-[["See","spot","run"],["See","jane","run"]]	[["See","spot","run"],["See","jane","run"]]
-PREHOOK: query: select
-  split(c2, '_'),
-  split(c4, '_')
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  split(c2, '_'),
-  split(c4, '_')
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-["val","238"]	["val","238"]
-PREHOOK: query: select 
-  str_to_map('a:1,b:2,c:3',',',':'),
-  str_to_map(cast('a:1,b:2,c:3' as varchar(20)),',',':')
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select 
-  str_to_map('a:1,b:2,c:3',',',':'),
-  str_to_map(cast('a:1,b:2,c:3' as varchar(20)),',',':')
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-{"b":"2","a":"1","c":"3"}	{"b":"2","a":"1","c":"3"}
-PREHOOK: query: select
-  substr(c2, 1, 3),
-  substr(c4, 1, 3),
-  substr(c2, 1, 3) = substr(c4, 1, 3)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  substr(c2, 1, 3),
-  substr(c4, 1, 3),
-  substr(c2, 1, 3) = substr(c4, 1, 3)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-val	val	true
-PREHOOK: query: select
-  trim(c2),
-  trim(c4),
-  trim(c2) = trim(c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  trim(c2),
-  trim(c4),
-  trim(c2) = trim(c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-val_238	val_238	true
-PREHOOK: query: -- Aggregate Functions
-select
-  compute_stats(c2, 16),
-  compute_stats(c4, 16)
-from varchar_udf_1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: -- Aggregate Functions
-select
-  compute_stats(c2, 16),
-  compute_stats(c4, 16)
-from varchar_udf_1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1,"ndvbitvector":"{0}{3}{2}{3}{1}{0}{2}{0}{1}{0}{0}{1}{3}{2}{0}{3}"}	{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1,"ndvbitvector":"{0}{3}{2}{3}{1}{0}{2}{0}{1}{0}{0}{1}{3}{2}{0}{3}"}
-PREHOOK: query: select
-  min(c2),
-  min(c4)
-from varchar_udf_1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  min(c2),
-  min(c4)
-from varchar_udf_1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-val_238	val_238
-PREHOOK: query: select
-  max(c2),
-  max(c4)
-from varchar_udf_1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  max(c2),
-  max(c4)
-from varchar_udf_1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-val_238	val_238
-PREHOOK: query: drop table varchar_udf_1
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@varchar_udf_1
-PREHOOK: Output: default@varchar_udf_1
-POSTHOOK: query: drop table varchar_udf_1
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@varchar_udf_1
-POSTHOOK: Output: default@varchar_udf_1

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/varchar_udf1.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/varchar_udf1.q.java1.8.out b/ql/src/test/results/clientpositive/varchar_udf1.q.java1.8.out
deleted file mode 100644
index ace8568..0000000
--- a/ql/src/test/results/clientpositive/varchar_udf1.q.java1.8.out
+++ /dev/null
@@ -1,457 +0,0 @@
-PREHOOK: query: drop table varchar_udf_1
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table varchar_udf_1
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table varchar_udf_1 (c1 string, c2 string, c3 varchar(10), c4 varchar(20))
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@varchar_udf_1
-POSTHOOK: query: create table varchar_udf_1 (c1 string, c2 string, c3 varchar(10), c4 varchar(20))
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@varchar_udf_1
-PREHOOK: query: insert overwrite table varchar_udf_1
-  select key, value, key, value from src where key = '238' limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@varchar_udf_1
-POSTHOOK: query: insert overwrite table varchar_udf_1
-  select key, value, key, value from src where key = '238' limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@varchar_udf_1
-POSTHOOK: Lineage: varchar_udf_1.c1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: varchar_udf_1.c2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: varchar_udf_1.c3 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: varchar_udf_1.c4 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- UDFs with varchar support
-select 
-  concat(c1, c2),
-  concat(c3, c4),
-  concat(c1, c2) = concat(c3, c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- UDFs with varchar support
-select 
-  concat(c1, c2),
-  concat(c3, c4),
-  concat(c1, c2) = concat(c3, c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-238val_238	238val_238	true
-PREHOOK: query: select
-  upper(c2),
-  upper(c4),
-  upper(c2) = upper(c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  upper(c2),
-  upper(c4),
-  upper(c2) = upper(c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-VAL_238	VAL_238	true
-PREHOOK: query: select
-  lower(c2),
-  lower(c4),
-  lower(c2) = lower(c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  lower(c2),
-  lower(c4),
-  lower(c2) = lower(c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-val_238	val_238	true
-PREHOOK: query: -- Scalar UDFs
-select
-  ascii(c2),
-  ascii(c4),
-  ascii(c2) = ascii(c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: -- Scalar UDFs
-select
-  ascii(c2),
-  ascii(c4),
-  ascii(c2) = ascii(c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-118	118	true
-PREHOOK: query: select 
-  concat_ws('|', c1, c2),
-  concat_ws('|', c3, c4),
-  concat_ws('|', c1, c2) = concat_ws('|', c3, c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select 
-  concat_ws('|', c1, c2),
-  concat_ws('|', c3, c4),
-  concat_ws('|', c1, c2) = concat_ws('|', c3, c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-238|val_238	238|val_238	true
-PREHOOK: query: select
-  decode(encode(c2, 'US-ASCII'), 'US-ASCII'),
-  decode(encode(c4, 'US-ASCII'), 'US-ASCII'),
-  decode(encode(c2, 'US-ASCII'), 'US-ASCII') = decode(encode(c4, 'US-ASCII'), 'US-ASCII')
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  decode(encode(c2, 'US-ASCII'), 'US-ASCII'),
-  decode(encode(c4, 'US-ASCII'), 'US-ASCII'),
-  decode(encode(c2, 'US-ASCII'), 'US-ASCII') = decode(encode(c4, 'US-ASCII'), 'US-ASCII')
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-val_238	val_238	true
-PREHOOK: query: select
-  instr(c2, '_'),
-  instr(c4, '_'),
-  instr(c2, '_') = instr(c4, '_')
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  instr(c2, '_'),
-  instr(c4, '_'),
-  instr(c2, '_') = instr(c4, '_')
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-4	4	true
-PREHOOK: query: select
-  length(c2),
-  length(c4),
-  length(c2) = length(c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  length(c2),
-  length(c4),
-  length(c2) = length(c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-7	7	true
-PREHOOK: query: select
-  locate('a', 'abcdabcd', 3),
-  locate(cast('a' as varchar(1)), cast('abcdabcd' as varchar(10)), 3),
-  locate('a', 'abcdabcd', 3) = locate(cast('a' as varchar(1)), cast('abcdabcd' as varchar(10)), 3)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  locate('a', 'abcdabcd', 3),
-  locate(cast('a' as varchar(1)), cast('abcdabcd' as varchar(10)), 3),
-  locate('a', 'abcdabcd', 3) = locate(cast('a' as varchar(1)), cast('abcdabcd' as varchar(10)), 3)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-5	5	true
-PREHOOK: query: select
-  lpad(c2, 15, ' '),
-  lpad(c4, 15, ' '),
-  lpad(c2, 15, ' ') = lpad(c4, 15, ' ')
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  lpad(c2, 15, ' '),
-  lpad(c4, 15, ' '),
-  lpad(c2, 15, ' ') = lpad(c4, 15, ' ')
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-        val_238	        val_238	true
-PREHOOK: query: select
-  ltrim(c2),
-  ltrim(c4),
-  ltrim(c2) = ltrim(c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  ltrim(c2),
-  ltrim(c4),
-  ltrim(c2) = ltrim(c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-val_238	val_238	true
-PREHOOK: query: select
-  regexp(c2, 'val'),
-  regexp(c4, 'val'),
-  regexp(c2, 'val') = regexp(c4, 'val')
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  regexp(c2, 'val'),
-  regexp(c4, 'val'),
-  regexp(c2, 'val') = regexp(c4, 'val')
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-true	true	true
-PREHOOK: query: select
-  regexp_extract(c2, 'val_([0-9]+)', 1),
-  regexp_extract(c4, 'val_([0-9]+)', 1),
-  regexp_extract(c2, 'val_([0-9]+)', 1) = regexp_extract(c4, 'val_([0-9]+)', 1)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  regexp_extract(c2, 'val_([0-9]+)', 1),
-  regexp_extract(c4, 'val_([0-9]+)', 1),
-  regexp_extract(c2, 'val_([0-9]+)', 1) = regexp_extract(c4, 'val_([0-9]+)', 1)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-238	238	true
-PREHOOK: query: select
-  regexp_replace(c2, 'val', 'replaced'),
-  regexp_replace(c4, 'val', 'replaced'),
-  regexp_replace(c2, 'val', 'replaced') = regexp_replace(c4, 'val', 'replaced')
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  regexp_replace(c2, 'val', 'replaced'),
-  regexp_replace(c4, 'val', 'replaced'),
-  regexp_replace(c2, 'val', 'replaced') = regexp_replace(c4, 'val', 'replaced')
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-replaced_238	replaced_238	true
-PREHOOK: query: select
-  reverse(c2),
-  reverse(c4),
-  reverse(c2) = reverse(c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  reverse(c2),
-  reverse(c4),
-  reverse(c2) = reverse(c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-832_lav	832_lav	true
-PREHOOK: query: select
-  rpad(c2, 15, ' '),
-  rpad(c4, 15, ' '),
-  rpad(c2, 15, ' ') = rpad(c4, 15, ' ')
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  rpad(c2, 15, ' '),
-  rpad(c4, 15, ' '),
-  rpad(c2, 15, ' ') = rpad(c4, 15, ' ')
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-val_238        	val_238        	true
-PREHOOK: query: select
-  rtrim(c2),
-  rtrim(c4),
-  rtrim(c2) = rtrim(c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  rtrim(c2),
-  rtrim(c4),
-  rtrim(c2) = rtrim(c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-val_238	val_238	true
-PREHOOK: query: select
-  sentences('See spot run.  See jane run.'),
-  sentences(cast('See spot run.  See jane run.' as varchar(50)))
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  sentences('See spot run.  See jane run.'),
-  sentences(cast('See spot run.  See jane run.' as varchar(50)))
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-[["See","spot","run"],["See","jane","run"]]	[["See","spot","run"],["See","jane","run"]]
-PREHOOK: query: select
-  split(c2, '_'),
-  split(c4, '_')
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  split(c2, '_'),
-  split(c4, '_')
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-["val","238"]	["val","238"]
-PREHOOK: query: select 
-  str_to_map('a:1,b:2,c:3',',',':'),
-  str_to_map(cast('a:1,b:2,c:3' as varchar(20)),',',':')
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select 
-  str_to_map('a:1,b:2,c:3',',',':'),
-  str_to_map(cast('a:1,b:2,c:3' as varchar(20)),',',':')
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-{"a":"1","b":"2","c":"3"}	{"a":"1","b":"2","c":"3"}
-PREHOOK: query: select
-  substr(c2, 1, 3),
-  substr(c4, 1, 3),
-  substr(c2, 1, 3) = substr(c4, 1, 3)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  substr(c2, 1, 3),
-  substr(c4, 1, 3),
-  substr(c2, 1, 3) = substr(c4, 1, 3)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-val	val	true
-PREHOOK: query: select
-  trim(c2),
-  trim(c4),
-  trim(c2) = trim(c4)
-from varchar_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  trim(c2),
-  trim(c4),
-  trim(c2) = trim(c4)
-from varchar_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-val_238	val_238	true
-PREHOOK: query: -- Aggregate Functions
-select
-  compute_stats(c2, 16),
-  compute_stats(c4, 16)
-from varchar_udf_1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: -- Aggregate Functions
-select
-  compute_stats(c2, 16),
-  compute_stats(c4, 16)
-from varchar_udf_1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1}	{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1}
-PREHOOK: query: select
-  min(c2),
-  min(c4)
-from varchar_udf_1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  min(c2),
-  min(c4)
-from varchar_udf_1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-val_238	val_238
-PREHOOK: query: select
-  max(c2),
-  max(c4)
-from varchar_udf_1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  max(c2),
-  max(c4)
-from varchar_udf_1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@varchar_udf_1
-#### A masked pattern was here ####
-val_238	val_238
-PREHOOK: query: drop table varchar_udf_1
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@varchar_udf_1
-PREHOOK: Output: default@varchar_udf_1
-POSTHOOK: query: drop table varchar_udf_1
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@varchar_udf_1
-POSTHOOK: Output: default@varchar_udf_1
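
Two changes are visible in the consolidated golden file added next: the -- JAVA_VERSION_SPECIFIC_OUTPUT marker is dropped from the queries, and the regexp check moves from function-call form to operator form. Both spellings are equivalent in HiveQL, as this sketch against the same fixture suggests (rlike as a regexp synonym is standard Hive, included purely for illustration):

  select
    regexp(c2, 'val'),  -- function-call form, as in the deleted files
    c2 regexp 'val',    -- operator form, as in the new varchar_udf1.q.out
    c2 rlike 'val'      -- rlike is a synonym for regexp
  from varchar_udf_1 limit 1;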

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/varchar_udf1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/varchar_udf1.q.out b/ql/src/test/results/clientpositive/varchar_udf1.q.out
new file mode 100644
index 0000000..e5cfce5
--- /dev/null
+++ b/ql/src/test/results/clientpositive/varchar_udf1.q.out
@@ -0,0 +1,453 @@
+PREHOOK: query: drop table varchar_udf_1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table varchar_udf_1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table varchar_udf_1 (c1 string, c2 string, c3 varchar(10), c4 varchar(20))
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@varchar_udf_1
+POSTHOOK: query: create table varchar_udf_1 (c1 string, c2 string, c3 varchar(10), c4 varchar(20))
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@varchar_udf_1
+PREHOOK: query: insert overwrite table varchar_udf_1
+  select key, value, key, value from src where key = '238' limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@varchar_udf_1
+POSTHOOK: query: insert overwrite table varchar_udf_1
+  select key, value, key, value from src where key = '238' limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@varchar_udf_1
+POSTHOOK: Lineage: varchar_udf_1.c1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: varchar_udf_1.c2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: varchar_udf_1.c3 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: varchar_udf_1.c4 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- UDFs with varchar support
+select 
+  concat(c1, c2),
+  concat(c3, c4),
+  concat(c1, c2) = concat(c3, c4)
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: -- UDFs with varchar support
+select 
+  concat(c1, c2),
+  concat(c3, c4),
+  concat(c1, c2) = concat(c3, c4)
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+238val_238	238val_238	true
+PREHOOK: query: select
+  upper(c2),
+  upper(c4),
+  upper(c2) = upper(c4)
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  upper(c2),
+  upper(c4),
+  upper(c2) = upper(c4)
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+VAL_238	VAL_238	true
+PREHOOK: query: select
+  lower(c2),
+  lower(c4),
+  lower(c2) = lower(c4)
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  lower(c2),
+  lower(c4),
+  lower(c2) = lower(c4)
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+val_238	val_238	true
+PREHOOK: query: -- Scalar UDFs
+select
+  ascii(c2),
+  ascii(c4),
+  ascii(c2) = ascii(c4)
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: -- Scalar UDFs
+select
+  ascii(c2),
+  ascii(c4),
+  ascii(c2) = ascii(c4)
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+118	118	true
+PREHOOK: query: select 
+  concat_ws('|', c1, c2),
+  concat_ws('|', c3, c4),
+  concat_ws('|', c1, c2) = concat_ws('|', c3, c4)
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select 
+  concat_ws('|', c1, c2),
+  concat_ws('|', c3, c4),
+  concat_ws('|', c1, c2) = concat_ws('|', c3, c4)
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+238|val_238	238|val_238	true
+PREHOOK: query: select
+  decode(encode(c2, 'US-ASCII'), 'US-ASCII'),
+  decode(encode(c4, 'US-ASCII'), 'US-ASCII'),
+  decode(encode(c2, 'US-ASCII'), 'US-ASCII') = decode(encode(c4, 'US-ASCII'), 'US-ASCII')
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  decode(encode(c2, 'US-ASCII'), 'US-ASCII'),
+  decode(encode(c4, 'US-ASCII'), 'US-ASCII'),
+  decode(encode(c2, 'US-ASCII'), 'US-ASCII') = decode(encode(c4, 'US-ASCII'), 'US-ASCII')
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+val_238	val_238	true
+PREHOOK: query: select
+  instr(c2, '_'),
+  instr(c4, '_'),
+  instr(c2, '_') = instr(c4, '_')
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  instr(c2, '_'),
+  instr(c4, '_'),
+  instr(c2, '_') = instr(c4, '_')
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+4	4	true
+PREHOOK: query: select
+  length(c2),
+  length(c4),
+  length(c2) = length(c4)
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  length(c2),
+  length(c4),
+  length(c2) = length(c4)
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+7	7	true
+PREHOOK: query: select
+  locate('a', 'abcdabcd', 3),
+  locate(cast('a' as varchar(1)), cast('abcdabcd' as varchar(10)), 3),
+  locate('a', 'abcdabcd', 3) = locate(cast('a' as varchar(1)), cast('abcdabcd' as varchar(10)), 3)
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  locate('a', 'abcdabcd', 3),
+  locate(cast('a' as varchar(1)), cast('abcdabcd' as varchar(10)), 3),
+  locate('a', 'abcdabcd', 3) = locate(cast('a' as varchar(1)), cast('abcdabcd' as varchar(10)), 3)
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+5	5	true
+PREHOOK: query: select
+  lpad(c2, 15, ' '),
+  lpad(c4, 15, ' '),
+  lpad(c2, 15, ' ') = lpad(c4, 15, ' ')
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  lpad(c2, 15, ' '),
+  lpad(c4, 15, ' '),
+  lpad(c2, 15, ' ') = lpad(c4, 15, ' ')
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+        val_238	        val_238	true
+PREHOOK: query: select
+  ltrim(c2),
+  ltrim(c4),
+  ltrim(c2) = ltrim(c4)
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  ltrim(c2),
+  ltrim(c4),
+  ltrim(c2) = ltrim(c4)
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+val_238	val_238	true
+PREHOOK: query: select
+  c2 regexp 'val',
+  c4 regexp 'val',
+  (c2 regexp 'val') = (c4 regexp 'val')
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  c2 regexp 'val',
+  c4 regexp 'val',
+  (c2 regexp 'val') = (c4 regexp 'val')
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+true	true	true
+PREHOOK: query: select
+  regexp_extract(c2, 'val_([0-9]+)', 1),
+  regexp_extract(c4, 'val_([0-9]+)', 1),
+  regexp_extract(c2, 'val_([0-9]+)', 1) = regexp_extract(c4, 'val_([0-9]+)', 1)
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  regexp_extract(c2, 'val_([0-9]+)', 1),
+  regexp_extract(c4, 'val_([0-9]+)', 1),
+  regexp_extract(c2, 'val_([0-9]+)', 1) = regexp_extract(c4, 'val_([0-9]+)', 1)
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+238	238	true
+PREHOOK: query: select
+  regexp_replace(c2, 'val', 'replaced'),
+  regexp_replace(c4, 'val', 'replaced'),
+  regexp_replace(c2, 'val', 'replaced') = regexp_replace(c4, 'val', 'replaced')
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  regexp_replace(c2, 'val', 'replaced'),
+  regexp_replace(c4, 'val', 'replaced'),
+  regexp_replace(c2, 'val', 'replaced') = regexp_replace(c4, 'val', 'replaced')
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+replaced_238	replaced_238	true
+PREHOOK: query: select
+  reverse(c2),
+  reverse(c4),
+  reverse(c2) = reverse(c4)
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  reverse(c2),
+  reverse(c4),
+  reverse(c2) = reverse(c4)
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+832_lav	832_lav	true
+PREHOOK: query: select
+  rpad(c2, 15, ' '),
+  rpad(c4, 15, ' '),
+  rpad(c2, 15, ' ') = rpad(c4, 15, ' ')
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  rpad(c2, 15, ' '),
+  rpad(c4, 15, ' '),
+  rpad(c2, 15, ' ') = rpad(c4, 15, ' ')
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+val_238        	val_238        	true
+PREHOOK: query: select
+  rtrim(c2),
+  rtrim(c4),
+  rtrim(c2) = rtrim(c4)
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  rtrim(c2),
+  rtrim(c4),
+  rtrim(c2) = rtrim(c4)
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+val_238	val_238	true
+PREHOOK: query: select
+  sentences('See spot run.  See jane run.'),
+  sentences(cast('See spot run.  See jane run.' as varchar(50)))
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  sentences('See spot run.  See jane run.'),
+  sentences(cast('See spot run.  See jane run.' as varchar(50)))
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+[["See","spot","run"],["See","jane","run"]]	[["See","spot","run"],["See","jane","run"]]
+PREHOOK: query: select
+  split(c2, '_'),
+  split(c4, '_')
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  split(c2, '_'),
+  split(c4, '_')
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+["val","238"]	["val","238"]
+PREHOOK: query: select 
+  str_to_map('a:1,b:2,c:3',',',':'),
+  str_to_map(cast('a:1,b:2,c:3' as varchar(20)),',',':')
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select 
+  str_to_map('a:1,b:2,c:3',',',':'),
+  str_to_map(cast('a:1,b:2,c:3' as varchar(20)),',',':')
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+{"a":"1","b":"2","c":"3"}	{"a":"1","b":"2","c":"3"}
+PREHOOK: query: select
+  substr(c2, 1, 3),
+  substr(c4, 1, 3),
+  substr(c2, 1, 3) = substr(c4, 1, 3)
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  substr(c2, 1, 3),
+  substr(c4, 1, 3),
+  substr(c2, 1, 3) = substr(c4, 1, 3)
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+val	val	true
+PREHOOK: query: select
+  trim(c2),
+  trim(c4),
+  trim(c2) = trim(c4)
+from varchar_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  trim(c2),
+  trim(c4),
+  trim(c2) = trim(c4)
+from varchar_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+val_238	val_238	true
+PREHOOK: query: -- Aggregate Functions
+select
+  compute_stats(c2, 16),
+  compute_stats(c4, 16)
+from varchar_udf_1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: -- Aggregate Functions
+select
+  compute_stats(c2, 16),
+  compute_stats(c4, 16)
+from varchar_udf_1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1,"ndvbitvector":"{0}{3}{2}{3}{1}{0}{2}{0}{1}{0}{0}{1}{3}{2}{0}{3}"}	{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1,"ndvbitvector":"{0}{3}{2}{3}{1}{0}{2}{0}{1}{0}{0}{1}{3}{2}{0}{3}"}
+PREHOOK: query: select
+  min(c2),
+  min(c4)
+from varchar_udf_1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  min(c2),
+  min(c4)
+from varchar_udf_1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+val_238	val_238
+PREHOOK: query: select
+  max(c2),
+  max(c4)
+from varchar_udf_1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  max(c2),
+  max(c4)
+from varchar_udf_1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@varchar_udf_1
+#### A masked pattern was here ####
+val_238	val_238
+PREHOOK: query: drop table varchar_udf_1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@varchar_udf_1
+PREHOOK: Output: default@varchar_udf_1
+POSTHOOK: query: drop table varchar_udf_1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@varchar_udf_1
+POSTHOOK: Output: default@varchar_udf_1

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/vector_cast_constant.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_cast_constant.q.java1.7.out b/ql/src/test/results/clientpositive/vector_cast_constant.q.java1.7.out
deleted file mode 100644
index 867dd4c..0000000
--- a/ql/src/test/results/clientpositive/vector_cast_constant.q.java1.7.out
+++ /dev/null
@@ -1,220 +0,0 @@
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE over1korc
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE over1korc
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1k
-POSTHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1k
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@over1k
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@over1k
-PREHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1korc
-PREHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1korc
-POSTHOOK: Lineage: over1korc.b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1korc.bin SIMPLE [(over1k)over1k.FieldSchema(name:bin, type:binary, comment:null), ]
-POSTHOOK: Lineage: over1korc.bo SIMPLE [(over1k)over1k.FieldSchema(name:bo, type:boolean, comment:null), ]
-POSTHOOK: Lineage: over1korc.d SIMPLE [(over1k)over1k.FieldSchema(name:d, type:double, comment:null), ]
-POSTHOOK: Lineage: over1korc.dec SIMPLE [(over1k)over1k.FieldSchema(name:dec, type:decimal(4,2), comment:null), ]
-POSTHOOK: Lineage: over1korc.f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1korc.i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1korc.s SIMPLE [(over1k)over1k.FieldSchema(name:s, type:string, comment:null), ]
-POSTHOOK: Lineage: over1korc.si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1korc.t SIMPLE [(over1k)over1k.FieldSchema(name:t, type:tinyint, comment:null), ]
-POSTHOOK: Lineage: over1korc.ts SIMPLE [(over1k)over1k.FieldSchema(name:ts, type:timestamp, comment:null), ]
-PREHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: over1korc
-            Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: i (type: int)
-              outputColumnNames: _col0
-              Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: avg(50), avg(50.0), avg(50)
-                keys: _col0 (type: int)
-                mode: hash
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: int)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: int)
-                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: struct<count:bigint,sum:double,input:int>), _col2 (type: struct<count:bigint,sum:double,input:double>), _col3 (type: struct<count:bigint,sum:decimal(12,0),input:decimal(10,0)>)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: avg(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2)
-          keys: KEY._col0 (type: int)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: int)
-              sort order: +
-              Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-              TopN Hash Memory Usage: 0.1
-              value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: decimal(14,4))
-      Reduce Operator Tree:
-        Select Operator
-          expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: decimal(14,4))
-          outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-          Limit
-            Number of rows: 10
-            Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-            File Output Operator
-              compressed: false
-              Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-              table:
-                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: 10
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-65536	50.0	50.0	50.0000
-65537	50.0	50.0	50.0000
-65538	50.0	50.0	50.0000
-65539	50.0	50.0	50.0000
-65540	50.0	50.0	50.0000
-65541	50.0	50.0	50.0000
-65542	50.0	50.0	50.0000
-65543	50.0	50.0	50.0000
-65544	50.0	50.0	50.0000
-65545	50.0	50.0	50.0000

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/vector_cast_constant.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_cast_constant.q.java1.8.out b/ql/src/test/results/clientpositive/vector_cast_constant.q.java1.8.out
deleted file mode 100644
index 789e6c2..0000000
--- a/ql/src/test/results/clientpositive/vector_cast_constant.q.java1.8.out
+++ /dev/null
@@ -1,197 +0,0 @@
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE over1korc
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE over1korc
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1k
-POSTHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1k
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@over1k
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@over1k
-PREHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1korc
-PREHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1korc
-POSTHOOK: Lineage: over1korc.b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1korc.bin SIMPLE [(over1k)over1k.FieldSchema(name:bin, type:binary, comment:null), ]
-POSTHOOK: Lineage: over1korc.bo SIMPLE [(over1k)over1k.FieldSchema(name:bo, type:boolean, comment:null), ]
-POSTHOOK: Lineage: over1korc.d SIMPLE [(over1k)over1k.FieldSchema(name:d, type:double, comment:null), ]
-POSTHOOK: Lineage: over1korc.dec SIMPLE [(over1k)over1k.FieldSchema(name:dec, type:decimal(4,2), comment:null), ]
-POSTHOOK: Lineage: over1korc.f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1korc.i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1korc.s SIMPLE [(over1k)over1k.FieldSchema(name:s, type:string, comment:null), ]
-POSTHOOK: Lineage: over1korc.si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1korc.t SIMPLE [(over1k)over1k.FieldSchema(name:t, type:tinyint, comment:null), ]
-POSTHOOK: Lineage: over1korc.ts SIMPLE [(over1k)over1k.FieldSchema(name:ts, type:timestamp, comment:null), ]
-PREHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i LIMIT 10
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i LIMIT 10
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: over1korc
-            Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: i (type: int)
-              outputColumnNames: _col0
-              Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-              Group By Operator
-                aggregations: avg(50), avg(50.0), avg(50)
-                keys: _col0 (type: int)
-                mode: hash
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: int)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: int)
-                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: struct<count:bigint,sum:double,input:int>), _col2 (type: struct<count:bigint,sum:double,input:double>), _col3 (type: struct<count:bigint,sum:decimal(12,0),input:decimal(10,0)>)
-      Execution mode: vectorized
-      Reduce Operator Tree:
-        Group By Operator
-          aggregations: avg(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2)
-          keys: KEY._col0 (type: int)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-          Limit
-            Number of rows: 10
-            Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-            File Output Operator
-              compressed: false
-              Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: 10
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i LIMIT 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i LIMIT 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-65536	50.0	50.0	50
-65537	50.0	50.0	50
-65538	50.0	50.0	50
-65539	50.0	50.0	50
-65540	50.0	50.0	50
-65541	50.0	50.0	50
-65542	50.0	50.0	50
-65543	50.0	50.0	50
-65544	50.0	50.0	50
-65545	50.0	50.0	50
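
Both deleted plans above show Hive's two-phase AVG over the ORC table: a vectorized map-side hash Group By ships partial aggregate state as structs (the "value expressions" lines, e.g. struct<count:bigint,sum:decimal(12,0),input:decimal(10,0)>), and the reducer merges them in mergepartial mode. A sketch for reproducing such a plan, assuming the over1korc fixture; whether the test harness sets the vectorization flag explicitly is an assumption:

  -- hive.vectorized.execution.enabled is a real Hive setting; setting it
  -- here is illustrative, since ORC input is what lets the map side
  -- run in vectorized execution mode.
  set hive.vectorized.execution.enabled=true;
  EXPLAIN SELECT i,
    AVG(CAST(50 AS INT))     AS avg_int_ok,
    AVG(CAST(50 AS DECIMAL)) AS avg_decimal_ok
  FROM over1korc GROUP BY i LIMIT 10;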

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/vector_cast_constant.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_cast_constant.q.out b/ql/src/test/results/clientpositive/vector_cast_constant.q.out
index 39ed1c8..6033aad 100644
--- a/ql/src/test/results/clientpositive/vector_cast_constant.q.out
+++ b/ql/src/test/results/clientpositive/vector_cast_constant.q.out
@@ -102,18 +102,19 @@ PREHOOK: query: EXPLAIN SELECT
   AVG(CAST(50 AS INT)) AS `avg_int_ok`,
   AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
   AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i LIMIT 10
+  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN SELECT 
   i,
   AVG(CAST(50 AS INT)) AS `avg_int_ok`,
   AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
   AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i LIMIT 10
+  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
 
 STAGE PLANS:
   Stage: Stage-1
@@ -146,6 +147,28 @@ STAGE PLANS:
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3
           Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Reduce Output Operator
+              key expressions: _col0 (type: int)
+              sort order: +
+              Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
+              TopN Hash Memory Usage: 0.1
+              value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: decimal(14,4))
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: decimal(14,4))
+          outputColumnNames: _col0, _col1, _col2, _col3
+          Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
           Limit
             Number of rows: 10
             Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
@@ -168,7 +191,7 @@ PREHOOK: query: SELECT
   AVG(CAST(50 AS INT)) AS `avg_int_ok`,
   AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
   AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i LIMIT 10
+  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
 PREHOOK: type: QUERY
 PREHOOK: Input: default@over1korc
 #### A masked pattern was here ####
@@ -177,17 +200,17 @@ POSTHOOK: query: SELECT
   AVG(CAST(50 AS INT)) AS `avg_int_ok`,
   AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
   AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i LIMIT 10
+  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over1korc
 #### A masked pattern was here ####
-65536	50.0	50.0	50
-65537	50.0	50.0	50
-65538	50.0	50.0	50
-65539	50.0	50.0	50
-65540	50.0	50.0	50
-65541	50.0	50.0	50
-65542	50.0	50.0	50
-65543	50.0	50.0	50
-65544	50.0	50.0	50
-65545	50.0	50.0	50
+65536	50.0	50.0	50.0000
+65537	50.0	50.0	50.0000
+65538	50.0	50.0	50.0000
+65539	50.0	50.0	50.0000
+65540	50.0	50.0	50.0000
+65541	50.0	50.0	50.0000
+65542	50.0	50.0	50.0000
+65543	50.0	50.0	50.0000
+65544	50.0	50.0	50.0000
+65545	50.0	50.0	50.0000
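
The patch above aligns vector_cast_constant.q.out with the former java1.7 variant: adding ORDER BY i makes the ten-row LIMIT deterministic (plain GROUP BY guarantees no output order, which is presumably how the result became JVM-sensitive), at the cost of the extra Stage-2 sort, and the expected averages change from 50 to 50.0000 because the plan types AVG(CAST(50 AS DECIMAL)) as decimal(14,4), with CAST(50 AS DECIMAL) itself defaulting to decimal(10,0). A worked sketch:

  -- decimal(10,0) input, decimal(14,4) average: 50 renders as 50.0000.
  -- ORDER BY i is what pins down which ten groups LIMIT returns.
  SELECT i, AVG(CAST(50 AS DECIMAL)) AS avg_decimal_ok
  FROM over1korc
  GROUP BY i
  ORDER BY i
  LIMIT 10;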


[26/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/cbo_rp_outer_join_ppr.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/cbo_rp_outer_join_ppr.q.java1.7.out b/ql/src/test/results/clientpositive/cbo_rp_outer_join_ppr.q.java1.7.out
deleted file mode 100644
index 5c40dc4..0000000
--- a/ql/src/test/results/clientpositive/cbo_rp_outer_join_ppr.q.java1.7.out
+++ /dev/null
@@ -1,693 +0,0 @@
-PREHOOK: query: -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-PREHOOK: type: QUERY
-POSTHOOK: query: -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: a
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: key, value
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              Reduce Output Operator
-                key expressions: key (type: string)
-                null sort order: a
-                sort order: +
-                Map-reduce partition columns: key (type: string)
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                tag: 0
-                value expressions: value (type: string)
-                auto parallelism: false
-          TableScan
-            alias: b
-            Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string), ds (type: string)
-              outputColumnNames: key, value, ds
-              Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-              Reduce Output Operator
-                key expressions: key (type: string)
-                null sort order: a
-                sort order: +
-                Map-reduce partition columns: key (type: string)
-                Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-                tag: 1
-                value expressions: value (type: string), ds (type: string)
-                auto parallelism: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numRows 500
-              rawDataSize 5312
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numRows 500
-                rawDataSize 5312
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-09
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-09
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /src [a]
-        /srcpart/ds=2008-04-08/hr=11 [b]
-        /srcpart/ds=2008-04-08/hr=12 [b]
-        /srcpart/ds=2008-04-09/hr=11 [b]
-        /srcpart/ds=2008-04-09/hr=12 [b]
-      Needs Tagging: true
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Outer Join 0 to 1
-          filter mappings:
-            1 [0, 1]
-          filter predicates:
-            0 
-            1 {(VALUE.ds = '2008-04-08')}
-          keys:
-            0 key (type: string)
-            1 key (type: string)
-          outputColumnNames: key, value, key0, value0
-          Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
-          Select Operator
-            expressions: key (type: string), value (type: string), key0 (type: string), value0 (type: string)
-            outputColumnNames: key, value, key0, value0
-            Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              isSamplingPred: false
-              predicate: ((UDFToDouble(key) > 10.0) and (UDFToDouble(key) < 20.0) and (UDFToDouble(key0) > 15.0) and (UDFToDouble(key0) < 25.0)) (type: boolean)
-              Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    properties:
-                      columns key,value,key0,value0
-                      columns.types string:string:string:string
-                      escape.delim \
-                      hive.serialization.extend.additional.nesting.levels true
-                      serialization.escape.crlf true
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                TotalFiles: 1
-                GatherStats: false
-                MultiFileSpray: false
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-#### A masked pattern was here ####
-17	val_17	17	val_17
-17	val_17	17	val_17
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-19	val_19	19	val_19
-19	val_19	19	val_19
-PREHOOK: query: EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: a
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: ((UDFToDouble(key) > 15.0) and (UDFToDouble(key) < 25.0)) (type: boolean)
-              Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: key, value
-                Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: key (type: string)
-                  null sort order: a
-                  sort order: +
-                  Map-reduce partition columns: key (type: string)
-                  Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
-                  tag: 0
-                  value expressions: value (type: string)
-                  auto parallelism: false
-          TableScan
-            alias: b
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: ((UDFToDouble(key) > 15.0) and (UDFToDouble(key) < 25.0)) (type: boolean)
-              Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: key, value
-                Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: key (type: string)
-                  null sort order: a
-                  sort order: +
-                  Map-reduce partition columns: key (type: string)
-                  Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
-                  tag: 1
-                  value expressions: value (type: string)
-                  auto parallelism: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numRows 500
-              rawDataSize 5312
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numRows 500
-                rawDataSize 5312
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /src [a]
-        /srcpart/ds=2008-04-08/hr=11 [b]
-        /srcpart/ds=2008-04-08/hr=12 [b]
-      Needs Tagging: true
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Right Outer Join0 to 1
-          keys:
-            0 key (type: string)
-            1 key (type: string)
-          outputColumnNames: key, value, key0, value0
-          Statistics: Num rows: 122 Data size: 1296 Basic stats: COMPLETE Column stats: NONE
-          Select Operator
-            expressions: key (type: string), value (type: string), key0 (type: string), value0 (type: string)
-            outputColumnNames: key, value, key0, value0
-            Statistics: Num rows: 122 Data size: 1296 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              isSamplingPred: false
-              predicate: ((UDFToDouble(key) > 10.0) and (UDFToDouble(key) < 20.0)) (type: boolean)
-              Statistics: Num rows: 13 Data size: 138 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Statistics: Num rows: 13 Data size: 138 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    properties:
-                      columns key,value,key0,value0
-                      columns.types string:string:string:string
-                      escape.delim \
-                      hive.serialization.extend.additional.nesting.levels true
-                      serialization.escape.crlf true
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                TotalFiles: 1
-                GatherStats: false
-                MultiFileSpray: false
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-17	val_17	17	val_17
-17	val_17	17	val_17
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-19	val_19	19	val_19
-19	val_19	19	val_19

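The hunks above remove a JDK-specific golden file in which COLUMN_STATS_ACCURATE was serialized as {"COLUMN_STATS":{...},"BASIC_STATS":"true"}; the file re-added below carries the same plans with the keys in lexicographic order, {"BASIC_STATS":"true","COLUMN_STATS":{...}}. That is the HIVE-13409 fix in miniature: a plain HashMap makes no iteration-order promise, its order changed between JDK7 and JDK8, and so the serialized JSON, and with it the .q.out comparison, was JVM-dependent; writing the entries in sorted order makes the output deterministic. A minimal, hypothetical sketch of the failure mode and the fix (illustrative only, not the actual Hive code; assumes jackson-databind on the classpath):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;

    import com.fasterxml.jackson.databind.ObjectMapper;

    public class ColumnStatsAccurateOrder {
      public static void main(String[] args) throws Exception {
        Map<String, String> columnStats = new HashMap<>();
        columnStats.put("key", "true");
        columnStats.put("value", "true");

        Map<String, Object> accurate = new HashMap<>();
        accurate.put("COLUMN_STATS", columnStats);
        accurate.put("BASIC_STATS", "true");

        ObjectMapper mapper = new ObjectMapper();

        // HashMap iteration order is unspecified and differs between JDK7
        // and JDK8, so this line can print the keys in either order:
        System.out.println(mapper.writeValueAsString(accurate));

        // Copying into a TreeMap pins one lexicographic key order, matching
        // the new golden files:
        // {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
        System.out.println(mapper.writeValueAsString(new TreeMap<String, Object>(accurate)));
      }
    }
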
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/cbo_rp_outer_join_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/cbo_rp_outer_join_ppr.q.out b/ql/src/test/results/clientpositive/cbo_rp_outer_join_ppr.q.out
new file mode 100644
index 0000000..200b8ee
--- /dev/null
+++ b/ql/src/test/results/clientpositive/cbo_rp_outer_join_ppr.q.out
@@ -0,0 +1,691 @@
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+EXPLAIN EXTENDED
+ FROM 
+  src a
+ FULL OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key AND b.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
+PREHOOK: type: QUERY
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+EXPLAIN EXTENDED
+ FROM 
+  src a
+ FULL OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key AND b.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: a
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string)
+              outputColumnNames: key, value
+              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: key (type: string)
+                null sort order: a
+                sort order: +
+                Map-reduce partition columns: key (type: string)
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                tag: 0
+                value expressions: value (type: string)
+                auto parallelism: false
+          TableScan
+            alias: b
+            Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string), ds (type: string)
+              outputColumnNames: key, value, ds
+              Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: key (type: string)
+                null sort order: a
+                sort order: +
+                Map-reduce partition columns: key (type: string)
+                Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+                tag: 1
+                value expressions: value (type: string), ds (type: string)
+                auto parallelism: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numRows 500
+              rawDataSize 5312
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numRows 500
+                rawDataSize 5312
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=11
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=12
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=11
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-09
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=12
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-09
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+      Truncated Path -> Alias:
+        /src [a]
+        /srcpart/ds=2008-04-08/hr=11 [b]
+        /srcpart/ds=2008-04-08/hr=12 [b]
+        /srcpart/ds=2008-04-09/hr=11 [b]
+        /srcpart/ds=2008-04-09/hr=12 [b]
+      Needs Tagging: true
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Outer Join 0 to 1
+          filter mappings:
+            1 [0, 1]
+          filter predicates:
+            0 
+            1 {(VALUE.ds = '2008-04-08')}
+          keys:
+            0 key (type: string)
+            1 key (type: string)
+          outputColumnNames: key, value, key0, value0
+          Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: key (type: string), value (type: string), key0 (type: string), value0 (type: string)
+            outputColumnNames: key, value, key0, value0
+            Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              isSamplingPred: false
+              predicate: ((UDFToDouble(key) > 10.0) and (UDFToDouble(key) < 20.0) and (UDFToDouble(key0) > 15.0) and (UDFToDouble(key0) < 25.0)) (type: boolean)
+              Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    properties:
+                      columns key,value,key0,value0
+                      columns.types string:string:string:string
+                      escape.delim \
+                      hive.serialization.extend.additional.nesting.levels true
+                      serialization.escape.crlf true
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                TotalFiles: 1
+                GatherStats: false
+                MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: FROM 
+  src a
+ FULL OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key AND b.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: FROM 
+  src a
+ FULL OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key AND b.ds = '2008-04-08')
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+17	val_17	17	val_17
+17	val_17	17	val_17
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+19	val_19	19	val_19
+19	val_19	19	val_19
+PREHOOK: query: EXPLAIN EXTENDED
+ FROM 
+  src a
+ FULL OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED
+ FROM 
+  src a
+ FULL OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: a
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Filter Operator
+              isSamplingPred: false
+              predicate: ((UDFToDouble(key) > 15.0) and (UDFToDouble(key) < 25.0)) (type: boolean)
+              Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: string), value (type: string)
+                outputColumnNames: key, value
+                Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: key (type: string)
+                  null sort order: a
+                  sort order: +
+                  Map-reduce partition columns: key (type: string)
+                  Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
+                  tag: 0
+                  value expressions: value (type: string)
+                  auto parallelism: false
+          TableScan
+            alias: b
+            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Filter Operator
+              isSamplingPred: false
+              predicate: ((UDFToDouble(key) > 15.0) and (UDFToDouble(key) < 25.0)) (type: boolean)
+              Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: string), value (type: string)
+                outputColumnNames: key, value
+                Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: key (type: string)
+                  null sort order: a
+                  sort order: +
+                  Map-reduce partition columns: key (type: string)
+                  Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
+                  tag: 1
+                  value expressions: value (type: string)
+                  auto parallelism: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numRows 500
+              rawDataSize 5312
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numRows 500
+                rawDataSize 5312
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=11
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=12
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+      Truncated Path -> Alias:
+        /src [a]
+        /srcpart/ds=2008-04-08/hr=11 [b]
+        /srcpart/ds=2008-04-08/hr=12 [b]
+      Needs Tagging: true
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Right Outer Join0 to 1
+          keys:
+            0 key (type: string)
+            1 key (type: string)
+          outputColumnNames: key, value, key0, value0
+          Statistics: Num rows: 122 Data size: 1296 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: key (type: string), value (type: string), key0 (type: string), value0 (type: string)
+            outputColumnNames: key, value, key0, value0
+            Statistics: Num rows: 122 Data size: 1296 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              isSamplingPred: false
+              predicate: ((UDFToDouble(key) > 10.0) and (UDFToDouble(key) < 20.0)) (type: boolean)
+              Statistics: Num rows: 13 Data size: 138 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Statistics: Num rows: 13 Data size: 138 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    properties:
+                      columns key,value,key0,value0
+                      columns.types string:string:string:string
+                      escape.delim \
+                      hive.serialization.extend.additional.nesting.levels true
+                      serialization.escape.crlf true
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                TotalFiles: 1
+                GatherStats: false
+                MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: FROM 
+  src a
+ FULL OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: FROM 
+  src a
+ FULL OUTER JOIN 
+  srcpart b 
+ ON (a.key = b.key)
+ SELECT a.key, a.value, b.key, b.value
+ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+17	val_17	17	val_17
+17	val_17	17	val_17
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+18	val_18	18	val_18
+19	val_19	19	val_19
+19	val_19	19	val_19

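The next hunk deletes the JDK7-specific golden file char_udf1.q.java1.7.out as part of the same cleanup. The padded values in its expected results (for example VAL_238 followed by spaces out to the declared length, with upper(c2) = upper(c4) still true) reflect Hive's char(n) semantics: a char value is space-padded to n characters for display, and comparisons ignore the trailing padding. A small, hypothetical Java sketch of those two rules; the helpers pad and charEquals are invented for illustration and are not Hive APIs:

    public class CharSemantics {
      // Pad to the declared char(n) length, truncating anything longer.
      static String pad(String s, int n) {
        StringBuilder sb = new StringBuilder(s.length() > n ? s.substring(0, n) : s);
        while (sb.length() < n) {
          sb.append(' ');
        }
        return sb.toString();
      }

      // char(n) comparison disregards trailing padding.
      static boolean charEquals(String a, String b) {
        return stripTrailingSpaces(a).equals(stripTrailingSpaces(b));
      }

      static String stripTrailingSpaces(String s) {
        int end = s.length();
        while (end > 0 && s.charAt(end - 1) == ' ') {
          end--;
        }
        return s.substring(0, end);
      }

      public static void main(String[] args) {
        String c2 = "val_238";           // string column
        String c4 = pad("val_238", 20);  // char(20) column
        System.out.println(c2.toUpperCase());                               // VAL_238
        System.out.println(c4.toUpperCase());                               // VAL_238 plus 13 trailing spaces
        System.out.println(charEquals(c2.toUpperCase(), c4.toUpperCase())); // true
      }
    }
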
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/char_udf1.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/char_udf1.q.java1.7.out b/ql/src/test/results/clientpositive/char_udf1.q.java1.7.out
deleted file mode 100644
index ee1c2ae..0000000
--- a/ql/src/test/results/clientpositive/char_udf1.q.java1.7.out
+++ /dev/null
@@ -1,463 +0,0 @@
-PREHOOK: query: drop table char_udf_1
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table char_udf_1
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table char_udf_1 (c1 string, c2 string, c3 char(10), c4 char(20))
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@char_udf_1
-POSTHOOK: query: create table char_udf_1 (c1 string, c2 string, c3 char(10), c4 char(20))
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@char_udf_1
-PREHOOK: query: insert overwrite table char_udf_1
-  select key, value, key, value from src where key = '238' limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@char_udf_1
-POSTHOOK: query: insert overwrite table char_udf_1
-  select key, value, key, value from src where key = '238' limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@char_udf_1
-POSTHOOK: Lineage: char_udf_1.c1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: char_udf_1.c2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: char_udf_1.c3 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: char_udf_1.c4 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- UDFs with char support
-select 
-  concat(c1, c2),
-  concat(c3, c4),
-  concat(c1, c2) = concat(c3, c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- UDFs with char support
-select 
-  concat(c1, c2),
-  concat(c3, c4),
-  concat(c1, c2) = concat(c3, c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-238val_238	238val_238                    	true
-PREHOOK: query: select
-  upper(c2),
-  upper(c4),
-  upper(c2) = upper(c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  upper(c2),
-  upper(c4),
-  upper(c2) = upper(c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-VAL_238	VAL_238             	true
-PREHOOK: query: select
-  lower(c2),
-  lower(c4),
-  lower(c2) = lower(c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  lower(c2),
-  lower(c4),
-  lower(c2) = lower(c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-val_238	val_238             	true
-PREHOOK: query: -- Scalar UDFs
-select
-  ascii(c2),
-  ascii(c4),
-  ascii(c2) = ascii(c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: -- Scalar UDFs
-select
-  ascii(c2),
-  ascii(c4),
-  ascii(c2) = ascii(c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-118	118	true
-PREHOOK: query: select 
-  concat_ws('|', c1, c2),
-  concat_ws('|', c3, c4),
-  concat_ws('|', c1, c2) = concat_ws('|', c3, c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select 
-  concat_ws('|', c1, c2),
-  concat_ws('|', c3, c4),
-  concat_ws('|', c1, c2) = concat_ws('|', c3, c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-238|val_238	238|val_238	true
-PREHOOK: query: select
-  decode(encode(c2, 'US-ASCII'), 'US-ASCII'),
-  decode(encode(c4, 'US-ASCII'), 'US-ASCII'),
-  decode(encode(c2, 'US-ASCII'), 'US-ASCII') = decode(encode(c4, 'US-ASCII'), 'US-ASCII')
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  decode(encode(c2, 'US-ASCII'), 'US-ASCII'),
-  decode(encode(c4, 'US-ASCII'), 'US-ASCII'),
-  decode(encode(c2, 'US-ASCII'), 'US-ASCII') = decode(encode(c4, 'US-ASCII'), 'US-ASCII')
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-val_238	val_238	true
-PREHOOK: query: select
-  instr(c2, '_'),
-  instr(c4, '_'),
-  instr(c2, '_') = instr(c4, '_')
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  instr(c2, '_'),
-  instr(c4, '_'),
-  instr(c2, '_') = instr(c4, '_')
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-4	4	true
-PREHOOK: query: select
-  length(c2),
-  length(c4),
-  length(c2) = length(c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  length(c2),
-  length(c4),
-  length(c2) = length(c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-7	7	true
-PREHOOK: query: select
-  locate('a', 'abcdabcd', 3),
-  locate(cast('a' as char(1)), cast('abcdabcd' as char(10)), 3),
-  locate('a', 'abcdabcd', 3) = locate(cast('a' as char(1)), cast('abcdabcd' as char(10)), 3)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  locate('a', 'abcdabcd', 3),
-  locate(cast('a' as char(1)), cast('abcdabcd' as char(10)), 3),
-  locate('a', 'abcdabcd', 3) = locate(cast('a' as char(1)), cast('abcdabcd' as char(10)), 3)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-5	5	true
-PREHOOK: query: select
-  lpad(c2, 15, ' '),
-  lpad(c4, 15, ' '),
-  lpad(c2, 15, ' ') = lpad(c4, 15, ' ')
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  lpad(c2, 15, ' '),
-  lpad(c4, 15, ' '),
-  lpad(c2, 15, ' ') = lpad(c4, 15, ' ')
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-        val_238	        val_238	true
-PREHOOK: query: select
-  ltrim(c2),
-  ltrim(c4),
-  ltrim(c2) = ltrim(c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  ltrim(c2),
-  ltrim(c4),
-  ltrim(c2) = ltrim(c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-val_238	val_238	true
-PREHOOK: query: -- In hive wiki page https://cwiki.apache.org/confluence/display/Hive/LanguageManual+UDF
--- we only allow A regexp B, not regexp (A,B).
-
-select
-  c2 regexp 'val',
-  c4 regexp 'val',
-  (c2 regexp 'val') = (c4 regexp 'val')
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: -- In hive wiki page https://cwiki.apache.org/confluence/display/Hive/LanguageManual+UDF
--- we only allow A regexp B, not regexp (A,B).
-
-select
-  c2 regexp 'val',
-  c4 regexp 'val',
-  (c2 regexp 'val') = (c4 regexp 'val')
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-true	true	true
-PREHOOK: query: select
-  regexp_extract(c2, 'val_([0-9]+)', 1),
-  regexp_extract(c4, 'val_([0-9]+)', 1),
-  regexp_extract(c2, 'val_([0-9]+)', 1) = regexp_extract(c4, 'val_([0-9]+)', 1)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  regexp_extract(c2, 'val_([0-9]+)', 1),
-  regexp_extract(c4, 'val_([0-9]+)', 1),
-  regexp_extract(c2, 'val_([0-9]+)', 1) = regexp_extract(c4, 'val_([0-9]+)', 1)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-238	238	true
-PREHOOK: query: select
-  regexp_replace(c2, 'val', 'replaced'),
-  regexp_replace(c4, 'val', 'replaced'),
-  regexp_replace(c2, 'val', 'replaced') = regexp_replace(c4, 'val', 'replaced')
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  regexp_replace(c2, 'val', 'replaced'),
-  regexp_replace(c4, 'val', 'replaced'),
-  regexp_replace(c2, 'val', 'replaced') = regexp_replace(c4, 'val', 'replaced')
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-replaced_238	replaced_238	true
-PREHOOK: query: select
-  reverse(c2),
-  reverse(c4),
-  reverse(c2) = reverse(c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  reverse(c2),
-  reverse(c4),
-  reverse(c2) = reverse(c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-832_lav	832_lav	true
-PREHOOK: query: select
-  rpad(c2, 15, ' '),
-  rpad(c4, 15, ' '),
-  rpad(c2, 15, ' ') = rpad(c4, 15, ' ')
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  rpad(c2, 15, ' '),
-  rpad(c4, 15, ' '),
-  rpad(c2, 15, ' ') = rpad(c4, 15, ' ')
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-val_238        	val_238        	true
-PREHOOK: query: select
-  rtrim(c2),
-  rtrim(c4),
-  rtrim(c2) = rtrim(c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  rtrim(c2),
-  rtrim(c4),
-  rtrim(c2) = rtrim(c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-val_238	val_238	true
-PREHOOK: query: select
-  sentences('See spot run.  See jane run.'),
-  sentences(cast('See spot run.  See jane run.' as char(50)))
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  sentences('See spot run.  See jane run.'),
-  sentences(cast('See spot run.  See jane run.' as char(50)))
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-[["See","spot","run"],["See","jane","run"]]	[["See","spot","run"],["See","jane","run"]]
-PREHOOK: query: select
-  split(c2, '_'),
-  split(c4, '_')
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  split(c2, '_'),
-  split(c4, '_')
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-["val","238"]	["val","238"]
-PREHOOK: query: select 
-  str_to_map('a:1,b:2,c:3',',',':'),
-  str_to_map(cast('a:1,b:2,c:3' as char(20)),',',':')
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select 
-  str_to_map('a:1,b:2,c:3',',',':'),
-  str_to_map(cast('a:1,b:2,c:3' as char(20)),',',':')
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-{"b":"2","a":"1","c":"3"}	{"b":"2","a":"1","c":"3"}
-PREHOOK: query: select
-  substr(c2, 1, 3),
-  substr(c4, 1, 3),
-  substr(c2, 1, 3) = substr(c4, 1, 3)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  substr(c2, 1, 3),
-  substr(c4, 1, 3),
-  substr(c2, 1, 3) = substr(c4, 1, 3)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-val	val	true
-PREHOOK: query: select
-  trim(c2),
-  trim(c4),
-  trim(c2) = trim(c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  trim(c2),
-  trim(c4),
-  trim(c2) = trim(c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-val_238	val_238	true
-PREHOOK: query: -- Aggregate Functions
-select
-  compute_stats(c2, 16),
-  compute_stats(c4, 16)
-from char_udf_1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: -- Aggregate Functions
-select
-  compute_stats(c2, 16),
-  compute_stats(c4, 16)
-from char_udf_1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1,"ndvbitvector":"{0}{3}{2}{3}{1}{0}{2}{0}{1}{0}{0}{1}{3}{2}{0}{3}"}	{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1,"ndvbitvector":"{0}{3}{2}{3}{1}{0}{2}{0}{1}{0}{0}{1}{3}{2}{0}{3}"}
-PREHOOK: query: select
-  min(c2),
-  min(c4)
-from char_udf_1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  min(c2),
-  min(c4)
-from char_udf_1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-val_238	val_238             
-PREHOOK: query: select
-  max(c2),
-  max(c4)
-from char_udf_1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  max(c2),
-  max(c4)
-from char_udf_1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-val_238	val_238             
-PREHOOK: query: drop table char_udf_1
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@char_udf_1
-PREHOOK: Output: default@char_udf_1
-POSTHOOK: query: drop table char_udf_1
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@char_udf_1
-POSTHOOK: Output: default@char_udf_1
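
The deleted char_udf1.q.java1.7.out above is marked JAVA_VERSION_SPECIFIC_OUTPUT because several of its results are rendered by iterating Java collections whose order the JDK leaves unspecified and has in practice changed across releases; the str_to_map output {"b":"2","a":"1","c":"3"} (non-insertion order) and the compute_stats ndvbitvector are the visible examples, and this is presumably why parallel .java1.7.out/.java1.8.out golden files were kept at all. A minimal sketch of the underlying hazard, assuming only standard java.util behavior (illustrative code, not Hive's implementation):

import java.util.HashMap;
import java.util.Map;

// Illustrative only, not Hive code: any test output produced by iterating a
// HashMap inherits an iteration order that the HashMap contract leaves
// unspecified and that has differed between JDK releases.
public class MapOrderDemo {
    public static void main(String[] args) {
        Map<String, String> m = new HashMap<>();
        m.put("a", "1");
        m.put("b", "2");
        m.put("c", "3");
        // May print {b=2, a=1, c=3} on one JDK and {a=1, b=2, c=3} on another;
        // no particular order is guaranteed.
        System.out.println(m);
    }
}

Because neither order is part of the HashMap contract, a golden file that captures such output is tied to the JDK that produced it, and HIVE-13549 removes the per-JDK copies rather than maintain both.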


[21/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_2.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_2.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_2.q.java1.7.out
deleted file mode 100644
index dcfbec0..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_2.q.java1.7.out
+++ /dev/null
@@ -1,591 +0,0 @@
-PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_static_part
-PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: srcpart
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/hr=11/
-                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_static_part
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_static_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_static_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [srcpart]
-        /srcpart/ds=2008-04-08/hr=12 [srcpart]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_static_part
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_static_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	6                   
-	numRows             	1000                
-	rawDataSize         	9624                
-	totalSize           	10898               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[103, val_103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103/value=val_103, [484, val_484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484/value=val_484}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-1000
-PREHOOK: query: select count(*) from list_bucketing_static_part
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from list_bucketing_static_part
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-1000
-PREHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Partition Description:
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              numFiles 6
-              numRows 1000
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 9624
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 10898
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      Processor Tree:
-        TableScan
-          alias: list_bucketing_static_part
-          Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
-          GatherStats: false
-          Filter Operator
-            isSamplingPred: false
-            predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
-            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: '484' (type: string), 'val_484' (type: string), '2008-04-08' (type: string), '11' (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-              ListSink
-
-PREHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	11
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	12
-PREHOOK: query: -- 51 and val_51 in the table so skewed data for 51 and val_14 should be none
--- but query should succeed for 51 or 51 and val_14
-select * from srcpart where ds = '2008-04-08' and key = '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: -- 51 and val_51 in the table so skewed data for 51 and val_14 should be none
--- but query should succeed for 51 or 51 and val_14
-select * from srcpart where ds = '2008-04-08' and key = '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	12
-51	val_51	2008-04-08	12
-PREHOOK: query: select * from list_bucketing_static_part where key = '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where key = '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	11
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '51' and value = 'val_14'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '51' and value = 'val_14'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-PREHOOK: query: select * from list_bucketing_static_part where key = '51' and value = 'val_14'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where key = '51' and value = 'val_14'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-PREHOOK: query: -- queries with < <= > >= should work for skewed test although we don't benefit from pruning
-select count(1) from srcpart where ds = '2008-04-08' and key < '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: -- queries with < <= > >= should work for skewed test although we don't benefit from pruning
-select count(1) from srcpart where ds = '2008-04-08' and key < '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-910
-PREHOOK: query: select count(1) from list_bucketing_static_part where key < '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from list_bucketing_static_part where key < '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-910
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key <= '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key <= '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-914
-PREHOOK: query: select count(1) from list_bucketing_static_part where key <= '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from list_bucketing_static_part where key <= '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-914
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key > '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key > '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-86
-PREHOOK: query: select count(1) from list_bucketing_static_part where key > '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from list_bucketing_static_part where key > '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-86
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key >= '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key >= '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-90
-PREHOOK: query: select count(1) from list_bucketing_static_part where key >= '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from list_bucketing_static_part where key >= '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-90
-PREHOOK: query: -- clean up
-drop table list_bucketing_static_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- clean up
-drop table list_bucketing_static_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Output: default@list_bucketing_static_part
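
Comparing the two list_bucket_dml_2 golden files removed by this commit shows the drift the java1.7/java1.8 split was covering: the .java1.7.out above prints Skewed Value to Truncated Path with [103, val_103] first, while the .java1.8.out below prints [484, val_484] first, and the files also disagree on the COLUMN_STATS_ACCURATE encoding ({"BASIC_STATS":"true"} vs. true). Where iteration order is the only difference, one way to keep a single golden file is to canonicalize before printing; a hypothetical sketch (sorted printing is an illustration of that idea, not the approach taken in this commit):

import java.util.Map;
import java.util.TreeMap;

// Hypothetical illustration, not Hive code: rendering through a TreeMap gives
// one canonical key order on every JDK, so one golden file would suffice.
public class CanonicalMapPrint {
    public static void main(String[] args) {
        Map<String, String> skewed = new TreeMap<>();
        skewed.put("[484, val_484]",
            "/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484/value=val_484");
        skewed.put("[103, val_103]",
            "/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103/value=val_103");
        // TreeMap iterates in sorted key order regardless of JDK version,
        // so [103, val_103] always prints before [484, val_484].
        System.out.println(skewed);
    }
}

The trade-off is that the rendered string no longer reflects the container's native order, which is why deleting the per-JDK files, as done here, is the simpler fix.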

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_2.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_2.q.java1.8.out b/ql/src/test/results/clientpositive/list_bucket_dml_2.q.java1.8.out
deleted file mode 100644
index aeeba03..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_2.q.java1.8.out
+++ /dev/null
@@ -1,692 +0,0 @@
-PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_static_part
-PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            srcpart
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_TAB
-            TOK_TABNAME
-               list_bucketing_static_part
-            TOK_PARTSPEC
-               TOK_PARTVAL
-                  ds
-                  '2008-04-08'
-               TOK_PARTVAL
-                  hr
-                  '11'
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               key
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               value
-      TOK_WHERE
-         =
-            TOK_TABLE_OR_COL
-               ds
-            '2008-04-08'
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: srcpart
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/hr=11/
-                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_static_part
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_static_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_static_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [$hdt$_0:srcpart]
-        /srcpart/ds=2008-04-08/hr=12 [$hdt$_0:srcpart]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_static_part
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_static_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	6                   
-	numRows             	1000                
-	rawDataSize         	9624                
-	totalSize           	10898               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484/value=val_484, [103, val_103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103/value=val_103}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-1000
-PREHOOK: query: select count(*) from list_bucketing_static_part
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from list_bucketing_static_part
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-1000
-PREHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            list_bucketing_static_part
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_DIR
-            TOK_TMP_FILE
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_ALLCOLREF
-      TOK_WHERE
-         and
-            and
-               and
-                  =
-                     TOK_TABLE_OR_COL
-                        ds
-                     '2008-04-08'
-                  =
-                     TOK_TABLE_OR_COL
-                        hr
-                     '11'
-               =
-                  TOK_TABLE_OR_COL
-                     key
-                  '484'
-            =
-               TOK_TABLE_OR_COL
-                  value
-               'val_484'
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: list_bucketing_static_part
-            Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
-              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: '484' (type: string), 'val_484' (type: string), '2008-04-08' (type: string), '11' (type: string)
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-                  Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        columns _col0,_col1,_col2,_col3
-                        columns.types string:string:string:string
-                        escape.delim \
-                        hive.serialization.extend.nesting.levels true
-                        serialization.format 1
-                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  TotalFiles: 1
-                  GatherStats: false
-                  MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: value=val_484
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              numFiles 6
-              numRows 1000
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 9624
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 10898
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      Truncated Path -> Alias:
-        /list_bucketing_static_part/ds=2008-04-08/hr=11/key=484/value=val_484 [list_bucketing_static_part]
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	11
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	12
-PREHOOK: query: -- 51 and val_51 in the table so skewed data for 51 and val_14 should be none
--- but query should succeed for 51 or 51 and val_14
-select * from srcpart where ds = '2008-04-08' and key = '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: -- 51 and val_51 in the table so skewed data for 51 and val_14 should be none
--- but query should succeed for 51 or 51 and val_14
-select * from srcpart where ds = '2008-04-08' and key = '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	12
-51	val_51	2008-04-08	12
-PREHOOK: query: select * from list_bucketing_static_part where key = '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where key = '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	11
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '51' and value = 'val_14'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '51' and value = 'val_14'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-PREHOOK: query: select * from list_bucketing_static_part where key = '51' and value = 'val_14'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where key = '51' and value = 'val_14'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-PREHOOK: query: -- queries with < <= > >= should work for skewed test although we don't benefit from pruning
-select count(1) from srcpart where ds = '2008-04-08' and key < '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: -- queries with < <= > >= should work for skewed test although we don't benefit from pruning
-select count(1) from srcpart where ds = '2008-04-08' and key < '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-910
-PREHOOK: query: select count(1) from list_bucketing_static_part where key < '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from list_bucketing_static_part where key < '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-910
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key <= '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key <= '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-914
-PREHOOK: query: select count(1) from list_bucketing_static_part where key <= '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from list_bucketing_static_part where key <= '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-914
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key > '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key > '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-86
-PREHOOK: query: select count(1) from list_bucketing_static_part where key > '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from list_bucketing_static_part where key > '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-86
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key >= '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key >= '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-90
-PREHOOK: query: select count(1) from list_bucketing_static_part where key >= '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from list_bucketing_static_part where key >= '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-90
-PREHOOK: query: -- clean up
-drop table list_bucketing_static_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- clean up
-drop table list_bucketing_static_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Output: default@list_bucketing_static_part
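
For context, the deleted golden file above and the re-added list_bucket_dml_2.q.out below exercise the same list-bucketing flow; a minimal HiveQL sketch, with every name and literal taken verbatim from the test itself:

    -- a table skewed on (key, value): rows matching the listed pairs get their own subdirectories
    create table list_bucketing_static_part (key String, value String)
        partitioned by (ds String, hr String)
        skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
        stored as DIRECTORIES
        STORED AS RCFILE;

    -- load one static partition; skewed keys are written under key=.../value=... paths
    insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
    select key, value from srcpart where ds = '2008-04-08';

    -- a point query on a skewed pair should scan only its subdirectory, which is what the
    -- Truncated Path -> Alias entry in the explain extended output above verifies
    select * from list_bucketing_static_part
    where ds = '2008-04-08' and hr = '11' and key = '484' and value = 'val_484';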

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_2.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_2.q.out
new file mode 100644
index 0000000..a29c224
--- /dev/null
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_2.q.out
@@ -0,0 +1,589 @@
+PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- SORT_QUERY_RESULTS
+
+-- list bucketing DML: static partition. multiple skewed columns.
+-- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+--  5263 000000_0
+--  5263 000001_0
+-- ds=2008-04-08/hr=11/key=103/value=val_103:
+-- 99 000000_0
+-- 99 000001_0
+-- ds=2008-04-08/hr=11/key=484/value=val_484:
+-- 87 000000_0
+-- 87 000001_0
+
+-- create a skewed table
+create table list_bucketing_static_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
+    stored as DIRECTORIES
+    STORED AS RCFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@list_bucketing_static_part
+POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- SORT_QUERY_RESULTS
+
+-- list bucketing DML: static partition. multiple skewed columns.
+-- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+--  5263 000000_0
+--  5263 000001_0
+-- ds=2008-04-08/hr=11/key=103/value=val_103:
+-- 99 000000_0
+-- 99 000001_0
+-- ds=2008-04-08/hr=11/key=484/value=val_484:
+-- 87 000000_0
+-- 87 000001_0
+
+-- create a skewed table
+create table list_bucketing_static_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
+    stored as DIRECTORIES
+    STORED AS RCFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@list_bucketing_static_part
+PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: srcpart
+            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Static Partition Specification: ds=2008-04-08/hr=11/
+                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns key,value
+                      columns.comments 
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.list_bucketing_static_part
+                      partition_columns ds/hr
+                      partition_columns.types string:string
+                      serialization.ddl struct list_bucketing_static_part { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.list_bucketing_static_part
+                TotalFiles: 1
+                GatherStats: true
+                MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=11
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=12
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+      Truncated Path -> Alias:
+        /srcpart/ds=2008-04-08/hr=11 [srcpart]
+        /srcpart/ds=2008-04-08/hr=12 [srcpart]
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 11
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@list_bucketing_static_part
+ds=2008-04-08/hr=11
+PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_static_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 11]    	 
+Database:           	default             	 
+Table:              	list_bucketing_static_part	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	6                   
+	numRows             	1000                
+	rawDataSize         	9624                
+	totalSize           	10898               
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key, value]        	 
+Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484/value=val_484, [103, val_103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103/value=val_103}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+1000
+PREHOOK: query: select count(*) from list_bucketing_static_part
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from list_bucketing_static_part
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+1000
+PREHOOK: query: explain extended
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Partition Description:
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_static_part
+              numFiles 6
+              numRows 1000
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 9624
+              serialization.ddl struct list_bucketing_static_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              totalSize 10898
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+            name: default.list_bucketing_static_part
+      Processor Tree:
+        TableScan
+          alias: list_bucketing_static_part
+          Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
+          GatherStats: false
+          Filter Operator
+            isSamplingPred: false
+            predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
+            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: '484' (type: string), 'val_484' (type: string), '2008-04-08' (type: string), '11' (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
+              ListSink
+
+PREHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+484	val_484	2008-04-08	11
+484	val_484	2008-04-08	11
+PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+484	val_484	2008-04-08	11
+484	val_484	2008-04-08	12
+PREHOOK: query: -- 51 and val_51 in the table so skewed data for 51 and val_14 should be none
+-- but query should succeed for 51 or 51 and val_14
+select * from srcpart where ds = '2008-04-08' and key = '51'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: -- 51 and val_51 in the table so skewed data for 51 and val_14 should be none
+-- but query should succeed for 51 or 51 and val_14
+select * from srcpart where ds = '2008-04-08' and key = '51'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+51	val_51	2008-04-08	11
+51	val_51	2008-04-08	11
+51	val_51	2008-04-08	12
+51	val_51	2008-04-08	12
+PREHOOK: query: select * from list_bucketing_static_part where key = '51'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select * from list_bucketing_static_part where key = '51'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+51	val_51	2008-04-08	11
+51	val_51	2008-04-08	11
+51	val_51	2008-04-08	11
+51	val_51	2008-04-08	11
+PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '51' and value = 'val_14'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '51' and value = 'val_14'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+PREHOOK: query: select * from list_bucketing_static_part where key = '51' and value = 'val_14'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select * from list_bucketing_static_part where key = '51' and value = 'val_14'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+PREHOOK: query: -- queries with < <= > >= should work for skewed test although we don't benefit from pruning
+select count(1) from srcpart where ds = '2008-04-08' and key < '51'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: -- queries with < <= > >= should work for skewed test although we don't benefit from pruning
+select count(1) from srcpart where ds = '2008-04-08' and key < '51'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+910
+PREHOOK: query: select count(1) from list_bucketing_static_part where key < '51'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from list_bucketing_static_part where key < '51'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+910
+PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key <= '51'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key <= '51'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+914
+PREHOOK: query: select count(1) from list_bucketing_static_part where key <= '51'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from list_bucketing_static_part where key <= '51'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+914
+PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key > '51'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key > '51'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+86
+PREHOOK: query: select count(1) from list_bucketing_static_part where key > '51'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from list_bucketing_static_part where key > '51'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+86
+PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key >= '51'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key >= '51'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+90
+PREHOOK: query: select count(1) from list_bucketing_static_part where key >= '51'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from list_bucketing_static_part where key >= '51'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+90
+PREHOOK: query: -- clean up
+drop table list_bucketing_static_part
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Output: default@list_bucketing_static_part
+POSTHOOK: query: -- clean up
+drop table list_bucketing_static_part
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Output: default@list_bucketing_static_part
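
The tail of the file above pairs each range predicate on the list-bucketed table with the same query against the flat srcpart baseline (910, 914, 86 and 90 rows for <, <=, > and >= respectively); skew directories cannot prune range predicates, so these are pure correctness checks. In sketch form, again using only the test's own names:

    -- counts must agree even though list bucketing gives no pruning benefit for ranges
    select count(1) from srcpart where ds = '2008-04-08' and key < '51';    -- 910
    select count(1) from list_bucketing_static_part where key < '51';      -- 910
    select count(1) from srcpart where ds = '2008-04-08' and key >= '51';  -- 90
    select count(1) from list_bucketing_static_part where key >= '51';     -- 90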


[14/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
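
The file deleted below is one of the per-JDK golden outputs this commit removes. A hedged reading, inferred only from the diffs in this thread: such tests carried a JAVA_VERSION_SPECIFIC_OUTPUT marker in their .q header, and the JDK-sensitive part of the output was the key order inside the COLUMN_STATS_ACCURATE JSON (compare {"COLUMN_STATS":...,"BASIC_STATS":...} in the deleted file with {"BASIC_STATS":...,"COLUMN_STATS":...} in the re-added one); once that order is deterministic, a single .q.out per test suffices:

    -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
    -- SORT_QUERY_RESULTS
    -- JAVA_VERSION_SPECIFIC_OUTPUT   <- marker that kept a separate .q.java1.7.out golden file
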
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_9.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_9.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_9.q.java1.7.out
deleted file mode 100644
index 752ea4e..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_9.q.java1.7.out
+++ /dev/null
@@ -1,813 +0,0 @@
-PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns. merge.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103:
--- 99 000000_0
--- 99 000001_0
--- after merge
--- 142 000000_0
--- ds=2008-04-08/hr=11/key=484:
--- 87 000000_0
--- 87 000001_0
--- after merge
--- 118 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key) on ('484','103')
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns. merge.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103:
--- 99 000000_0
--- 99 000001_0
--- after merge
--- 142 000000_0
--- ds=2008-04-08/hr=11/key=484:
--- 87 000000_0
--- 87 000001_0
--- after merge
--- 118 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key) on ('484','103')
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_static_part
-PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: srcpart
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/hr=11/
-                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_static_part
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_static_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_static_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [srcpart]
-        /srcpart/ds=2008-04-08/hr=12 [srcpart]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_static_part
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_static_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	6                   
-	numRows             	1000                
-	rawDataSize         	9624                
-	totalSize           	10898               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key]               	 
-Skewed Values:      	[[484], [103]]      	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484, [103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
-  Stage-4
-  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
-  Stage-2 depends on stages: Stage-0
-  Stage-3
-  Stage-5
-  Stage-6 depends on stages: Stage-5
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: srcpart
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/hr=11/
-                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_static_part
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_static_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_static_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [srcpart]
-        /srcpart/ds=2008-04-08/hr=12 [srcpart]
-
-  Stage: Stage-7
-    Conditional Operator
-
-  Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-  Stage: Stage-3
-    Merge File Operator
-      Map Operator Tree:
-          RCFile Merge Operator
-      merge level: block
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              partition_columns.types string:string
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-5
-    Merge File Operator
-      Map Operator Tree:
-          RCFile Merge Operator
-      merge level: block
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              partition_columns.types string:string
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_static_part
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_static_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	4                   
-	numRows             	1000                
-	rawDataSize         	9624                
-	totalSize           	10786               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key]               	 
-Skewed Values:      	[[484], [103]]      	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484, [103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-1000
-PREHOOK: query: select count(*) from list_bucketing_static_part
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from list_bucketing_static_part
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-1000
-PREHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Partition Description:
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              numFiles 4
-              numRows 1000
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 9624
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 10786
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      Processor Tree:
-        TableScan
-          alias: list_bucketing_static_part
-          Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
-          GatherStats: false
-          Filter Operator
-            isSamplingPred: false
-            predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
-            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: '484' (type: string), 'val_484' (type: string), '2008-04-08' (type: string), '11' (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-              ListSink
-
-PREHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	11
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	12
-PREHOOK: query: -- clean up
-drop table list_bucketing_static_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- clean up
-drop table list_bucketing_static_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Output: default@list_bucketing_static_part
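
For reference, the scenario the deleted golden file above exercises reduces to a few statements. The DDL and queries below are taken from the output itself; the set statements are an assumption about the session settings the corresponding list_bucket_dml_9.q test uses (they are not visible in this diff), so treat this as a sketch rather than the test verbatim:

  -- assumed session settings, typical for list bucketing + merge tests;
  -- they do not appear anywhere in this diff
  set hive.mapred.supports.subdirectories=true;
  set mapred.input.dir.recursive=true;
  set hive.merge.mapfiles=true;
  set hive.merge.mapredfiles=true;

  -- skewed keys '484' and '103' each get their own subdirectory
  create table list_bucketing_static_part (key String, value String)
      partitioned by (ds String, hr String)
      skewed by (key) on ('484','103')
      stored as DIRECTORIES
      STORED AS RCFILE;

  -- with merge enabled, the explain above grows the conditional Stage-7
  -- plus the RCFile Merge Operator stages (Stage-3/Stage-5)
  insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
  select key, value from srcpart where ds = '2008-04-08';

  -- an equality predicate on the skewed key lets the list bucketing pruner
  -- read just the key=484 subdirectory instead of the whole partition
  select * from list_bucketing_static_part
  where ds = '2008-04-08' and hr = '11' and key = '484' and value = 'val_484';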

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_9.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_9.q.java1.8.out b/ql/src/test/results/clientpositive/list_bucket_dml_9.q.java1.8.out
deleted file mode 100644
index 599d3b0..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_9.q.java1.8.out
+++ /dev/null
@@ -1,915 +0,0 @@
-PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns. merge.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103:
--- 99 000000_0
--- 99 000001_0
--- after merge
--- 142 000000_0
--- ds=2008-04-08/hr=11/key=484:
--- 87 000000_0
--- 87 000001_0
--- after merge
--- 118 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key) on ('484','103')
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns. merge.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103:
--- 99 000000_0
--- 99 000001_0
--- after merge
--- 142 000000_0
--- ds=2008-04-08/hr=11/key=484:
--- 87 000000_0
--- 87 000001_0
--- after merge
--- 118 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key) on ('484','103')
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_static_part
-PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            srcpart
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_TAB
-            TOK_TABNAME
-               list_bucketing_static_part
-            TOK_PARTSPEC
-               TOK_PARTVAL
-                  ds
-                  '2008-04-08'
-               TOK_PARTVAL
-                  hr
-                  '11'
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               key
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               value
-      TOK_WHERE
-         =
-            TOK_TABLE_OR_COL
-               ds
-            '2008-04-08'
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: srcpart
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/hr=11/
-                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_static_part
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_static_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_static_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [srcpart]
-        /srcpart/ds=2008-04-08/hr=12 [srcpart]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_static_part
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_static_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	6                   
-	numRows             	1000                
-	rawDataSize         	9624                
-	totalSize           	10898               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key]               	 
-Skewed Values:      	[[484], [103]]      	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103, [484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            srcpart
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_TAB
-            TOK_TABNAME
-               list_bucketing_static_part
-            TOK_PARTSPEC
-               TOK_PARTVAL
-                  ds
-                  '2008-04-08'
-               TOK_PARTVAL
-                  hr
-                  '11'
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               key
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               value
-      TOK_WHERE
-         =
-            TOK_TABLE_OR_COL
-               ds
-            '2008-04-08'
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
-  Stage-4
-  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
-  Stage-2 depends on stages: Stage-0
-  Stage-3
-  Stage-5
-  Stage-6 depends on stages: Stage-5
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: srcpart
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/hr=11/
-                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_static_part
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_static_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_static_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [srcpart]
-        /srcpart/ds=2008-04-08/hr=12 [srcpart]
-
-  Stage: Stage-7
-    Conditional Operator
-
-  Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-  Stage: Stage-3
-    Merge File Operator
-      Map Operator Tree:
-          RCFile Merge Operator
-      merge level: block
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              partition_columns.types string:string
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-5
-    Merge File Operator
-      Map Operator Tree:
-          RCFile Merge Operator
-      merge level: block
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              partition_columns.types string:string
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_static_part
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_static_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	4                   
-	numRows             	1000                
-	rawDataSize         	9624                
-	totalSize           	10786               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key]               	 
-Skewed Values:      	[[484], [103]]      	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103, [484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-1000
-PREHOOK: query: select count(*) from list_bucketing_static_part
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from list_bucketing_static_part
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-1000
-PREHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            list_bucketing_static_part
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_DIR
-            TOK_TMP_FILE
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_ALLCOLREF
-      TOK_WHERE
-         and
-            and
-               and
-                  =
-                     TOK_TABLE_OR_COL
-                        ds
-                     '2008-04-08'
-                  =
-                     TOK_TABLE_OR_COL
-                        hr
-                     '11'
-               =
-                  TOK_TABLE_OR_COL
-                     key
-                  '484'
-            =
-               TOK_TABLE_OR_COL
-                  value
-               'val_484'
-
-
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Partition Description:
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              numFiles 4
-              numRows 1000
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 9624
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 10786
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      Processor Tree:
-        TableScan
-          alias: list_bucketing_static_part
-          Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
-          GatherStats: false
-          Filter Operator
-            isSamplingPred: false
-            predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
-            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: '484' (type: string), 'val_484' (type: string), '2008-04-08' (type: string), '11' (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-              ListSink
-
-PREHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	11
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	12
-PREHOOK: query: -- clean up
-drop table list_bucketing_static_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- clean up
-drop table list_bucketing_static_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Output: default@list_bucketing_static_part

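For readers tracing the deleted golden file above: list_bucketing_static_part exercises Hive's list-bucketing (skewed-table) feature, where listed (key, value) pairs get their own subdirectories so a point predicate such as key = '484' AND value = 'val_484' is served from a single directory rather than a full partition scan. A minimal sketch of the kind of setup these tests use (the skew values and the INSERT are illustrative assumptions; the originating .q file is not part of this diff):

  CREATE TABLE list_bucketing_static_part (key STRING, value STRING)
    PARTITIONED BY (ds STRING, hr STRING)
    SKEWED BY (key, value) ON (('484', 'val_484'))
    STORED AS DIRECTORIES
    STORED AS RCFILE;

  -- Static-partition load; count(*) = 1000 then matches the srcpart
  -- count for ds = '2008-04-08' shown in the output above.
  INSERT OVERWRITE TABLE list_bucketing_static_part
    PARTITION (ds = '2008-04-08', hr = '11')
    SELECT key, value FROM srcpart WHERE ds = '2008-04-08';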

[12/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out b/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out
new file mode 100644
index 0000000..fc8eb1c
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.out
@@ -0,0 +1,216 @@
+PREHOOK: query: DROP TABLE over1k
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE over1k
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE over1korc
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE over1korc
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: -- data setup
+CREATE TABLE over1k(t tinyint,
+           si smallint,
+           i int,
+           b bigint,
+           f float,
+           d double,
+           bo boolean,
+           s string,
+           ts timestamp,
+           dec decimal(4,2),
+           bin binary)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@over1k
+POSTHOOK: query: -- data setup
+CREATE TABLE over1k(t tinyint,
+           si smallint,
+           i int,
+           b bigint,
+           f float,
+           d double,
+           bo boolean,
+           s string,
+           ts timestamp,
+           dec decimal(4,2),
+           bin binary)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@over1k
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@over1k
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@over1k
+PREHOOK: query: CREATE TABLE over1korc(t tinyint,
+           si smallint,
+           i int,
+           b bigint,
+           f float,
+           d double,
+           bo boolean,
+           s string,
+           ts timestamp,
+           dec decimal(4,2),
+           bin binary)
+STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@over1korc
+POSTHOOK: query: CREATE TABLE over1korc(t tinyint,
+           si smallint,
+           i int,
+           b bigint,
+           f float,
+           d double,
+           bo boolean,
+           s string,
+           ts timestamp,
+           dec decimal(4,2),
+           bin binary)
+STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@over1korc
+PREHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
+PREHOOK: type: QUERY
+PREHOOK: Input: default@over1k
+PREHOOK: Output: default@over1korc
+POSTHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@over1k
+POSTHOOK: Output: default@over1korc
+POSTHOOK: Lineage: over1korc.b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
+POSTHOOK: Lineage: over1korc.bin SIMPLE [(over1k)over1k.FieldSchema(name:bin, type:binary, comment:null), ]
+POSTHOOK: Lineage: over1korc.bo SIMPLE [(over1k)over1k.FieldSchema(name:bo, type:boolean, comment:null), ]
+POSTHOOK: Lineage: over1korc.d SIMPLE [(over1k)over1k.FieldSchema(name:d, type:double, comment:null), ]
+POSTHOOK: Lineage: over1korc.dec SIMPLE [(over1k)over1k.FieldSchema(name:dec, type:decimal(4,2), comment:null), ]
+POSTHOOK: Lineage: over1korc.f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
+POSTHOOK: Lineage: over1korc.i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
+POSTHOOK: Lineage: over1korc.s SIMPLE [(over1k)over1k.FieldSchema(name:s, type:string, comment:null), ]
+POSTHOOK: Lineage: over1korc.si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
+POSTHOOK: Lineage: over1korc.t SIMPLE [(over1k)over1k.FieldSchema(name:t, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: over1korc.ts SIMPLE [(over1k)over1k.FieldSchema(name:ts, type:timestamp, comment:null), ]
+PREHOOK: query: EXPLAIN SELECT 
+  i,
+  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
+  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
+  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
+  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT 
+  i,
+  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
+  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
+  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
+  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: over1korc
+                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: i (type: int)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: avg(50), avg(50.0), avg(50)
+                      keys: _col0 (type: int)
+                      mode: hash
+                      outputColumnNames: _col0, _col1, _col2, _col3
+                      Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: int)
+                        Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col1 (type: struct<count:bigint,sum:double,input:int>), _col2 (type: struct<count:bigint,sum:double,input:double>), _col3 (type: struct<count:bigint,sum:decimal(12,0),input:decimal(10,0)>)
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: avg(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2)
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: int)
+                  sort order: +
+                  Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
+                  TopN Hash Memory Usage: 0.1
+                  value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: decimal(14,4))
+        Reducer 3 
+            Execution mode: vectorized, llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: decimal(14,4))
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
+                Limit
+                  Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 10
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT 
+  i,
+  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
+  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
+  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
+  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@over1korc
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT 
+  i,
+  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
+  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
+  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
+  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@over1korc
+#### A masked pattern was here ####
+65536	50.0	50.0	50.0000
+65537	50.0	50.0	50.0000
+65538	50.0	50.0	50.0000
+65539	50.0	50.0	50.0000
+65540	50.0	50.0	50.0000
+65541	50.0	50.0	50.0000
+65542	50.0	50.0	50.0000
+65543	50.0	50.0	50.0000
+65544	50.0	50.0	50.0000
+65545	50.0	50.0	50.0000

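A note on the expected values in the new vector_cast_constant.q.out above: AVG over an INT or DOUBLE constant produces DOUBLE (hence 50.0), while CAST(50 AS DECIMAL) defaults to DECIMAL(10,0) and AVG widens it by four digits of precision and scale to DECIMAL(14,4), which is why the last column prints 50.0000 and why the reduce-side value expression in the plan carries decimal(14,4). A quick way to observe the same typing outside the test harness (a sketch; the SET options are standard vectorization switches, not taken from this .q file):

  SET hive.vectorized.execution.enabled=true;
  SET hive.vectorized.execution.reduce.enabled=true;
  SELECT AVG(CAST(50 AS INT)),      -- DOUBLE        -> 50.0
         AVG(CAST(50 AS DOUBLE)),   -- DOUBLE        -> 50.0
         AVG(CAST(50 AS DECIMAL))   -- DECIMAL(14,4) -> 50.0000
  FROM over1korc;
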
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/outer_join_ppr.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/outer_join_ppr.q.java1.7.out b/ql/src/test/results/clientpositive/outer_join_ppr.q.java1.7.out
deleted file mode 100644
index 1312e53..0000000
--- a/ql/src/test/results/clientpositive/outer_join_ppr.q.java1.7.out
+++ /dev/null
@@ -1,685 +0,0 @@
-PREHOOK: query: -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-PREHOOK: type: QUERY
-POSTHOOK: query: -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: a
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              Reduce Output Operator
-                key expressions: _col0 (type: string)
-                null sort order: a
-                sort order: +
-                Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                tag: 0
-                value expressions: _col1 (type: string)
-                auto parallelism: false
-          TableScan
-            alias: b
-            Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string), ds (type: string)
-              outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-              Reduce Output Operator
-                key expressions: _col0 (type: string)
-                null sort order: a
-                sort order: +
-                Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-                tag: 1
-                value expressions: _col1 (type: string), _col2 (type: string)
-                auto parallelism: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numRows 500
-              rawDataSize 5312
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numRows 500
-                rawDataSize 5312
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-09
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-09
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /src [$hdt$_0:a]
-        /srcpart/ds=2008-04-08/hr=11 [$hdt$_1:b]
-        /srcpart/ds=2008-04-08/hr=12 [$hdt$_1:b]
-        /srcpart/ds=2008-04-09/hr=11 [$hdt$_1:b]
-        /srcpart/ds=2008-04-09/hr=12 [$hdt$_1:b]
-      Needs Tagging: true
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Outer Join 0 to 1
-          filter mappings:
-            1 [0, 1]
-          filter predicates:
-            0 
-            1 {(VALUE._col1 = '2008-04-08')}
-          keys:
-            0 _col0 (type: string)
-            1 _col0 (type: string)
-          outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            isSamplingPred: false
-            predicate: ((UDFToDouble(_col0) > 10.0) and (UDFToDouble(_col0) < 20.0) and (UDFToDouble(_col2) > 15.0) and (UDFToDouble(_col2) < 25.0)) (type: boolean)
-            Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
-            File Output Operator
-              compressed: false
-              GlobalTableId: 0
-#### A masked pattern was here ####
-              NumFilesPerFileSink: 1
-              Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-              table:
-                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  properties:
-                    columns _col0,_col1,_col2,_col3
-                    columns.types string:string:string:string
-                    escape.delim \
-                    hive.serialization.extend.additional.nesting.levels true
-                    serialization.escape.crlf true
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              TotalFiles: 1
-              GatherStats: false
-              MultiFileSpray: false
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-#### A masked pattern was here ####
-17	val_17	17	val_17
-17	val_17	17	val_17
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-19	val_19	19	val_19
-19	val_19	19	val_19
-PREHOOK: query: EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: a
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: ((UDFToDouble(key) > 15.0) and (UDFToDouble(key) < 25.0)) (type: boolean)
-              Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  null sort order: a
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
-                  tag: 0
-                  value expressions: _col1 (type: string)
-                  auto parallelism: false
-          TableScan
-            alias: b
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: ((UDFToDouble(key) > 15.0) and (UDFToDouble(key) < 25.0)) (type: boolean)
-              Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  null sort order: a
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
-                  tag: 1
-                  value expressions: _col1 (type: string)
-                  auto parallelism: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numRows 500
-              rawDataSize 5312
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numRows 500
-                rawDataSize 5312
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /src [$hdt$_0:a]
-        /srcpart/ds=2008-04-08/hr=11 [$hdt$_1:b]
-        /srcpart/ds=2008-04-08/hr=12 [$hdt$_1:b]
-      Needs Tagging: true
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Right Outer Join0 to 1
-          keys:
-            0 _col0 (type: string)
-            1 _col0 (type: string)
-          outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 122 Data size: 1296 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            isSamplingPred: false
-            predicate: ((UDFToDouble(_col0) > 10.0) and (UDFToDouble(_col0) < 20.0)) (type: boolean)
-            Statistics: Num rows: 13 Data size: 138 Basic stats: COMPLETE Column stats: NONE
-            File Output Operator
-              compressed: false
-              GlobalTableId: 0
-#### A masked pattern was here ####
-              NumFilesPerFileSink: 1
-              Statistics: Num rows: 13 Data size: 138 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-              table:
-                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  properties:
-                    columns _col0,_col1,_col2,_col3
-                    columns.types string:string:string:string
-                    escape.delim \
-                    hive.serialization.extend.additional.nesting.levels true
-                    serialization.escape.crlf true
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              TotalFiles: 1
-              GatherStats: false
-              MultiFileSpray: false
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-17	val_17	17	val_17
-17	val_17	17	val_17
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-19	val_19	19	val_19
-19	val_19	19	val_19

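Both java-version-specific golden files for outer_join_ppr (this one and the 1.8 variant removed next) pin down the same predicate-pushdown behavior: with b.ds = '2008-04-08' in the ON clause of the FULL OUTER JOIN, no partitions are pruned and all four ds/hr directories appear under Truncated Path -> Alias, whereas moving the predicate into the WHERE clause prunes the scan to the two 2008-04-08 partitions and lets the planner downgrade the join (the second plan shows Right Outer Join). Since the WHERE predicates reject every NULL-padded row an outer join could add, the query is equivalent to a plain inner join; a hand-written sketch of that semantic equivalence (not Hive's literal internal rewrite):

  SELECT a.key, a.value, b.key, b.value
  FROM src a
  JOIN srcpart b ON (a.key = b.key)
  WHERE a.key > 10 AND a.key < 20
    AND b.key > 15 AND b.key < 25
    AND b.ds = '2008-04-08';
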
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/outer_join_ppr.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/outer_join_ppr.q.java1.8.out b/ql/src/test/results/clientpositive/outer_join_ppr.q.java1.8.out
deleted file mode 100644
index b9c1a66..0000000
--- a/ql/src/test/results/clientpositive/outer_join_ppr.q.java1.8.out
+++ /dev/null
@@ -1,855 +0,0 @@
-PREHOOK: query: -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-PREHOOK: type: QUERY
-POSTHOOK: query: -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_FULLOUTERJOIN
-         TOK_TABREF
-            TOK_TABNAME
-               src
-            a
-         TOK_TABREF
-            TOK_TABNAME
-               srcpart
-            b
-         AND
-            =
-               .
-                  TOK_TABLE_OR_COL
-                     a
-                  key
-               .
-                  TOK_TABLE_OR_COL
-                     b
-                  key
-            =
-               .
-                  TOK_TABLE_OR_COL
-                     b
-                  ds
-               '2008-04-08'
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_DIR
-            TOK_TMP_FILE
-      TOK_SELECT
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  a
-               key
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  a
-               value
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  b
-               key
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  b
-               value
-      TOK_WHERE
-         AND
-            AND
-               AND
-                  >
-                     .
-                        TOK_TABLE_OR_COL
-                           a
-                        key
-                     10
-                  <
-                     .
-                        TOK_TABLE_OR_COL
-                           a
-                        key
-                     20
-               >
-                  .
-                     TOK_TABLE_OR_COL
-                        b
-                     key
-                  15
-            <
-               .
-                  TOK_TABLE_OR_COL
-                     b
-                  key
-               25
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string), ds (type: string)
-              outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-              Reduce Output Operator
-                key expressions: _col0 (type: string)
-                sort order: +
-                Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-                tag: 0
-                value expressions: _col1 (type: string), _col2 (type: string)
-                auto parallelism: false
-          TableScan
-            alias: a
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              Reduce Output Operator
-                key expressions: _col0 (type: string)
-                sort order: +
-                Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                tag: 1
-                value expressions: _col1 (type: string)
-                auto parallelism: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numRows 500
-              rawDataSize 5312
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE true
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numRows 500
-                rawDataSize 5312
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-09
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-09
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /src [$hdt$_0:$hdt$_1:a]
-        /srcpart/ds=2008-04-08/hr=11 [$hdt$_0:$hdt$_0:b]
-        /srcpart/ds=2008-04-08/hr=12 [$hdt$_0:$hdt$_0:b]
-        /srcpart/ds=2008-04-09/hr=11 [$hdt$_0:$hdt$_0:b]
-        /srcpart/ds=2008-04-09/hr=12 [$hdt$_0:$hdt$_0:b]
-      Needs Tagging: true
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Outer Join 0 to 1
-          filter mappings:
-            0 [1, 1]
-          filter predicates:
-            0 {(VALUE._col1 = '2008-04-08')}
-            1 
-          keys:
-            0 _col0 (type: string)
-            1 _col0 (type: string)
-          outputColumnNames: _col0, _col1, _col3, _col4
-          Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            isSamplingPred: false
-            predicate: ((((UDFToDouble(_col3) > 10.0) and (UDFToDouble(_col3) < 20.0)) and (UDFToDouble(_col0) > 15.0)) and (UDFToDouble(_col0) < 25.0)) (type: boolean)
-            Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: _col3 (type: string), _col4 (type: string), _col0 (type: string), _col1 (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    properties:
-                      columns _col0,_col1,_col2,_col3
-                      columns.types string:string:string:string
-                      escape.delim \
-                      hive.serialization.extend.nesting.levels true
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                TotalFiles: 1
-                GatherStats: false
-                MultiFileSpray: false
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-#### A masked pattern was here ####
-17	val_17	17	val_17
-17	val_17	17	val_17
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-19	val_19	19	val_19
-19	val_19	19	val_19
-PREHOOK: query: EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_FULLOUTERJOIN
-         TOK_TABREF
-            TOK_TABNAME
-               src
-            a
-         TOK_TABREF
-            TOK_TABNAME
-               srcpart
-            b
-         =
-            .
-               TOK_TABLE_OR_COL
-                  a
-               key
-            .
-               TOK_TABLE_OR_COL
-                  b
-               key
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_DIR
-            TOK_TMP_FILE
-      TOK_SELECT
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  a
-               key
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  a
-               value
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  b
-               key
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  b
-               value
-      TOK_WHERE
-         AND
-            AND
-               AND
-                  AND
-                     >
-                        .
-                           TOK_TABLE_OR_COL
-                              a
-                           key
-                        10
-                     <
-                        .
-                           TOK_TABLE_OR_COL
-                              a
-                           key
-                        20
-                  >
-                     .
-                        TOK_TABLE_OR_COL
-                           b
-                        key
-                     15
-               <
-                  .
-                     TOK_TABLE_OR_COL
-                        b
-                     key
-                  25
-            =
-               .
-                  TOK_TABLE_OR_COL
-                     b
-                  ds
-               '2008-04-08'
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: b
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: ((UDFToDouble(key) > 15.0) and (UDFToDouble(key) < 25.0)) (type: boolean)
-              Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
-                  tag: 0
-                  value expressions: _col1 (type: string)
-                  auto parallelism: false
-          TableScan
-            alias: a
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: ((UDFToDouble(key) > 15.0) and (UDFToDouble(key) < 25.0)) (type: boolean)
-              Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
-                  tag: 1
-                  value expressions: _col1 (type: string)
-                  auto parallelism: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numRows 500
-              rawDataSize 5312
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE true
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numRows 500
-                rawDataSize 5312
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /src [$hdt$_0:$hdt$_1:a]
-        /srcpart/ds=2008-04-08/hr=11 [$hdt$_0:$hdt$_0:b]
-        /srcpart/ds=2008-04-08/hr=12 [$hdt$_0:$hdt$_0:b]
-      Needs Tagging: true
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Left Outer Join0 to 1
-          keys:
-            0 _col0 (type: string)
-            1 _col0 (type: string)
-          outputColumnNames: _col0, _col1, _col3, _col4
-          Statistics: Num rows: 122 Data size: 1296 Basic stats: COMPLETE Column stats: NONE
-          Filter Operator
-            isSamplingPred: false
-            predicate: ((UDFToDouble(_col3) > 10.0) and (UDFToDouble(_col3) < 20.0)) (type: boolean)
-            Statistics: Num rows: 13 Data size: 138 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: _col3 (type: string), _col4 (type: string), _col0 (type: string), _col1 (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 13 Data size: 138 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Statistics: Num rows: 13 Data size: 138 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    properties:
-                      columns _col0,_col1,_col2,_col3
-                      columns.types string:string:string:string
-                      escape.delim \
-                      hive.serialization.extend.nesting.levels true
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                TotalFiles: 1
-                GatherStats: false
-                MultiFileSpray: false
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-17	val_17	17	val_17
-17	val_17	17	val_17
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-19	val_19	19	val_19
-19	val_19	19	val_19
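
A note on what drives the churn in the hunks above: the golden files record the
COLUMN_STATS_ACCURATE table property — formerly the bare "true" visible in the
removed lines, now a small JSON document — and JDK 8 changed HashMap iteration
order relative to JDK 7, so any JSON rendered from an unordered map could print
its keys differently per JVM and break golden-file comparison. The regenerated
files print the keys in sorted order (BASIC_STATS before COLUMN_STATS), which is
what a key-sorted serializer produces. Below is a minimal sketch of that idea —
an illustration only, not the HIVE-13409 patch itself, with values simplified to
plain strings (the real property nests a JSON object under COLUMN_STATS):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;

    public class DeterministicStatsJson {
      // Rendering the property from a TreeMap fixes the key order, so the
      // same JSON text is produced on JDK 7 and JDK 8.
      static String toJson(Map<String, String> props) {
        StringBuilder sb = new StringBuilder("{");
        boolean first = true;
        for (Map.Entry<String, String> e : new TreeMap<>(props).entrySet()) {
          if (!first) sb.append(',');
          first = false;
          sb.append('"').append(e.getKey()).append("\":\"").append(e.getValue()).append('"');
        }
        return sb.append('}').toString();
      }

      public static void main(String[] args) {
        Map<String, String> p = new HashMap<>();
        p.put("COLUMN_STATS", "true");
        p.put("BASIC_STATS", "true");
        // Prints {"BASIC_STATS":"true","COLUMN_STATS":"true"} regardless of
        // how HashMap happens to iterate on this JVM.
        System.out.println(toJson(p));
      }
    }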


[25/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
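
Context for the deletions in this message: queries marked with the
-- JAVA_VERSION_SPECIFIC_OUTPUT comment (visible in the removed files below)
used to carry per-JDK golden files, <name>.q.java1.7.out and
<name>.q.java1.8.out; once ordering fixes made the outputs converge, a single
<name>.q.out suffices. The sketch below shows the kind of lookup such a test
harness performs — this is a hypothetical resolver, not Hive's actual QTestUtil
code, and the fallback rule shown is an assumption:

    import java.io.File;

    public class GoldenFileResolver {
      // Hypothetical: prefer a JDK-specific golden file when one exists,
      // otherwise fall back to the shared .q.out. After HIVE-13549 the
      // versioned files are gone, so the fallback is always taken.
      static File resolve(File resultsDir, String queryName) {
        String ver = System.getProperty("java.specification.version"); // e.g. "1.8"
        File versioned = new File(resultsDir, queryName + ".q.java" + ver + ".out");
        return versioned.exists() ? versioned : new File(resultsDir, queryName + ".q.out");
      }

      public static void main(String[] args) {
        File dir = new File("ql/src/test/results/clientpositive");
        System.out.println(resolve(dir, "char_udf1"));
      }
    }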
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/char_udf1.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/char_udf1.q.java1.8.out b/ql/src/test/results/clientpositive/char_udf1.q.java1.8.out
deleted file mode 100644
index 5691a06..0000000
--- a/ql/src/test/results/clientpositive/char_udf1.q.java1.8.out
+++ /dev/null
@@ -1,457 +0,0 @@
-PREHOOK: query: drop table char_udf_1
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: drop table char_udf_1
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table char_udf_1 (c1 string, c2 string, c3 char(10), c4 char(20))
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@char_udf_1
-POSTHOOK: query: create table char_udf_1 (c1 string, c2 string, c3 char(10), c4 char(20))
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@char_udf_1
-PREHOOK: query: insert overwrite table char_udf_1
-  select key, value, key, value from src where key = '238' limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@char_udf_1
-POSTHOOK: query: insert overwrite table char_udf_1
-  select key, value, key, value from src where key = '238' limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@char_udf_1
-POSTHOOK: Lineage: char_udf_1.c1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: char_udf_1.c2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: char_udf_1.c3 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: char_udf_1.c4 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- UDFs with char support
-select 
-  concat(c1, c2),
-  concat(c3, c4),
-  concat(c1, c2) = concat(c3, c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- UDFs with char support
-select 
-  concat(c1, c2),
-  concat(c3, c4),
-  concat(c1, c2) = concat(c3, c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-238val_238	238val_238                    	true
-PREHOOK: query: select
-  upper(c2),
-  upper(c4),
-  upper(c2) = upper(c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  upper(c2),
-  upper(c4),
-  upper(c2) = upper(c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-VAL_238	VAL_238             	true
-PREHOOK: query: select
-  lower(c2),
-  lower(c4),
-  lower(c2) = lower(c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  lower(c2),
-  lower(c4),
-  lower(c2) = lower(c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-val_238	val_238             	true
-PREHOOK: query: -- Scalar UDFs
-select
-  ascii(c2),
-  ascii(c4),
-  ascii(c2) = ascii(c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: -- Scalar UDFs
-select
-  ascii(c2),
-  ascii(c4),
-  ascii(c2) = ascii(c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-118	118	true
-PREHOOK: query: select 
-  concat_ws('|', c1, c2),
-  concat_ws('|', c3, c4),
-  concat_ws('|', c1, c2) = concat_ws('|', c3, c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select 
-  concat_ws('|', c1, c2),
-  concat_ws('|', c3, c4),
-  concat_ws('|', c1, c2) = concat_ws('|', c3, c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-238|val_238	238|val_238	true
-PREHOOK: query: select
-  decode(encode(c2, 'US-ASCII'), 'US-ASCII'),
-  decode(encode(c4, 'US-ASCII'), 'US-ASCII'),
-  decode(encode(c2, 'US-ASCII'), 'US-ASCII') = decode(encode(c4, 'US-ASCII'), 'US-ASCII')
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  decode(encode(c2, 'US-ASCII'), 'US-ASCII'),
-  decode(encode(c4, 'US-ASCII'), 'US-ASCII'),
-  decode(encode(c2, 'US-ASCII'), 'US-ASCII') = decode(encode(c4, 'US-ASCII'), 'US-ASCII')
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-val_238	val_238	true
-PREHOOK: query: select
-  instr(c2, '_'),
-  instr(c4, '_'),
-  instr(c2, '_') = instr(c4, '_')
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  instr(c2, '_'),
-  instr(c4, '_'),
-  instr(c2, '_') = instr(c4, '_')
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-4	4	true
-PREHOOK: query: select
-  length(c2),
-  length(c4),
-  length(c2) = length(c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  length(c2),
-  length(c4),
-  length(c2) = length(c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-7	7	true
-PREHOOK: query: select
-  locate('a', 'abcdabcd', 3),
-  locate(cast('a' as char(1)), cast('abcdabcd' as char(10)), 3),
-  locate('a', 'abcdabcd', 3) = locate(cast('a' as char(1)), cast('abcdabcd' as char(10)), 3)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  locate('a', 'abcdabcd', 3),
-  locate(cast('a' as char(1)), cast('abcdabcd' as char(10)), 3),
-  locate('a', 'abcdabcd', 3) = locate(cast('a' as char(1)), cast('abcdabcd' as char(10)), 3)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-5	5	true
-PREHOOK: query: select
-  lpad(c2, 15, ' '),
-  lpad(c4, 15, ' '),
-  lpad(c2, 15, ' ') = lpad(c4, 15, ' ')
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  lpad(c2, 15, ' '),
-  lpad(c4, 15, ' '),
-  lpad(c2, 15, ' ') = lpad(c4, 15, ' ')
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-        val_238	        val_238	true
-PREHOOK: query: select
-  ltrim(c2),
-  ltrim(c4),
-  ltrim(c2) = ltrim(c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  ltrim(c2),
-  ltrim(c4),
-  ltrim(c2) = ltrim(c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-val_238	val_238	true
-PREHOOK: query: select
-  regexp(c2, 'val'),
-  regexp(c4, 'val'),
-  regexp(c2, 'val') = regexp(c4, 'val')
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  regexp(c2, 'val'),
-  regexp(c4, 'val'),
-  regexp(c2, 'val') = regexp(c4, 'val')
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-true	true	true
-PREHOOK: query: select
-  regexp_extract(c2, 'val_([0-9]+)', 1),
-  regexp_extract(c4, 'val_([0-9]+)', 1),
-  regexp_extract(c2, 'val_([0-9]+)', 1) = regexp_extract(c4, 'val_([0-9]+)', 1)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  regexp_extract(c2, 'val_([0-9]+)', 1),
-  regexp_extract(c4, 'val_([0-9]+)', 1),
-  regexp_extract(c2, 'val_([0-9]+)', 1) = regexp_extract(c4, 'val_([0-9]+)', 1)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-238	238	true
-PREHOOK: query: select
-  regexp_replace(c2, 'val', 'replaced'),
-  regexp_replace(c4, 'val', 'replaced'),
-  regexp_replace(c2, 'val', 'replaced') = regexp_replace(c4, 'val', 'replaced')
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  regexp_replace(c2, 'val', 'replaced'),
-  regexp_replace(c4, 'val', 'replaced'),
-  regexp_replace(c2, 'val', 'replaced') = regexp_replace(c4, 'val', 'replaced')
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-replaced_238	replaced_238	true
-PREHOOK: query: select
-  reverse(c2),
-  reverse(c4),
-  reverse(c2) = reverse(c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  reverse(c2),
-  reverse(c4),
-  reverse(c2) = reverse(c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-832_lav	832_lav	true
-PREHOOK: query: select
-  rpad(c2, 15, ' '),
-  rpad(c4, 15, ' '),
-  rpad(c2, 15, ' ') = rpad(c4, 15, ' ')
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  rpad(c2, 15, ' '),
-  rpad(c4, 15, ' '),
-  rpad(c2, 15, ' ') = rpad(c4, 15, ' ')
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-val_238        	val_238        	true
-PREHOOK: query: select
-  rtrim(c2),
-  rtrim(c4),
-  rtrim(c2) = rtrim(c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  rtrim(c2),
-  rtrim(c4),
-  rtrim(c2) = rtrim(c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-val_238	val_238	true
-PREHOOK: query: select
-  sentences('See spot run.  See jane run.'),
-  sentences(cast('See spot run.  See jane run.' as char(50)))
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  sentences('See spot run.  See jane run.'),
-  sentences(cast('See spot run.  See jane run.' as char(50)))
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-[["See","spot","run"],["See","jane","run"]]	[["See","spot","run"],["See","jane","run"]]
-PREHOOK: query: select
-  split(c2, '_'),
-  split(c4, '_')
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  split(c2, '_'),
-  split(c4, '_')
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-["val","238"]	["val","238"]
-PREHOOK: query: select 
-  str_to_map('a:1,b:2,c:3',',',':'),
-  str_to_map(cast('a:1,b:2,c:3' as char(20)),',',':')
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select 
-  str_to_map('a:1,b:2,c:3',',',':'),
-  str_to_map(cast('a:1,b:2,c:3' as char(20)),',',':')
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-{"a":"1","b":"2","c":"3"}	{"a":"1","b":"2","c":"3"}
-PREHOOK: query: select
-  substr(c2, 1, 3),
-  substr(c4, 1, 3),
-  substr(c2, 1, 3) = substr(c4, 1, 3)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  substr(c2, 1, 3),
-  substr(c4, 1, 3),
-  substr(c2, 1, 3) = substr(c4, 1, 3)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-val	val	true
-PREHOOK: query: select
-  trim(c2),
-  trim(c4),
-  trim(c2) = trim(c4)
-from char_udf_1 limit 1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  trim(c2),
-  trim(c4),
-  trim(c2) = trim(c4)
-from char_udf_1 limit 1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-val_238	val_238	true
-PREHOOK: query: -- Aggregate Functions
-select
-  compute_stats(c2, 16),
-  compute_stats(c4, 16)
-from char_udf_1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: -- Aggregate Functions
-select
-  compute_stats(c2, 16),
-  compute_stats(c4, 16)
-from char_udf_1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1}	{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1}
-PREHOOK: query: select
-  min(c2),
-  min(c4)
-from char_udf_1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  min(c2),
-  min(c4)
-from char_udf_1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-val_238	val_238             
-PREHOOK: query: select
-  max(c2),
-  max(c4)
-from char_udf_1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-POSTHOOK: query: select
-  max(c2),
-  max(c4)
-from char_udf_1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@char_udf_1
-#### A masked pattern was here ####
-val_238	val_238             
-PREHOOK: query: drop table char_udf_1
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@char_udf_1
-PREHOOK: Output: default@char_udf_1
-POSTHOOK: query: drop table char_udf_1
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@char_udf_1
-POSTHOOK: Output: default@char_udf_1

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/char_udf1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/char_udf1.q.out b/ql/src/test/results/clientpositive/char_udf1.q.out
new file mode 100644
index 0000000..d84237a
--- /dev/null
+++ b/ql/src/test/results/clientpositive/char_udf1.q.out
@@ -0,0 +1,459 @@
+PREHOOK: query: drop table char_udf_1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table char_udf_1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table char_udf_1 (c1 string, c2 string, c3 char(10), c4 char(20))
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@char_udf_1
+POSTHOOK: query: create table char_udf_1 (c1 string, c2 string, c3 char(10), c4 char(20))
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@char_udf_1
+PREHOOK: query: insert overwrite table char_udf_1
+  select key, value, key, value from src where key = '238' limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@char_udf_1
+POSTHOOK: query: insert overwrite table char_udf_1
+  select key, value, key, value from src where key = '238' limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@char_udf_1
+POSTHOOK: Lineage: char_udf_1.c1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: char_udf_1.c2 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: char_udf_1.c3 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: char_udf_1.c4 EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- UDFs with char support
+select 
+  concat(c1, c2),
+  concat(c3, c4),
+  concat(c1, c2) = concat(c3, c4)
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: -- UDFs with char support
+select 
+  concat(c1, c2),
+  concat(c3, c4),
+  concat(c1, c2) = concat(c3, c4)
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+238val_238	238val_238                    	true
+PREHOOK: query: select
+  upper(c2),
+  upper(c4),
+  upper(c2) = upper(c4)
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  upper(c2),
+  upper(c4),
+  upper(c2) = upper(c4)
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+VAL_238	VAL_238             	true
+PREHOOK: query: select
+  lower(c2),
+  lower(c4),
+  lower(c2) = lower(c4)
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  lower(c2),
+  lower(c4),
+  lower(c2) = lower(c4)
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+val_238	val_238             	true
+PREHOOK: query: -- Scalar UDFs
+select
+  ascii(c2),
+  ascii(c4),
+  ascii(c2) = ascii(c4)
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: -- Scalar UDFs
+select
+  ascii(c2),
+  ascii(c4),
+  ascii(c2) = ascii(c4)
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+118	118	true
+PREHOOK: query: select 
+  concat_ws('|', c1, c2),
+  concat_ws('|', c3, c4),
+  concat_ws('|', c1, c2) = concat_ws('|', c3, c4)
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select 
+  concat_ws('|', c1, c2),
+  concat_ws('|', c3, c4),
+  concat_ws('|', c1, c2) = concat_ws('|', c3, c4)
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+238|val_238	238|val_238	true
+PREHOOK: query: select
+  decode(encode(c2, 'US-ASCII'), 'US-ASCII'),
+  decode(encode(c4, 'US-ASCII'), 'US-ASCII'),
+  decode(encode(c2, 'US-ASCII'), 'US-ASCII') = decode(encode(c4, 'US-ASCII'), 'US-ASCII')
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  decode(encode(c2, 'US-ASCII'), 'US-ASCII'),
+  decode(encode(c4, 'US-ASCII'), 'US-ASCII'),
+  decode(encode(c2, 'US-ASCII'), 'US-ASCII') = decode(encode(c4, 'US-ASCII'), 'US-ASCII')
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+val_238	val_238	true
+PREHOOK: query: select
+  instr(c2, '_'),
+  instr(c4, '_'),
+  instr(c2, '_') = instr(c4, '_')
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  instr(c2, '_'),
+  instr(c4, '_'),
+  instr(c2, '_') = instr(c4, '_')
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+4	4	true
+PREHOOK: query: select
+  length(c2),
+  length(c4),
+  length(c2) = length(c4)
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  length(c2),
+  length(c4),
+  length(c2) = length(c4)
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+7	7	true
+PREHOOK: query: select
+  locate('a', 'abcdabcd', 3),
+  locate(cast('a' as char(1)), cast('abcdabcd' as char(10)), 3),
+  locate('a', 'abcdabcd', 3) = locate(cast('a' as char(1)), cast('abcdabcd' as char(10)), 3)
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  locate('a', 'abcdabcd', 3),
+  locate(cast('a' as char(1)), cast('abcdabcd' as char(10)), 3),
+  locate('a', 'abcdabcd', 3) = locate(cast('a' as char(1)), cast('abcdabcd' as char(10)), 3)
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+5	5	true
+PREHOOK: query: select
+  lpad(c2, 15, ' '),
+  lpad(c4, 15, ' '),
+  lpad(c2, 15, ' ') = lpad(c4, 15, ' ')
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  lpad(c2, 15, ' '),
+  lpad(c4, 15, ' '),
+  lpad(c2, 15, ' ') = lpad(c4, 15, ' ')
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+        val_238	        val_238	true
+PREHOOK: query: select
+  ltrim(c2),
+  ltrim(c4),
+  ltrim(c2) = ltrim(c4)
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  ltrim(c2),
+  ltrim(c4),
+  ltrim(c2) = ltrim(c4)
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+val_238	val_238	true
+PREHOOK: query: -- In hive wiki page https://cwiki.apache.org/confluence/display/Hive/LanguageManual+UDF
+-- we only allow A regexp B, not regexp (A,B).
+
+select
+  c2 regexp 'val',
+  c4 regexp 'val',
+  (c2 regexp 'val') = (c4 regexp 'val')
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: -- In hive wiki page https://cwiki.apache.org/confluence/display/Hive/LanguageManual+UDF
+-- we only allow A regexp B, not regexp (A,B).
+
+select
+  c2 regexp 'val',
+  c4 regexp 'val',
+  (c2 regexp 'val') = (c4 regexp 'val')
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+true	true	true
+PREHOOK: query: select
+  regexp_extract(c2, 'val_([0-9]+)', 1),
+  regexp_extract(c4, 'val_([0-9]+)', 1),
+  regexp_extract(c2, 'val_([0-9]+)', 1) = regexp_extract(c4, 'val_([0-9]+)', 1)
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  regexp_extract(c2, 'val_([0-9]+)', 1),
+  regexp_extract(c4, 'val_([0-9]+)', 1),
+  regexp_extract(c2, 'val_([0-9]+)', 1) = regexp_extract(c4, 'val_([0-9]+)', 1)
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+238	238	true
+PREHOOK: query: select
+  regexp_replace(c2, 'val', 'replaced'),
+  regexp_replace(c4, 'val', 'replaced'),
+  regexp_replace(c2, 'val', 'replaced') = regexp_replace(c4, 'val', 'replaced')
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  regexp_replace(c2, 'val', 'replaced'),
+  regexp_replace(c4, 'val', 'replaced'),
+  regexp_replace(c2, 'val', 'replaced') = regexp_replace(c4, 'val', 'replaced')
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+replaced_238	replaced_238	true
+PREHOOK: query: select
+  reverse(c2),
+  reverse(c4),
+  reverse(c2) = reverse(c4)
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  reverse(c2),
+  reverse(c4),
+  reverse(c2) = reverse(c4)
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+832_lav	832_lav	true
+PREHOOK: query: select
+  rpad(c2, 15, ' '),
+  rpad(c4, 15, ' '),
+  rpad(c2, 15, ' ') = rpad(c4, 15, ' ')
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  rpad(c2, 15, ' '),
+  rpad(c4, 15, ' '),
+  rpad(c2, 15, ' ') = rpad(c4, 15, ' ')
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+val_238        	val_238        	true
+PREHOOK: query: select
+  rtrim(c2),
+  rtrim(c4),
+  rtrim(c2) = rtrim(c4)
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  rtrim(c2),
+  rtrim(c4),
+  rtrim(c2) = rtrim(c4)
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+val_238	val_238	true
+PREHOOK: query: select
+  sentences('See spot run.  See jane run.'),
+  sentences(cast('See spot run.  See jane run.' as char(50)))
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  sentences('See spot run.  See jane run.'),
+  sentences(cast('See spot run.  See jane run.' as char(50)))
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+[["See","spot","run"],["See","jane","run"]]	[["See","spot","run"],["See","jane","run"]]
+PREHOOK: query: select
+  split(c2, '_'),
+  split(c4, '_')
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  split(c2, '_'),
+  split(c4, '_')
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+["val","238"]	["val","238"]
+PREHOOK: query: select 
+  str_to_map('a:1,b:2,c:3',',',':'),
+  str_to_map(cast('a:1,b:2,c:3' as char(20)),',',':')
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select 
+  str_to_map('a:1,b:2,c:3',',',':'),
+  str_to_map(cast('a:1,b:2,c:3' as char(20)),',',':')
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+{"a":"1","b":"2","c":"3"}	{"a":"1","b":"2","c":"3"}
+PREHOOK: query: select
+  substr(c2, 1, 3),
+  substr(c4, 1, 3),
+  substr(c2, 1, 3) = substr(c4, 1, 3)
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  substr(c2, 1, 3),
+  substr(c4, 1, 3),
+  substr(c2, 1, 3) = substr(c4, 1, 3)
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+val	val	true
+PREHOOK: query: select
+  trim(c2),
+  trim(c4),
+  trim(c2) = trim(c4)
+from char_udf_1 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  trim(c2),
+  trim(c4),
+  trim(c2) = trim(c4)
+from char_udf_1 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+val_238	val_238	true
+PREHOOK: query: -- Aggregate Functions
+select
+  compute_stats(c2, 16),
+  compute_stats(c4, 16)
+from char_udf_1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: -- Aggregate Functions
+select
+  compute_stats(c2, 16),
+  compute_stats(c4, 16)
+from char_udf_1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1,"ndvbitvector":"{0}{3}{2}{3}{1}{0}{2}{0}{1}{0}{0}{1}{3}{2}{0}{3}"}	{"columntype":"String","maxlength":7,"avglength":7.0,"countnulls":0,"numdistinctvalues":1,"ndvbitvector":"{0}{3}{2}{3}{1}{0}{2}{0}{1}{0}{0}{1}{3}{2}{0}{3}"}
+PREHOOK: query: select
+  min(c2),
+  min(c4)
+from char_udf_1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  min(c2),
+  min(c4)
+from char_udf_1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+val_238	val_238             
+PREHOOK: query: select
+  max(c2),
+  max(c4)
+from char_udf_1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  max(c2),
+  max(c4)
+from char_udf_1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@char_udf_1
+#### A masked pattern was here ####
+val_238	val_238             
+PREHOOK: query: drop table char_udf_1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@char_udf_1
+PREHOOK: Output: default@char_udf_1
+POSTHOOK: query: drop table char_udf_1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@char_udf_1
+POSTHOOK: Output: default@char_udf_1
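
One detail worth noting in the new char_udf1.q.out above: rows such as val_238
followed by a run of spaces are not whitespace noise. Column c4 is declared
char(20), so its rendered value is blank-padded to twenty characters, while the
comparisons against the string column c2 still come out true in the golden
output because trailing pad is ignored when char values are compared. A small
stand-alone illustration of those semantics — plain Java mimicking, not
calling, Hive's char handling:

    public class CharPadDemo {
      // Mimics CHAR(n) rendering: the displayed value is padded to n
      // characters with trailing blanks.
      static String pad(String v, int n) {
        StringBuilder sb = new StringBuilder(v);
        while (sb.length() < n) {
          sb.append(' ');
        }
        return sb.toString();
      }

      public static void main(String[] args) {
        String c2 = "val_238";          // string column
        String c4 = pad("val_238", 20); // char(20) column, as c4 above
        System.out.println("[" + c4 + "]");       // [val_238             ]
        System.out.println(c2.equals(c4));        // false: raw strings differ
        System.out.println(c2.equals(c4.trim())); // true: equal once pad is stripped
      }
    }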

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/input4.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/input4.q.java1.7.out b/ql/src/test/results/clientpositive/input4.q.java1.7.out
deleted file mode 100644
index eaeedcb..0000000
--- a/ql/src/test/results/clientpositive/input4.q.java1.7.out
+++ /dev/null
@@ -1,559 +0,0 @@
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@INPUT4
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@INPUT4
-PREHOOK: query: EXPLAIN
-LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
-PREHOOK: type: LOAD
-POSTHOOK: query: EXPLAIN
-LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
-POSTHOOK: type: LOAD
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-  Stage-1 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.input4
-
-  Stage: Stage-1
-    Stats-Aggr Operator
-
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@input4
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@input4
-PREHOOK: query: EXPLAIN FORMATTED
-SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN FORMATTED
-SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
-POSTHOOK: type: QUERY
-{"STAGE DEPENDENCIES":{"Stage-0":{"ROOT STAGE":"TRUE"}},"STAGE PLANS":{"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"TableScan":{"alias:":"input4alias","Statistics:":"Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE","children":{"Select Operator":{"expressions:":"value (type: string), key (type: string)","outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE","children":{"ListSink":{}}}}}}}}}}
-PREHOOK: query: SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
-PREHOOK: type: QUERY
-PREHOOK: Input: default@input4
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@input4
-#### A masked pattern was here ####
-val_238	238
-val_86	86
-val_311	311
-val_27	27
-val_165	165
-val_409	409
-val_255	255
-val_278	278
-val_98	98
-val_484	484
-val_265	265
-val_193	193
-val_401	401
-val_150	150
-val_273	273
-val_224	224
-val_369	369
-val_66	66
-val_128	128
-val_213	213
-val_146	146
-val_406	406
-val_429	429
-val_374	374
-val_152	152
-val_469	469
-val_145	145
-val_495	495
-val_37	37
-val_327	327
-val_281	281
-val_277	277
-val_209	209
-val_15	15
-val_82	82
-val_403	403
-val_166	166
-val_417	417
-val_430	430
-val_252	252
-val_292	292
-val_219	219
-val_287	287
-val_153	153
-val_193	193
-val_338	338
-val_446	446
-val_459	459
-val_394	394
-val_237	237
-val_482	482
-val_174	174
-val_413	413
-val_494	494
-val_207	207
-val_199	199
-val_466	466
-val_208	208
-val_174	174
-val_399	399
-val_396	396
-val_247	247
-val_417	417
-val_489	489
-val_162	162
-val_377	377
-val_397	397
-val_309	309
-val_365	365
-val_266	266
-val_439	439
-val_342	342
-val_367	367
-val_325	325
-val_167	167
-val_195	195
-val_475	475
-val_17	17
-val_113	113
-val_155	155
-val_203	203
-val_339	339
-val_0	0
-val_455	455
-val_128	128
-val_311	311
-val_316	316
-val_57	57
-val_302	302
-val_205	205
-val_149	149
-val_438	438
-val_345	345
-val_129	129
-val_170	170
-val_20	20
-val_489	489
-val_157	157
-val_378	378
-val_221	221
-val_92	92
-val_111	111
-val_47	47
-val_72	72
-val_4	4
-val_280	280
-val_35	35
-val_427	427
-val_277	277
-val_208	208
-val_356	356
-val_399	399
-val_169	169
-val_382	382
-val_498	498
-val_125	125
-val_386	386
-val_437	437
-val_469	469
-val_192	192
-val_286	286
-val_187	187
-val_176	176
-val_54	54
-val_459	459
-val_51	51
-val_138	138
-val_103	103
-val_239	239
-val_213	213
-val_216	216
-val_430	430
-val_278	278
-val_176	176
-val_289	289
-val_221	221
-val_65	65
-val_318	318
-val_332	332
-val_311	311
-val_275	275
-val_137	137
-val_241	241
-val_83	83
-val_333	333
-val_180	180
-val_284	284
-val_12	12
-val_230	230
-val_181	181
-val_67	67
-val_260	260
-val_404	404
-val_384	384
-val_489	489
-val_353	353
-val_373	373
-val_272	272
-val_138	138
-val_217	217
-val_84	84
-val_348	348
-val_466	466
-val_58	58
-val_8	8
-val_411	411
-val_230	230
-val_208	208
-val_348	348
-val_24	24
-val_463	463
-val_431	431
-val_179	179
-val_172	172
-val_42	42
-val_129	129
-val_158	158
-val_119	119
-val_496	496
-val_0	0
-val_322	322
-val_197	197
-val_468	468
-val_393	393
-val_454	454
-val_100	100
-val_298	298
-val_199	199
-val_191	191
-val_418	418
-val_96	96
-val_26	26
-val_165	165
-val_327	327
-val_230	230
-val_205	205
-val_120	120
-val_131	131
-val_51	51
-val_404	404
-val_43	43
-val_436	436
-val_156	156
-val_469	469
-val_468	468
-val_308	308
-val_95	95
-val_196	196
-val_288	288
-val_481	481
-val_457	457
-val_98	98
-val_282	282
-val_197	197
-val_187	187
-val_318	318
-val_318	318
-val_409	409
-val_470	470
-val_137	137
-val_369	369
-val_316	316
-val_169	169
-val_413	413
-val_85	85
-val_77	77
-val_0	0
-val_490	490
-val_87	87
-val_364	364
-val_179	179
-val_118	118
-val_134	134
-val_395	395
-val_282	282
-val_138	138
-val_238	238
-val_419	419
-val_15	15
-val_118	118
-val_72	72
-val_90	90
-val_307	307
-val_19	19
-val_435	435
-val_10	10
-val_277	277
-val_273	273
-val_306	306
-val_224	224
-val_309	309
-val_389	389
-val_327	327
-val_242	242
-val_369	369
-val_392	392
-val_272	272
-val_331	331
-val_401	401
-val_242	242
-val_452	452
-val_177	177
-val_226	226
-val_5	5
-val_497	497
-val_402	402
-val_396	396
-val_317	317
-val_395	395
-val_58	58
-val_35	35
-val_336	336
-val_95	95
-val_11	11
-val_168	168
-val_34	34
-val_229	229
-val_233	233
-val_143	143
-val_472	472
-val_322	322
-val_498	498
-val_160	160
-val_195	195
-val_42	42
-val_321	321
-val_430	430
-val_119	119
-val_489	489
-val_458	458
-val_78	78
-val_76	76
-val_41	41
-val_223	223
-val_492	492
-val_149	149
-val_449	449
-val_218	218
-val_228	228
-val_138	138
-val_453	453
-val_30	30
-val_209	209
-val_64	64
-val_468	468
-val_76	76
-val_74	74
-val_342	342
-val_69	69
-val_230	230
-val_33	33
-val_368	368
-val_103	103
-val_296	296
-val_113	113
-val_216	216
-val_367	367
-val_344	344
-val_167	167
-val_274	274
-val_219	219
-val_239	239
-val_485	485
-val_116	116
-val_223	223
-val_256	256
-val_263	263
-val_70	70
-val_487	487
-val_480	480
-val_401	401
-val_288	288
-val_191	191
-val_5	5
-val_244	244
-val_438	438
-val_128	128
-val_467	467
-val_432	432
-val_202	202
-val_316	316
-val_229	229
-val_469	469
-val_463	463
-val_280	280
-val_2	2
-val_35	35
-val_283	283
-val_331	331
-val_235	235
-val_80	80
-val_44	44
-val_193	193
-val_321	321
-val_335	335
-val_104	104
-val_466	466
-val_366	366
-val_175	175
-val_403	403
-val_483	483
-val_53	53
-val_105	105
-val_257	257
-val_406	406
-val_409	409
-val_190	190
-val_406	406
-val_401	401
-val_114	114
-val_258	258
-val_90	90
-val_203	203
-val_262	262
-val_348	348
-val_424	424
-val_12	12
-val_396	396
-val_201	201
-val_217	217
-val_164	164
-val_431	431
-val_454	454
-val_478	478
-val_298	298
-val_125	125
-val_431	431
-val_164	164
-val_424	424
-val_187	187
-val_382	382
-val_5	5
-val_70	70
-val_397	397
-val_480	480
-val_291	291
-val_24	24
-val_351	351
-val_255	255
-val_104	104
-val_70	70
-val_163	163
-val_438	438
-val_119	119
-val_414	414
-val_200	200
-val_491	491
-val_237	237
-val_439	439
-val_360	360
-val_248	248
-val_479	479
-val_305	305
-val_417	417
-val_199	199
-val_444	444
-val_120	120
-val_429	429
-val_169	169
-val_443	443
-val_323	323
-val_325	325
-val_277	277
-val_230	230
-val_478	478
-val_178	178
-val_468	468
-val_310	310
-val_317	317
-val_333	333
-val_493	493
-val_460	460
-val_207	207
-val_249	249
-val_265	265
-val_480	480
-val_83	83
-val_136	136
-val_353	353
-val_172	172
-val_214	214
-val_462	462
-val_233	233
-val_406	406
-val_133	133
-val_175	175
-val_189	189
-val_454	454
-val_375	375
-val_401	401
-val_421	421
-val_407	407
-val_384	384
-val_256	256
-val_26	26
-val_134	134
-val_67	67
-val_384	384
-val_379	379
-val_18	18
-val_462	462
-val_492	492
-val_100	100
-val_298	298
-val_9	9
-val_341	341
-val_498	498
-val_146	146
-val_458	458
-val_362	362
-val_186	186
-val_285	285
-val_348	348
-val_167	167
-val_18	18
-val_273	273
-val_183	183
-val_281	281
-val_344	344
-val_97	97
-val_469	469
-val_315	315
-val_84	84
-val_28	28
-val_37	37
-val_448	448
-val_152	152
-val_348	348
-val_307	307
-val_194	194
-val_414	414
-val_477	477
-val_222	222
-val_126	126
-val_90	90
-val_169	169
-val_403	403
-val_400	400
-val_200	200
-val_97	97

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/input4.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/input4.q.java1.8.out b/ql/src/test/results/clientpositive/input4.q.java1.8.out
deleted file mode 100644
index eaeedcb..0000000
--- a/ql/src/test/results/clientpositive/input4.q.java1.8.out
+++ /dev/null
@@ -1,559 +0,0 @@
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@INPUT4
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@INPUT4
-PREHOOK: query: EXPLAIN
-LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
-PREHOOK: type: LOAD
-POSTHOOK: query: EXPLAIN
-LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
-POSTHOOK: type: LOAD
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-  Stage-1 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.input4
-
-  Stage: Stage-1
-    Stats-Aggr Operator
-
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@input4
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@input4
-PREHOOK: query: EXPLAIN FORMATTED
-SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN FORMATTED
-SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
-POSTHOOK: type: QUERY
-{"STAGE DEPENDENCIES":{"Stage-0":{"ROOT STAGE":"TRUE"}},"STAGE PLANS":{"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"TableScan":{"alias:":"input4alias","Statistics:":"Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE","children":{"Select Operator":{"expressions:":"value (type: string), key (type: string)","outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE","children":{"ListSink":{}}}}}}}}}}
-PREHOOK: query: SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
-PREHOOK: type: QUERY
-PREHOOK: Input: default@input4
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@input4
-#### A masked pattern was here ####
-val_238	238
-val_86	86
-val_311	311
-val_27	27
-val_165	165
-val_409	409
-val_255	255
-val_278	278
-val_98	98
-val_484	484
-val_265	265
-val_193	193
-val_401	401
-val_150	150
-val_273	273
-val_224	224
-val_369	369
-val_66	66
-val_128	128
-val_213	213
-val_146	146
-val_406	406
-val_429	429
-val_374	374
-val_152	152
-val_469	469
-val_145	145
-val_495	495
-val_37	37
-val_327	327
-val_281	281
-val_277	277
-val_209	209
-val_15	15
-val_82	82
-val_403	403
-val_166	166
-val_417	417
-val_430	430
-val_252	252
-val_292	292
-val_219	219
-val_287	287
-val_153	153
-val_193	193
-val_338	338
-val_446	446
-val_459	459
-val_394	394
-val_237	237
-val_482	482
-val_174	174
-val_413	413
-val_494	494
-val_207	207
-val_199	199
-val_466	466
-val_208	208
-val_174	174
-val_399	399
-val_396	396
-val_247	247
-val_417	417
-val_489	489
-val_162	162
-val_377	377
-val_397	397
-val_309	309
-val_365	365
-val_266	266
-val_439	439
-val_342	342
-val_367	367
-val_325	325
-val_167	167
-val_195	195
-val_475	475
-val_17	17
-val_113	113
-val_155	155
-val_203	203
-val_339	339
-val_0	0
-val_455	455
-val_128	128
-val_311	311
-val_316	316
-val_57	57
-val_302	302
-val_205	205
-val_149	149
-val_438	438
-val_345	345
-val_129	129
-val_170	170
-val_20	20
-val_489	489
-val_157	157
-val_378	378
-val_221	221
-val_92	92
-val_111	111
-val_47	47
-val_72	72
-val_4	4
-val_280	280
-val_35	35
-val_427	427
-val_277	277
-val_208	208
-val_356	356
-val_399	399
-val_169	169
-val_382	382
-val_498	498
-val_125	125
-val_386	386
-val_437	437
-val_469	469
-val_192	192
-val_286	286
-val_187	187
-val_176	176
-val_54	54
-val_459	459
-val_51	51
-val_138	138
-val_103	103
-val_239	239
-val_213	213
-val_216	216
-val_430	430
-val_278	278
-val_176	176
-val_289	289
-val_221	221
-val_65	65
-val_318	318
-val_332	332
-val_311	311
-val_275	275
-val_137	137
-val_241	241
-val_83	83
-val_333	333
-val_180	180
-val_284	284
-val_12	12
-val_230	230
-val_181	181
-val_67	67
-val_260	260
-val_404	404
-val_384	384
-val_489	489
-val_353	353
-val_373	373
-val_272	272
-val_138	138
-val_217	217
-val_84	84
-val_348	348
-val_466	466
-val_58	58
-val_8	8
-val_411	411
-val_230	230
-val_208	208
-val_348	348
-val_24	24
-val_463	463
-val_431	431
-val_179	179
-val_172	172
-val_42	42
-val_129	129
-val_158	158
-val_119	119
-val_496	496
-val_0	0
-val_322	322
-val_197	197
-val_468	468
-val_393	393
-val_454	454
-val_100	100
-val_298	298
-val_199	199
-val_191	191
-val_418	418
-val_96	96
-val_26	26
-val_165	165
-val_327	327
-val_230	230
-val_205	205
-val_120	120
-val_131	131
-val_51	51
-val_404	404
-val_43	43
-val_436	436
-val_156	156
-val_469	469
-val_468	468
-val_308	308
-val_95	95
-val_196	196
-val_288	288
-val_481	481
-val_457	457
-val_98	98
-val_282	282
-val_197	197
-val_187	187
-val_318	318
-val_318	318
-val_409	409
-val_470	470
-val_137	137
-val_369	369
-val_316	316
-val_169	169
-val_413	413
-val_85	85
-val_77	77
-val_0	0
-val_490	490
-val_87	87
-val_364	364
-val_179	179
-val_118	118
-val_134	134
-val_395	395
-val_282	282
-val_138	138
-val_238	238
-val_419	419
-val_15	15
-val_118	118
-val_72	72
-val_90	90
-val_307	307
-val_19	19
-val_435	435
-val_10	10
-val_277	277
-val_273	273
-val_306	306
-val_224	224
-val_309	309
-val_389	389
-val_327	327
-val_242	242
-val_369	369
-val_392	392
-val_272	272
-val_331	331
-val_401	401
-val_242	242
-val_452	452
-val_177	177
-val_226	226
-val_5	5
-val_497	497
-val_402	402
-val_396	396
-val_317	317
-val_395	395
-val_58	58
-val_35	35
-val_336	336
-val_95	95
-val_11	11
-val_168	168
-val_34	34
-val_229	229
-val_233	233
-val_143	143
-val_472	472
-val_322	322
-val_498	498
-val_160	160
-val_195	195
-val_42	42
-val_321	321
-val_430	430
-val_119	119
-val_489	489
-val_458	458
-val_78	78
-val_76	76
-val_41	41
-val_223	223
-val_492	492
-val_149	149
-val_449	449
-val_218	218
-val_228	228
-val_138	138
-val_453	453
-val_30	30
-val_209	209
-val_64	64
-val_468	468
-val_76	76
-val_74	74
-val_342	342
-val_69	69
-val_230	230
-val_33	33
-val_368	368
-val_103	103
-val_296	296
-val_113	113
-val_216	216
-val_367	367
-val_344	344
-val_167	167
-val_274	274
-val_219	219
-val_239	239
-val_485	485
-val_116	116
-val_223	223
-val_256	256
-val_263	263
-val_70	70
-val_487	487
-val_480	480
-val_401	401
-val_288	288
-val_191	191
-val_5	5
-val_244	244
-val_438	438
-val_128	128
-val_467	467
-val_432	432
-val_202	202
-val_316	316
-val_229	229
-val_469	469
-val_463	463
-val_280	280
-val_2	2
-val_35	35
-val_283	283
-val_331	331
-val_235	235
-val_80	80
-val_44	44
-val_193	193
-val_321	321
-val_335	335
-val_104	104
-val_466	466
-val_366	366
-val_175	175
-val_403	403
-val_483	483
-val_53	53
-val_105	105
-val_257	257
-val_406	406
-val_409	409
-val_190	190
-val_406	406
-val_401	401
-val_114	114
-val_258	258
-val_90	90
-val_203	203
-val_262	262
-val_348	348
-val_424	424
-val_12	12
-val_396	396
-val_201	201
-val_217	217
-val_164	164
-val_431	431
-val_454	454
-val_478	478
-val_298	298
-val_125	125
-val_431	431
-val_164	164
-val_424	424
-val_187	187
-val_382	382
-val_5	5
-val_70	70
-val_397	397
-val_480	480
-val_291	291
-val_24	24
-val_351	351
-val_255	255
-val_104	104
-val_70	70
-val_163	163
-val_438	438
-val_119	119
-val_414	414
-val_200	200
-val_491	491
-val_237	237
-val_439	439
-val_360	360
-val_248	248
-val_479	479
-val_305	305
-val_417	417
-val_199	199
-val_444	444
-val_120	120
-val_429	429
-val_169	169
-val_443	443
-val_323	323
-val_325	325
-val_277	277
-val_230	230
-val_478	478
-val_178	178
-val_468	468
-val_310	310
-val_317	317
-val_333	333
-val_493	493
-val_460	460
-val_207	207
-val_249	249
-val_265	265
-val_480	480
-val_83	83
-val_136	136
-val_353	353
-val_172	172
-val_214	214
-val_462	462
-val_233	233
-val_406	406
-val_133	133
-val_175	175
-val_189	189
-val_454	454
-val_375	375
-val_401	401
-val_421	421
-val_407	407
-val_384	384
-val_256	256
-val_26	26
-val_134	134
-val_67	67
-val_384	384
-val_379	379
-val_18	18
-val_462	462
-val_492	492
-val_100	100
-val_298	298
-val_9	9
-val_341	341
-val_498	498
-val_146	146
-val_458	458
-val_362	362
-val_186	186
-val_285	285
-val_348	348
-val_167	167
-val_18	18
-val_273	273
-val_183	183
-val_281	281
-val_344	344
-val_97	97
-val_469	469
-val_315	315
-val_84	84
-val_28	28
-val_37	37
-val_448	448
-val_152	152
-val_348	348
-val_307	307
-val_194	194
-val_414	414
-val_477	477
-val_222	222
-val_126	126
-val_90	90
-val_169	169
-val_403	403
-val_400	400
-val_200	200
-val_97	97

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/input4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/input4.q.out b/ql/src/test/results/clientpositive/input4.q.out
new file mode 100644
index 0000000..83912f6
--- /dev/null
+++ b/ql/src/test/results/clientpositive/input4.q.out
@@ -0,0 +1,555 @@
+PREHOOK: query: CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@INPUT4
+POSTHOOK: query: CREATE TABLE INPUT4(KEY STRING, VALUE STRING) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@INPUT4
+PREHOOK: query: EXPLAIN
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
+PREHOOK: type: LOAD
+POSTHOOK: query: EXPLAIN
+LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
+POSTHOOK: type: LOAD
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: false
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.input4
+
+  Stage: Stage-1
+    Stats-Aggr Operator
+
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@input4
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE INPUT4
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@input4
+PREHOOK: query: EXPLAIN FORMATTED
+SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN FORMATTED
+SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
+POSTHOOK: type: QUERY
+{"STAGE DEPENDENCIES":{"Stage-0":{"ROOT STAGE":"TRUE"}},"STAGE PLANS":{"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"TableScan":{"alias:":"input4alias","Statistics:":"Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE","children":{"Select Operator":{"expressions:":"value (type: string), key (type: string)","outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE","children":{"ListSink":{}}}}}}}}}}
+PREHOOK: query: SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
+PREHOOK: type: QUERY
+PREHOOK: Input: default@input4
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT Input4Alias.VALUE, Input4Alias.KEY FROM INPUT4 AS Input4Alias
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@input4
+#### A masked pattern was here ####
+val_238	238
+val_86	86
+val_311	311
+val_27	27
+val_165	165
+val_409	409
+val_255	255
+val_278	278
+val_98	98
+val_484	484
+val_265	265
+val_193	193
+val_401	401
+val_150	150
+val_273	273
+val_224	224
+val_369	369
+val_66	66
+val_128	128
+val_213	213
+val_146	146
+val_406	406
+val_429	429
+val_374	374
+val_152	152
+val_469	469
+val_145	145
+val_495	495
+val_37	37
+val_327	327
+val_281	281
+val_277	277
+val_209	209
+val_15	15
+val_82	82
+val_403	403
+val_166	166
+val_417	417
+val_430	430
+val_252	252
+val_292	292
+val_219	219
+val_287	287
+val_153	153
+val_193	193
+val_338	338
+val_446	446
+val_459	459
+val_394	394
+val_237	237
+val_482	482
+val_174	174
+val_413	413
+val_494	494
+val_207	207
+val_199	199
+val_466	466
+val_208	208
+val_174	174
+val_399	399
+val_396	396
+val_247	247
+val_417	417
+val_489	489
+val_162	162
+val_377	377
+val_397	397
+val_309	309
+val_365	365
+val_266	266
+val_439	439
+val_342	342
+val_367	367
+val_325	325
+val_167	167
+val_195	195
+val_475	475
+val_17	17
+val_113	113
+val_155	155
+val_203	203
+val_339	339
+val_0	0
+val_455	455
+val_128	128
+val_311	311
+val_316	316
+val_57	57
+val_302	302
+val_205	205
+val_149	149
+val_438	438
+val_345	345
+val_129	129
+val_170	170
+val_20	20
+val_489	489
+val_157	157
+val_378	378
+val_221	221
+val_92	92
+val_111	111
+val_47	47
+val_72	72
+val_4	4
+val_280	280
+val_35	35
+val_427	427
+val_277	277
+val_208	208
+val_356	356
+val_399	399
+val_169	169
+val_382	382
+val_498	498
+val_125	125
+val_386	386
+val_437	437
+val_469	469
+val_192	192
+val_286	286
+val_187	187
+val_176	176
+val_54	54
+val_459	459
+val_51	51
+val_138	138
+val_103	103
+val_239	239
+val_213	213
+val_216	216
+val_430	430
+val_278	278
+val_176	176
+val_289	289
+val_221	221
+val_65	65
+val_318	318
+val_332	332
+val_311	311
+val_275	275
+val_137	137
+val_241	241
+val_83	83
+val_333	333
+val_180	180
+val_284	284
+val_12	12
+val_230	230
+val_181	181
+val_67	67
+val_260	260
+val_404	404
+val_384	384
+val_489	489
+val_353	353
+val_373	373
+val_272	272
+val_138	138
+val_217	217
+val_84	84
+val_348	348
+val_466	466
+val_58	58
+val_8	8
+val_411	411
+val_230	230
+val_208	208
+val_348	348
+val_24	24
+val_463	463
+val_431	431
+val_179	179
+val_172	172
+val_42	42
+val_129	129
+val_158	158
+val_119	119
+val_496	496
+val_0	0
+val_322	322
+val_197	197
+val_468	468
+val_393	393
+val_454	454
+val_100	100
+val_298	298
+val_199	199
+val_191	191
+val_418	418
+val_96	96
+val_26	26
+val_165	165
+val_327	327
+val_230	230
+val_205	205
+val_120	120
+val_131	131
+val_51	51
+val_404	404
+val_43	43
+val_436	436
+val_156	156
+val_469	469
+val_468	468
+val_308	308
+val_95	95
+val_196	196
+val_288	288
+val_481	481
+val_457	457
+val_98	98
+val_282	282
+val_197	197
+val_187	187
+val_318	318
+val_318	318
+val_409	409
+val_470	470
+val_137	137
+val_369	369
+val_316	316
+val_169	169
+val_413	413
+val_85	85
+val_77	77
+val_0	0
+val_490	490
+val_87	87
+val_364	364
+val_179	179
+val_118	118
+val_134	134
+val_395	395
+val_282	282
+val_138	138
+val_238	238
+val_419	419
+val_15	15
+val_118	118
+val_72	72
+val_90	90
+val_307	307
+val_19	19
+val_435	435
+val_10	10
+val_277	277
+val_273	273
+val_306	306
+val_224	224
+val_309	309
+val_389	389
+val_327	327
+val_242	242
+val_369	369
+val_392	392
+val_272	272
+val_331	331
+val_401	401
+val_242	242
+val_452	452
+val_177	177
+val_226	226
+val_5	5
+val_497	497
+val_402	402
+val_396	396
+val_317	317
+val_395	395
+val_58	58
+val_35	35
+val_336	336
+val_95	95
+val_11	11
+val_168	168
+val_34	34
+val_229	229
+val_233	233
+val_143	143
+val_472	472
+val_322	322
+val_498	498
+val_160	160
+val_195	195
+val_42	42
+val_321	321
+val_430	430
+val_119	119
+val_489	489
+val_458	458
+val_78	78
+val_76	76
+val_41	41
+val_223	223
+val_492	492
+val_149	149
+val_449	449
+val_218	218
+val_228	228
+val_138	138
+val_453	453
+val_30	30
+val_209	209
+val_64	64
+val_468	468
+val_76	76
+val_74	74
+val_342	342
+val_69	69
+val_230	230
+val_33	33
+val_368	368
+val_103	103
+val_296	296
+val_113	113
+val_216	216
+val_367	367
+val_344	344
+val_167	167
+val_274	274
+val_219	219
+val_239	239
+val_485	485
+val_116	116
+val_223	223
+val_256	256
+val_263	263
+val_70	70
+val_487	487
+val_480	480
+val_401	401
+val_288	288
+val_191	191
+val_5	5
+val_244	244
+val_438	438
+val_128	128
+val_467	467
+val_432	432
+val_202	202
+val_316	316
+val_229	229
+val_469	469
+val_463	463
+val_280	280
+val_2	2
+val_35	35
+val_283	283
+val_331	331
+val_235	235
+val_80	80
+val_44	44
+val_193	193
+val_321	321
+val_335	335
+val_104	104
+val_466	466
+val_366	366
+val_175	175
+val_403	403
+val_483	483
+val_53	53
+val_105	105
+val_257	257
+val_406	406
+val_409	409
+val_190	190
+val_406	406
+val_401	401
+val_114	114
+val_258	258
+val_90	90
+val_203	203
+val_262	262
+val_348	348
+val_424	424
+val_12	12
+val_396	396
+val_201	201
+val_217	217
+val_164	164
+val_431	431
+val_454	454
+val_478	478
+val_298	298
+val_125	125
+val_431	431
+val_164	164
+val_424	424
+val_187	187
+val_382	382
+val_5	5
+val_70	70
+val_397	397
+val_480	480
+val_291	291
+val_24	24
+val_351	351
+val_255	255
+val_104	104
+val_70	70
+val_163	163
+val_438	438
+val_119	119
+val_414	414
+val_200	200
+val_491	491
+val_237	237
+val_439	439
+val_360	360
+val_248	248
+val_479	479
+val_305	305
+val_417	417
+val_199	199
+val_444	444
+val_120	120
+val_429	429
+val_169	169
+val_443	443
+val_323	323
+val_325	325
+val_277	277
+val_230	230
+val_478	478
+val_178	178
+val_468	468
+val_310	310
+val_317	317
+val_333	333
+val_493	493
+val_460	460
+val_207	207
+val_249	249
+val_265	265
+val_480	480
+val_83	83
+val_136	136
+val_353	353
+val_172	172
+val_214	214
+val_462	462
+val_233	233
+val_406	406
+val_133	133
+val_175	175
+val_189	189
+val_454	454
+val_375	375
+val_401	401
+val_421	421
+val_407	407
+val_384	384
+val_256	256
+val_26	26
+val_134	134
+val_67	67
+val_384	384
+val_379	379
+val_18	18
+val_462	462
+val_492	492
+val_100	100
+val_298	298
+val_9	9
+val_341	341
+val_498	498
+val_146	146
+val_458	458
+val_362	362
+val_186	186
+val_285	285
+val_348	348
+val_167	167
+val_18	18
+val_273	273
+val_183	183
+val_281	281
+val_344	344
+val_97	97
+val_469	469
+val_315	315
+val_84	84
+val_28	28
+val_37	37
+val_448	448
+val_152	152
+val_348	348
+val_307	307
+val_194	194
+val_414	414
+val_477	477
+val_222	222
+val_126	126
+val_90	90
+val_169	169
+val_403	403
+val_400	400
+val_200	200
+val_97	97

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/join0.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join0.q.java1.7.out b/ql/src/test/results/clientpositive/join0.q.java1.7.out
deleted file mode 100644
index 343f8a4..0000000
--- a/ql/src/test/results/clientpositive/join0.q.java1.7.out
+++ /dev/null
@@ -1,240 +0,0 @@
-Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Stage 'Stage-1:MAPRED' is a cross product
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
--- SORT_QUERY_RESULTS
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
--- SORT_QUERY_RESULTS
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: (key < 10) (type: boolean)
-              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  sort order: 
-                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col0 (type: string), _col1 (type: string)
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: (key < 10) (type: boolean)
-              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  sort order: 
-                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col0 (type: string), _col1 (type: string)
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Inner Join 0 to 1
-          keys:
-            0 
-            1 
-          outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
-              sort order: ++++
-              Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Select Operator
-          expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
-          outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Stage 'Stage-1:MAPRED' is a cross product
-PREHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-{"STAGE DEPENDENCIES":{"Stage-1":{"ROOT STAGE":"TRUE"},"Stage-2":{"DEPENDENT STAGES":"Stage-1"},"Stage-0":{"DEPENDENT STAGES":"Stage-2"}},"STAGE PLANS":{"Stage-1":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"src","Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE","children":{"Filter Operator":{"predicate:":"(key < 10) (type: boolean)","Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","children":{"Select Operator":{"expressions:":"key (type: string), value (type: string)","outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","children":{"Reduce Output Operator":{"sort order:":"","Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: string), _col1 (type: string)"}}}}}}}},{"TableScan":{"alias:":"src","Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COM
 PLETE Column stats: NONE","children":{"Filter Operator":{"predicate:":"(key < 10) (type: boolean)","Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","children":{"Select Operator":{"expressions:":"key (type: string), value (type: string)","outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","children":{"Reduce Output Operator":{"sort order:":"","Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: string), _col1 (type: string)"}}}}}}}}],"Reduce Operator Tree:":{"Join Operator":{"condition map:":[{"":"Inner Join 0 to 1"}],"keys:":{},"outputColumnNames:":["_col0","_col1","_col2","_col3"],"Statistics:":"Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE","children":{"File Output Operator":{"compressed:":"false","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","outp
 ut format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe"}}}}}}},"Stage-2":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"children":{"Reduce Output Operator":{"key expressions:":"_col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)","sort order:":"++++","Statistics:":"Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE"}}}}],"Reduce Operator Tree:":{"Select Operator":{"expressions:":"KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)","outputColumnNames:":["_col0","_col1","_col2","_col3"],"Statistics:":"Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoo
 p.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"}}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{}}}}}}
-Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Stage 'Stage-1:MAPRED' is a cross product
-PREHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	2	val_2
-0	val_0	2	val_2
-0	val_0	2	val_2
-0	val_0	4	val_4
-0	val_0	4	val_4
-0	val_0	4	val_4
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	8	val_8
-0	val_0	8	val_8
-0	val_0	8	val_8
-0	val_0	9	val_9
-0	val_0	9	val_9
-0	val_0	9	val_9
-2	val_2	0	val_0
-2	val_2	0	val_0
-2	val_2	0	val_0
-2	val_2	2	val_2
-2	val_2	4	val_4
-2	val_2	5	val_5
-2	val_2	5	val_5
-2	val_2	5	val_5
-2	val_2	8	val_8
-2	val_2	9	val_9
-4	val_4	0	val_0
-4	val_4	0	val_0
-4	val_4	0	val_0
-4	val_4	2	val_2
-4	val_4	4	val_4
-4	val_4	5	val_5
-4	val_4	5	val_5
-4	val_4	5	val_5
-4	val_4	8	val_8
-4	val_4	9	val_9
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	2	val_2
-5	val_5	2	val_2
-5	val_5	2	val_2
-5	val_5	4	val_4
-5	val_5	4	val_4
-5	val_5	4	val_4
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	8	val_8
-5	val_5	8	val_8
-5	val_5	8	val_8
-5	val_5	9	val_9
-5	val_5	9	val_9
-5	val_5	9	val_9
-8	val_8	0	val_0
-8	val_8	0	val_0
-8	val_8	0	val_0
-8	val_8	2	val_2
-8	val_8	4	val_4
-8	val_8	5	val_5
-8	val_8	5	val_5
-8	val_8	5	val_5
-8	val_8	8	val_8
-8	val_8	9	val_9
-9	val_9	0	val_0
-9	val_9	0	val_0
-9	val_9	0	val_0
-9	val_9	2	val_2
-9	val_9	4	val_4
-9	val_9	5	val_5
-9	val_9	5	val_5
-9	val_9	5	val_5
-9	val_9	8	val_8
-9	val_9	9	val_9

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/join0.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join0.q.java1.8.out b/ql/src/test/results/clientpositive/join0.q.java1.8.out
deleted file mode 100644
index 343f8a4..0000000
--- a/ql/src/test/results/clientpositive/join0.q.java1.8.out
+++ /dev/null
@@ -1,240 +0,0 @@
-Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Stage 'Stage-1:MAPRED' is a cross product
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
--- SORT_QUERY_RESULTS
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
--- SORT_QUERY_RESULTS
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: (key < 10) (type: boolean)
-              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  sort order: 
-                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col0 (type: string), _col1 (type: string)
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: (key < 10) (type: boolean)
-              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  sort order: 
-                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col0 (type: string), _col1 (type: string)
-      Reduce Operator Tree:
-        Join Operator
-          condition map:
-               Inner Join 0 to 1
-          keys:
-            0 
-            1 
-          outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
-              sort order: ++++
-              Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Select Operator
-          expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
-          outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Stage 'Stage-1:MAPRED' is a cross product
-PREHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-{"STAGE DEPENDENCIES":{"Stage-1":{"ROOT STAGE":"TRUE"},"Stage-2":{"DEPENDENT STAGES":"Stage-1"},"Stage-0":{"DEPENDENT STAGES":"Stage-2"}},"STAGE PLANS":{"Stage-1":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"src","Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE","children":{"Filter Operator":{"predicate:":"(key < 10) (type: boolean)","Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","children":{"Select Operator":{"expressions:":"key (type: string), value (type: string)","outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","children":{"Reduce Output Operator":{"sort order:":"","Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: string), _col1 (type: string)"}}}}}}}},{"TableScan":{"alias:":"src","Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COM
 PLETE Column stats: NONE","children":{"Filter Operator":{"predicate:":"(key < 10) (type: boolean)","Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","children":{"Select Operator":{"expressions:":"key (type: string), value (type: string)","outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","children":{"Reduce Output Operator":{"sort order:":"","Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: string), _col1 (type: string)"}}}}}}}}],"Reduce Operator Tree:":{"Join Operator":{"condition map:":[{"":"Inner Join 0 to 1"}],"keys:":{},"outputColumnNames:":["_col0","_col1","_col2","_col3"],"Statistics:":"Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE","children":{"File Output Operator":{"compressed:":"false","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","outp
 ut format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe"}}}}}}},"Stage-2":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"children":{"Reduce Output Operator":{"key expressions:":"_col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)","sort order:":"++++","Statistics:":"Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE"}}}}],"Reduce Operator Tree:":{"Select Operator":{"expressions:":"KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)","outputColumnNames:":["_col0","_col1","_col2","_col3"],"Statistics:":"Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoo
 p.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"}}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{}}}}}}
-Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Stage 'Stage-1:MAPRED' is a cross product
-PREHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	2	val_2
-0	val_0	2	val_2
-0	val_0	2	val_2
-0	val_0	4	val_4
-0	val_0	4	val_4
-0	val_0	4	val_4
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	8	val_8
-0	val_0	8	val_8
-0	val_0	8	val_8
-0	val_0	9	val_9
-0	val_0	9	val_9
-0	val_0	9	val_9
-2	val_2	0	val_0
-2	val_2	0	val_0
-2	val_2	0	val_0
-2	val_2	2	val_2
-2	val_2	4	val_4
-2	val_2	5	val_5
-2	val_2	5	val_5
-2	val_2	5	val_5
-2	val_2	8	val_8
-2	val_2	9	val_9
-4	val_4	0	val_0
-4	val_4	0	val_0
-4	val_4	0	val_0
-4	val_4	2	val_2
-4	val_4	4	val_4
-4	val_4	5	val_5
-4	val_4	5	val_5
-4	val_4	5	val_5
-4	val_4	8	val_8
-4	val_4	9	val_9
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	2	val_2
-5	val_5	2	val_2
-5	val_5	2	val_2
-5	val_5	4	val_4
-5	val_5	4	val_4
-5	val_5	4	val_4
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	8	val_8
-5	val_5	8	val_8
-5	val_5	8	val_8
-5	val_5	9	val_9
-5	val_5	9	val_9
-5	val_5	9	val_9
-8	val_8	0	val_0
-8	val_8	0	val_0
-8	val_8	0	val_0
-8	val_8	2	val_2
-8	val_8	4	val_4
-8	val_8	5	val_5
-8	val_8	5	val_5
-8	val_8	5	val_5
-8	val_8	8	val_8
-8	val_8	9	val_9
-9	val_9	0	val_0
-9	val_9	0	val_0
-9	val_9	0	val_0
-9	val_9	2	val_2
-9	val_9	4	val_4
-9	val_9	5	val_5
-9	val_9	5	val_5
-9	val_9	5	val_5
-9	val_9	8	val_8
-9	val_9	9	val_9
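
The "Shuffle Join ... is a cross product" warning seen in the join0.q plans above fires because the reduce-side Join Operator has an empty key list ("keys: 0 / 1" with no expressions), so every row of src1 pairs with every row of src2. The following is a minimal, self-contained sketch of that check; the class and method names (CrossProductCheck, isCrossProduct, keysPerInput) are hypothetical illustrations, not Hive's compiler API.

    import java.util.List;
    import java.util.Map;

    // Minimal sketch, with hypothetical names, of the check behind the
    // "Shuffle Join ... is a cross product" warning: a reduce-side join whose
    // per-input key expression lists are all empty pairs every row of one
    // input with every row of the other.
    public class CrossProductCheck {
        static boolean isCrossProduct(Map<Integer, List<String>> keysPerInput) {
            return keysPerInput.values().stream().allMatch(List::isEmpty);
        }

        public static void main(String[] args) {
            // Mirrors "keys: 0 / 1" with no key expressions in the plans above.
            Map<Integer, List<String>> keys = Map.of(0, List.of(), 1, List.of());
            if (isCrossProduct(keys)) {
                System.out.println("Warning: Shuffle Join is a cross product");
            }
        }
    }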


[03/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
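
The golden files deleted by this commit carry a "-- JAVA_VERSION_SPECIFIC_OUTPUT" marker and exist in per-JDK variants (for example join0.q.java1.7.out and join0.q.java1.8.out) alongside a single shared .q.out. As a rough illustration of why such variants existed, here is a hypothetical sketch of how a test harness could resolve a JDK-specific golden file; the names (GoldenFileResolver, resolve) are assumptions for illustration, not Hive's actual QTest driver code.

    import java.io.File;

    // Hypothetical sketch: prefer a JDK-specific golden file such as
    // join0.q.java1.7.out over the generic join0.q.out, based on the running JVM.
    public class GoldenFileResolver {
        static File resolve(File resultsDir, String testName) {
            // e.g. "1.7" on a JDK 7 JVM, "1.8" on a JDK 8 JVM
            String javaVersion = System.getProperty("java.specification.version");
            File versioned = new File(resultsDir, testName + ".q.java" + javaVersion + ".out");
            // Fall back to the single shared golden file when no versioned one exists.
            return versioned.exists() ? versioned : new File(resultsDir, testName + ".q.out");
        }

        public static void main(String[] args) {
            File dir = new File("ql/src/test/results/clientpositive");
            System.out.println(resolve(dir, "join0"));
        }
    }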
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/subquery_notin_having.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/subquery_notin_having.q.out b/ql/src/test/results/clientpositive/subquery_notin_having.q.out
new file mode 100644
index 0000000..c32bf25
--- /dev/null
+++ b/ql/src/test/results/clientpositive/subquery_notin_having.q.out
@@ -0,0 +1,764 @@
+Warning: Shuffle Join JOIN[21][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-2:MAPRED' is a cross product
+PREHOOK: query: -- non agg, non corr
+
+explain
+select key, count(*) 
+from src 
+group by key
+having key not in  
+  ( select key  from src s1 
+    where s1.key > '12'
+  )
+PREHOOK: type: QUERY
+POSTHOOK: query: -- non agg, non corr
+
+explain
+select key, count(*) 
+from src 
+group by key
+having key not in  
+  ( select key  from src s1 
+    where s1.key > '12'
+  )
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1, Stage-4
+  Stage-3 depends on stages: Stage-2
+  Stage-4 is a root stage
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: string)
+              outputColumnNames: key
+              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                aggregations: count()
+                keys: key (type: string)
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0)
+          keys: KEY._col0 (type: string)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Reduce Output Operator
+              sort order: 
+              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+              value expressions: _col0 (type: string), _col1 (type: bigint)
+          TableScan
+            Reduce Output Operator
+              sort order: 
+              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Reduce Output Operator
+              key expressions: _col0 (type: string)
+              sort order: +
+              Map-reduce partition columns: _col0 (type: string)
+              Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
+              value expressions: _col1 (type: bigint)
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key > '12') (type: boolean)
+              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Left Outer Join0 to 1
+          keys:
+            0 _col0 (type: string)
+            1 _col0 (type: string)
+          outputColumnNames: _col0, _col1, _col3
+          Statistics: Num rows: 302 Data size: 3213 Basic stats: COMPLETE Column stats: NONE
+          Filter Operator
+            predicate: _col3 is null (type: boolean)
+            Statistics: Num rows: 151 Data size: 1606 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: _col0 (type: string), _col1 (type: bigint)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 151 Data size: 1606 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 151 Data size: 1606 Basic stats: COMPLETE Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-4
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              Statistics: Num rows: 500 Data size: 2000 Basic stats: COMPLETE Column stats: COMPLETE
+              Filter Operator
+                predicate: false (type: boolean)
+                Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
+                Group By Operator
+                  aggregations: count()
+                  mode: hash
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  Reduce Output Operator
+                    sort order: 
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                    value expressions: _col0 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+          Filter Operator
+            predicate: (_col0 = 0) (type: boolean)
+            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+              File Output Operator
+                compressed: false
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[29][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-2:MAPRED' is a cross product
+PREHOOK: query: -- non agg, corr
+explain
+select b.p_mfgr, min(p_retailprice) 
+from part b 
+group by b.p_mfgr
+having b.p_mfgr not in 
+  (select p_mfgr 
+  from (select p_mfgr, min(p_retailprice) l, max(p_retailprice) r, avg(p_retailprice) a from part group by p_mfgr) a 
+  where min(p_retailprice) = l and r - l > 600
+  )
+PREHOOK: type: QUERY
+POSTHOOK: query: -- non agg, corr
+explain
+select b.p_mfgr, min(p_retailprice) 
+from part b 
+group by b.p_mfgr
+having b.p_mfgr not in 
+  (select p_mfgr 
+  from (select p_mfgr, min(p_retailprice) l, max(p_retailprice) r, avg(p_retailprice) a from part group by p_mfgr) a 
+  where min(p_retailprice) = l and r - l > 600
+  )
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1, Stage-5
+  Stage-3 depends on stages: Stage-2, Stage-6
+  Stage-4 is a root stage
+  Stage-5 depends on stages: Stage-4
+  Stage-6 is a root stage
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: b
+            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: p_mfgr (type: string), p_retailprice (type: double)
+              outputColumnNames: p_mfgr, p_retailprice
+              Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                aggregations: min(p_retailprice)
+                keys: p_mfgr (type: string)
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: double)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: min(VALUE._col0)
+          keys: KEY._col0 (type: string)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Reduce Output Operator
+              sort order: 
+              Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
+              value expressions: _col0 (type: string), _col1 (type: double)
+          TableScan
+            Reduce Output Operator
+              sort order: 
+              Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Reduce Output Operator
+              key expressions: _col0 (type: string), _col1 (type: double)
+              sort order: ++
+              Map-reduce partition columns: _col0 (type: string), _col1 (type: double)
+              Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
+          TableScan
+            Reduce Output Operator
+              key expressions: _col0 (type: string), _col1 (type: double)
+              sort order: ++
+              Map-reduce partition columns: _col0 (type: string), _col1 (type: double)
+              Statistics: Num rows: 4 Data size: 484 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Left Outer Join0 to 1
+          keys:
+            0 _col0 (type: string), _col1 (type: double)
+            1 _col0 (type: string), _col1 (type: double)
+          outputColumnNames: _col0, _col1, _col3
+          Statistics: Num rows: 15 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
+          Filter Operator
+            predicate: _col3 is null (type: boolean)
+            Statistics: Num rows: 7 Data size: 888 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: _col0 (type: string), _col1 (type: double)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 7 Data size: 888 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 7 Data size: 888 Basic stats: COMPLETE Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-4
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: b
+            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: p_mfgr (type: string), p_retailprice (type: double)
+              outputColumnNames: p_mfgr, p_retailprice
+              Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                aggregations: min(p_retailprice), max(p_retailprice)
+                keys: p_mfgr (type: string)
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: double), _col2 (type: double)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: min(VALUE._col0), max(VALUE._col1)
+          keys: KEY._col0 (type: string)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2
+          Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
+          Filter Operator
+            predicate: (((_col2 - _col1) > 600.0) and (_col0 is null or _col1 is null)) (type: boolean)
+            Statistics: Num rows: 2 Data size: 242 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              Statistics: Num rows: 2 Data size: 242 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                aggregations: count()
+                mode: hash
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-5
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Reduce Output Operator
+              sort order: 
+              Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+              value expressions: _col0 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+          Filter Operator
+            predicate: (_col0 = 0) (type: boolean)
+            Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-6
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: b
+            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: p_mfgr (type: string), p_retailprice (type: double)
+              outputColumnNames: p_mfgr, p_retailprice
+              Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                aggregations: min(p_retailprice), max(p_retailprice)
+                keys: p_mfgr (type: string)
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: double), _col2 (type: double)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: min(VALUE._col0), max(VALUE._col1)
+          keys: KEY._col0 (type: string)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2
+          Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
+          Filter Operator
+            predicate: ((_col2 - _col1) > 600.0) (type: boolean)
+            Statistics: Num rows: 4 Data size: 484 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: _col0 (type: string), _col1 (type: double)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 4 Data size: 484 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[29][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-2:MAPRED' is a cross product
+PREHOOK: query: select b.p_mfgr, min(p_retailprice) 
+from part b 
+group by b.p_mfgr
+having b.p_mfgr not in 
+  (select p_mfgr 
+  from (select p_mfgr, min(p_retailprice) l, max(p_retailprice) r, avg(p_retailprice) a from part group by p_mfgr) a 
+  where min(p_retailprice) = l and r - l > 600
+  )
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part
+#### A masked pattern was here ####
+POSTHOOK: query: select b.p_mfgr, min(p_retailprice) 
+from part b 
+group by b.p_mfgr
+having b.p_mfgr not in 
+  (select p_mfgr 
+  from (select p_mfgr, min(p_retailprice) l, max(p_retailprice) r, avg(p_retailprice) a from part group by p_mfgr) a 
+  where min(p_retailprice) = l and r - l > 600
+  )
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part
+#### A masked pattern was here ####
+Manufacturer#1	1173.15
+Manufacturer#2	1690.68
+Warning: Shuffle Join JOIN[31][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-2:MAPRED' is a cross product
+PREHOOK: query: -- agg, non corr
+explain
+select b.p_mfgr, min(p_retailprice) 
+from part b 
+group by b.p_mfgr
+having b.p_mfgr not in 
+  (select p_mfgr 
+  from part a
+  group by p_mfgr
+  having max(p_retailprice) - min(p_retailprice) > 600
+  )
+PREHOOK: type: QUERY
+POSTHOOK: query: -- agg, non corr
+explain
+select b.p_mfgr, min(p_retailprice) 
+from part b 
+group by b.p_mfgr
+having b.p_mfgr not in 
+  (select p_mfgr 
+  from part a
+  group by p_mfgr
+  having max(p_retailprice) - min(p_retailprice) > 600
+  )
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1, Stage-5
+  Stage-3 depends on stages: Stage-2, Stage-6
+  Stage-4 is a root stage
+  Stage-5 depends on stages: Stage-4
+  Stage-6 is a root stage
+  Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: b
+            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: p_mfgr (type: string), p_retailprice (type: double)
+              outputColumnNames: p_mfgr, p_retailprice
+              Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                aggregations: min(p_retailprice)
+                keys: p_mfgr (type: string)
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: double)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: min(VALUE._col0)
+          keys: KEY._col0 (type: string)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Reduce Output Operator
+              sort order: 
+              Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
+              value expressions: _col0 (type: string), _col1 (type: double)
+          TableScan
+            Reduce Output Operator
+              sort order: 
+              Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-3
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Reduce Output Operator
+              key expressions: _col0 (type: string)
+              sort order: +
+              Map-reduce partition columns: _col0 (type: string)
+              Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
+              value expressions: _col1 (type: double)
+          TableScan
+            Reduce Output Operator
+              key expressions: _col0 (type: string)
+              sort order: +
+              Map-reduce partition columns: _col0 (type: string)
+              Statistics: Num rows: 4 Data size: 484 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Left Outer Join0 to 1
+          keys:
+            0 _col0 (type: string)
+            1 _col0 (type: string)
+          outputColumnNames: _col0, _col1, _col3
+          Statistics: Num rows: 15 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
+          Filter Operator
+            predicate: _col3 is null (type: boolean)
+            Statistics: Num rows: 7 Data size: 888 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: _col0 (type: string), _col1 (type: double)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 7 Data size: 888 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 7 Data size: 888 Basic stats: COMPLETE Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-4
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: b
+            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: p_mfgr is null (type: boolean)
+              Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: p_retailprice (type: double)
+                outputColumnNames: _col1
+                Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: max(_col1), min(_col1)
+                  keys: null (type: string)
+                  mode: hash
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: string)
+                    Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: _col1 (type: double), _col2 (type: double)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: max(VALUE._col0), min(VALUE._col1)
+          keys: KEY._col0 (type: string)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2
+          Statistics: Num rows: 6 Data size: 726 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: _col1 (type: double), _col2 (type: double)
+            outputColumnNames: _col1, _col2
+            Statistics: Num rows: 6 Data size: 726 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: ((_col1 - _col2) > 600.0) (type: boolean)
+              Statistics: Num rows: 2 Data size: 242 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                Statistics: Num rows: 2 Data size: 242 Basic stats: COMPLETE Column stats: NONE
+                Group By Operator
+                  aggregations: count()
+                  mode: hash
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-5
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Reduce Output Operator
+              sort order: 
+              Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+              value expressions: _col0 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: count(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+          Filter Operator
+            predicate: (_col0 = 0) (type: boolean)
+            Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-6
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: b
+            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: p_mfgr (type: string), p_retailprice (type: double)
+              outputColumnNames: p_mfgr, p_retailprice
+              Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+              Group By Operator
+                aggregations: max(p_retailprice), min(p_retailprice)
+                keys: p_mfgr (type: string)
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col0 (type: string)
+                  Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: double), _col2 (type: double)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: max(VALUE._col0), min(VALUE._col1)
+          keys: KEY._col0 (type: string)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2
+          Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
+          Filter Operator
+            predicate: ((_col1 - _col2) > 600.0) (type: boolean)
+            Statistics: Num rows: 4 Data size: 484 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: _col0 (type: string)
+              outputColumnNames: _col0
+              Statistics: Num rows: 4 Data size: 484 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[31][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-2:MAPRED' is a cross product
+PREHOOK: query: select b.p_mfgr, min(p_retailprice) 
+from part b 
+group by b.p_mfgr
+having b.p_mfgr not in 
+  (select p_mfgr 
+  from part a
+  group by p_mfgr
+  having max(p_retailprice) - min(p_retailprice) > 600
+  )
+PREHOOK: type: QUERY
+PREHOOK: Input: default@part
+#### A masked pattern was here ####
+POSTHOOK: query: select b.p_mfgr, min(p_retailprice) 
+from part b 
+group by b.p_mfgr
+having b.p_mfgr not in 
+  (select p_mfgr 
+  from part a
+  group by p_mfgr
+  having max(p_retailprice) - min(p_retailprice) > 600
+  )
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@part
+#### A masked pattern was here ####
+Manufacturer#1	1173.15
+Manufacturer#2	1690.68

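The pair of plans above shows how Hive executes a NOT IN (subquery) HAVING clause, and why the optimizer prints the cross-product warnings: the outer aggregate is cross-joined against a one-row guard that exists only when the subquery produced no NULL keys (NOT IN must return an empty result if any subquery key is NULL), and the surviving rows are then left-outer-joined to the subquery keys and filtered with IS NULL. A hand-written HiveQL sketch of the second (non-correlated) query's shape — the aliases o, guard, nullkeys, cnt and sq are illustrative, not taken from the generated plan:

SELECT o.p_mfgr, o.min_price
FROM (SELECT p_mfgr, min(p_retailprice) AS min_price    -- Stage-1: outer aggregate
      FROM part
      GROUP BY p_mfgr) o
CROSS JOIN                                              -- Stage-2: the reported cross product
     (SELECT c
      FROM (SELECT count(*) AS c                        -- Stage-4: count NULL keys that
            FROM (SELECT p_mfgr                         --   would satisfy the HAVING
                  FROM part
                  WHERE p_mfgr IS NULL
                  GROUP BY p_mfgr
                  HAVING max(p_retailprice) - min(p_retailprice) > 600) nullkeys) cnt
      WHERE c = 0) guard                                -- Stage-5: one row iff count = 0
LEFT OUTER JOIN                                         -- Stage-3: anti-join via IS NULL
     (SELECT p_mfgr                                     -- Stage-6: the subquery proper
      FROM part
      GROUP BY p_mfgr
      HAVING max(p_retailprice) - min(p_retailprice) > 600) sq
  ON o.p_mfgr = sq.p_mfgr
WHERE sq.p_mfgr IS NULL;
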
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/tez/join0.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/join0.q.java1.7.out b/ql/src/test/results/clientpositive/tez/join0.q.java1.7.out
deleted file mode 100644
index 59d9087..0000000
--- a/ql/src/test/results/clientpositive/tez/join0.q.java1.7.out
+++ /dev/null
@@ -1,239 +0,0 @@
-Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
--- SORT_QUERY_RESULTS
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
--- SORT_QUERY_RESULTS
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key < 10) (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: string), _col1 (type: string)
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key < 10) (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: string), _col1 (type: string)
-        Reducer 2 
-            Reduce Operator Tree:
-              Merge Join Operator
-                condition map:
-                     Inner Join 0 to 1
-                keys:
-                  0 
-                  1 
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
-                  sort order: ++++
-                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-        Reducer 3 
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
-PREHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-#### A masked pattern was here ####
-Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
-PREHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	2	val_2
-0	val_0	2	val_2
-0	val_0	2	val_2
-0	val_0	4	val_4
-0	val_0	4	val_4
-0	val_0	4	val_4
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	8	val_8
-0	val_0	8	val_8
-0	val_0	8	val_8
-0	val_0	9	val_9
-0	val_0	9	val_9
-0	val_0	9	val_9
-2	val_2	0	val_0
-2	val_2	0	val_0
-2	val_2	0	val_0
-2	val_2	2	val_2
-2	val_2	4	val_4
-2	val_2	5	val_5
-2	val_2	5	val_5
-2	val_2	5	val_5
-2	val_2	8	val_8
-2	val_2	9	val_9
-4	val_4	0	val_0
-4	val_4	0	val_0
-4	val_4	0	val_0
-4	val_4	2	val_2
-4	val_4	4	val_4
-4	val_4	5	val_5
-4	val_4	5	val_5
-4	val_4	5	val_5
-4	val_4	8	val_8
-4	val_4	9	val_9
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	2	val_2
-5	val_5	2	val_2
-5	val_5	2	val_2
-5	val_5	4	val_4
-5	val_5	4	val_4
-5	val_5	4	val_4
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	8	val_8
-5	val_5	8	val_8
-5	val_5	8	val_8
-5	val_5	9	val_9
-5	val_5	9	val_9
-5	val_5	9	val_9
-8	val_8	0	val_0
-8	val_8	0	val_0
-8	val_8	0	val_0
-8	val_8	2	val_2
-8	val_8	4	val_4
-8	val_8	5	val_5
-8	val_8	5	val_5
-8	val_8	5	val_5
-8	val_8	8	val_8
-8	val_8	9	val_9
-9	val_9	0	val_0
-9	val_9	0	val_0
-9	val_9	0	val_0
-9	val_9	2	val_2
-9	val_9	4	val_4
-9	val_9	5	val_5
-9	val_9	5	val_5
-9	val_9	5	val_5
-9	val_9	8	val_8
-9	val_9	9	val_9

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/tez/join0.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/join0.q.java1.8.out b/ql/src/test/results/clientpositive/tez/join0.q.java1.8.out
deleted file mode 100644
index 10d7802..0000000
--- a/ql/src/test/results/clientpositive/tez/join0.q.java1.8.out
+++ /dev/null
@@ -1,236 +0,0 @@
-Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key < 10) (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: string), _col1 (type: string)
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key < 10) (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: string), _col1 (type: string)
-        Reducer 2 
-            Reduce Operator Tree:
-              Merge Join Operator
-                condition map:
-                     Inner Join 0 to 1
-                keys:
-                  0 
-                  1 
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
-                  sort order: ++++
-                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-        Reducer 3 
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
-PREHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-#### A masked pattern was here ####
-Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
-PREHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	2	val_2
-0	val_0	2	val_2
-0	val_0	2	val_2
-0	val_0	4	val_4
-0	val_0	4	val_4
-0	val_0	4	val_4
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	8	val_8
-0	val_0	8	val_8
-0	val_0	8	val_8
-0	val_0	9	val_9
-0	val_0	9	val_9
-0	val_0	9	val_9
-2	val_2	0	val_0
-2	val_2	0	val_0
-2	val_2	0	val_0
-2	val_2	2	val_2
-2	val_2	4	val_4
-2	val_2	5	val_5
-2	val_2	5	val_5
-2	val_2	5	val_5
-2	val_2	8	val_8
-2	val_2	9	val_9
-4	val_4	0	val_0
-4	val_4	0	val_0
-4	val_4	0	val_0
-4	val_4	2	val_2
-4	val_4	4	val_4
-4	val_4	5	val_5
-4	val_4	5	val_5
-4	val_4	5	val_5
-4	val_4	8	val_8
-4	val_4	9	val_9
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	2	val_2
-5	val_5	2	val_2
-5	val_5	2	val_2
-5	val_5	4	val_4
-5	val_5	4	val_4
-5	val_5	4	val_4
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	8	val_8
-5	val_5	8	val_8
-5	val_5	8	val_8
-5	val_5	9	val_9
-5	val_5	9	val_9
-5	val_5	9	val_9
-8	val_8	0	val_0
-8	val_8	0	val_0
-8	val_8	0	val_0
-8	val_8	2	val_2
-8	val_8	4	val_4
-8	val_8	5	val_5
-8	val_8	5	val_5
-8	val_8	5	val_5
-8	val_8	8	val_8
-8	val_8	9	val_9
-9	val_9	0	val_0
-9	val_9	0	val_0
-9	val_9	0	val_0
-9	val_9	2	val_2
-9	val_9	4	val_4
-9	val_9	5	val_5
-9	val_9	5	val_5
-9	val_9	5	val_5
-9	val_9	8	val_8
-9	val_9	9	val_9

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/tez/join0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/join0.q.out b/ql/src/test/results/clientpositive/tez/join0.q.out
new file mode 100644
index 0000000..67d71d5
--- /dev/null
+++ b/ql/src/test/results/clientpositive/tez/join0.q.out
@@ -0,0 +1,237 @@
+Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+EXPLAIN
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+PREHOOK: type: QUERY
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+EXPLAIN
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (key < 10) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: string), _col1 (type: string)
+        Map 4 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (key < 10) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: string), _col1 (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
+                  sort order: ++++
+                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+        Reducer 3 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: EXPLAIN FORMATTED
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN FORMATTED
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+POSTHOOK: type: QUERY
+#### A masked pattern was here ####
+Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	2	val_2
+0	val_0	2	val_2
+0	val_0	2	val_2
+0	val_0	4	val_4
+0	val_0	4	val_4
+0	val_0	4	val_4
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	8	val_8
+0	val_0	8	val_8
+0	val_0	8	val_8
+0	val_0	9	val_9
+0	val_0	9	val_9
+0	val_0	9	val_9
+2	val_2	0	val_0
+2	val_2	0	val_0
+2	val_2	0	val_0
+2	val_2	2	val_2
+2	val_2	4	val_4
+2	val_2	5	val_5
+2	val_2	5	val_5
+2	val_2	5	val_5
+2	val_2	8	val_8
+2	val_2	9	val_9
+4	val_4	0	val_0
+4	val_4	0	val_0
+4	val_4	0	val_0
+4	val_4	2	val_2
+4	val_4	4	val_4
+4	val_4	5	val_5
+4	val_4	5	val_5
+4	val_4	5	val_5
+4	val_4	8	val_8
+4	val_4	9	val_9
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	2	val_2
+5	val_5	2	val_2
+5	val_5	2	val_2
+5	val_5	4	val_4
+5	val_5	4	val_4
+5	val_5	4	val_4
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	8	val_8
+5	val_5	8	val_8
+5	val_5	8	val_8
+5	val_5	9	val_9
+5	val_5	9	val_9
+5	val_5	9	val_9
+8	val_8	0	val_0
+8	val_8	0	val_0
+8	val_8	0	val_0
+8	val_8	2	val_2
+8	val_8	4	val_4
+8	val_8	5	val_5
+8	val_8	5	val_5
+8	val_8	5	val_5
+8	val_8	8	val_8
+8	val_8	9	val_9
+9	val_9	0	val_0
+9	val_9	0	val_0
+9	val_9	0	val_0
+9	val_9	2	val_2
+9	val_9	4	val_4
+9	val_9	5	val_5
+9	val_9	5	val_5
+9	val_9	5	val_5
+9	val_9	8	val_8
+9	val_9	9	val_9

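The replacement above collapses the two per-JDK golden files deleted earlier in this diff (join0.q.java1.7.out and join0.q.java1.8.out) into a single join0.q.out. The split existed because pieces of the printed output came from hash-ordered map structures, and HashMap iteration order changed between Java 7 and Java 8, so the same plan serialized differently per JDK; once that ordering is made deterministic, one golden file suffices, and only the -- SORT_QUERY_RESULTS driver directive is still needed so that row ordering cannot fail the comparison. The implied header of join0.q under the consolidated scheme (a sketch — the .q change itself is not part of this hunk):

-- SORT_QUERY_RESULTS
-- (the JAVA_VERSION_SPECIFIC_OUTPUT marker is dropped: output no longer varies by JDK)
EXPLAIN
SELECT src1.key as k1, src1.value as v1,
       src2.key as k2, src2.value as v2 FROM
  (SELECT * FROM src WHERE src.key < 10) src1
    JOIN
  (SELECT * FROM src WHERE src.key < 10) src2
  SORT BY k1, v1, k2, v2;
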
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/tez/vector_cast_constant.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/vector_cast_constant.q.java1.7.out b/ql/src/test/results/clientpositive/tez/vector_cast_constant.q.java1.7.out
deleted file mode 100644
index 420e788..0000000
--- a/ql/src/test/results/clientpositive/tez/vector_cast_constant.q.java1.7.out
+++ /dev/null
@@ -1,218 +0,0 @@
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE over1korc
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE over1korc
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1k
-POSTHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1k
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@over1k
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@over1k
-PREHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1korc
-PREHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1korc
-POSTHOOK: Lineage: over1korc.b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1korc.bin SIMPLE [(over1k)over1k.FieldSchema(name:bin, type:binary, comment:null), ]
-POSTHOOK: Lineage: over1korc.bo SIMPLE [(over1k)over1k.FieldSchema(name:bo, type:boolean, comment:null), ]
-POSTHOOK: Lineage: over1korc.d SIMPLE [(over1k)over1k.FieldSchema(name:d, type:double, comment:null), ]
-POSTHOOK: Lineage: over1korc.dec SIMPLE [(over1k)over1k.FieldSchema(name:dec, type:decimal(4,2), comment:null), ]
-POSTHOOK: Lineage: over1korc.f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1korc.i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1korc.s SIMPLE [(over1k)over1k.FieldSchema(name:s, type:string, comment:null), ]
-POSTHOOK: Lineage: over1korc.si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1korc.t SIMPLE [(over1k)over1k.FieldSchema(name:t, type:tinyint, comment:null), ]
-POSTHOOK: Lineage: over1korc.ts SIMPLE [(over1k)over1k.FieldSchema(name:ts, type:timestamp, comment:null), ]
-PREHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: over1korc
-                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: i (type: int)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      aggregations: avg(50), avg(50.0), avg(50)
-                      keys: _col0 (type: int)
-                      mode: hash
-                      outputColumnNames: _col0, _col1, _col2, _col3
-                      Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: int)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col1 (type: struct<count:bigint,sum:double,input:int>), _col2 (type: struct<count:bigint,sum:double,input:double>), _col3 (type: struct<count:bigint,sum:decimal(12,0),input:decimal(10,0)>)
-            Execution mode: vectorized
-        Reducer 2 
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: avg(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2)
-                keys: KEY._col0 (type: int)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: int)
-                  sort order: +
-                  Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                  TopN Hash Memory Usage: 0.1
-                  value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: decimal(14,4))
-        Reducer 3 
-            Execution mode: vectorized
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: decimal(14,4))
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                Limit
-                  Number of rows: 10
-                  Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: 10
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-65536	50.0	50.0	50.0000
-65537	50.0	50.0	50.0000
-65538	50.0	50.0	50.0000
-65539	50.0	50.0	50.0000
-65540	50.0	50.0	50.0000
-65541	50.0	50.0	50.0000
-65542	50.0	50.0	50.0000
-65543	50.0	50.0	50.0000
-65544	50.0	50.0	50.0000
-65545	50.0	50.0	50.0000

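Beyond the vectorized plan, the result block just above records the type behavior this test pins down: AVG over the INT and DOUBLE casts surfaces as DOUBLE (printed 50.0), while AVG over the DECIMAL cast is widened — the plan carries it as decimal(14,4), hence the printed 50.0000. A quick way to confirm the inferred result types (a sketch; tmp_avg_types is an illustrative name, not part of the test suite):

CREATE TABLE tmp_avg_types AS
SELECT AVG(CAST(50 AS INT))     AS avg_int_ok,     -- double
       AVG(CAST(50 AS DOUBLE))  AS avg_double_ok,  -- double
       AVG(CAST(50 AS DECIMAL)) AS avg_decimal_ok  -- decimal(14,4)
FROM over1korc;
DESCRIBE tmp_avg_types;   -- shows the three column types above
DROP TABLE tmp_avg_types;
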
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/tez/vector_cast_constant.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/vector_cast_constant.q.java1.8.out b/ql/src/test/results/clientpositive/tez/vector_cast_constant.q.java1.8.out
deleted file mode 100644
index 331edd0..0000000
--- a/ql/src/test/results/clientpositive/tez/vector_cast_constant.q.java1.8.out
+++ /dev/null
@@ -1,216 +0,0 @@
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE over1korc
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE over1korc
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1k
-POSTHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1k
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@over1k
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@over1k
-PREHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1korc
-PREHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1korc
-POSTHOOK: Lineage: over1korc.b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1korc.bin SIMPLE [(over1k)over1k.FieldSchema(name:bin, type:binary, comment:null), ]
-POSTHOOK: Lineage: over1korc.bo SIMPLE [(over1k)over1k.FieldSchema(name:bo, type:boolean, comment:null), ]
-POSTHOOK: Lineage: over1korc.d SIMPLE [(over1k)over1k.FieldSchema(name:d, type:double, comment:null), ]
-POSTHOOK: Lineage: over1korc.dec SIMPLE [(over1k)over1k.FieldSchema(name:dec, type:decimal(4,2), comment:null), ]
-POSTHOOK: Lineage: over1korc.f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1korc.i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1korc.s SIMPLE [(over1k)over1k.FieldSchema(name:s, type:string, comment:null), ]
-POSTHOOK: Lineage: over1korc.si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1korc.t SIMPLE [(over1k)over1k.FieldSchema(name:t, type:tinyint, comment:null), ]
-POSTHOOK: Lineage: over1korc.ts SIMPLE [(over1k)over1k.FieldSchema(name:ts, type:timestamp, comment:null), ]
-PREHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: over1korc
-                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: i (type: int)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      aggregations: avg(50), avg(50.0), avg(50)
-                      keys: _col0 (type: int)
-                      mode: hash
-                      outputColumnNames: _col0, _col1, _col2, _col3
-                      Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: int)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col1 (type: struct<count:bigint,sum:double,input:int>), _col2 (type: struct<count:bigint,sum:double,input:double>), _col3 (type: struct<count:bigint,sum:decimal(12,0),input:decimal(10,0)>)
-            Execution mode: vectorized
-        Reducer 2 
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: avg(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2)
-                keys: KEY._col0 (type: int)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: int)
-                  sort order: +
-                  Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: decimal(14,4))
-        Reducer 3 
-            Execution mode: vectorized
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: decimal(14,4))
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                Limit
-                  Number of rows: 10
-                  Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: 10
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-65536	50.0	50.0	50
-65537	50.0	50.0	50
-65538	50.0	50.0	50
-65539	50.0	50.0	50
-65540	50.0	50.0	50
-65541	50.0	50.0	50
-65542	50.0	50.0	50
-65543	50.0	50.0	50
-65544	50.0	50.0	50
-65545	50.0	50.0	50
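
For context on the deletions above: for the same aggregate, the JDK-specific
golden files disagreed in how the decimal average was rendered (50.0000 in the
file excerpted at the top of this part, 50 in the java1.8 file just shown). A
minimal HiveQL sketch of that aggregate, assuming the over1korc table defined
in the q-file:

-- CAST(50 AS DECIMAL) is decimal(10,0); Hive widens AVG over decimal(p,s)
-- to decimal(p+4,s+4), decimal(14,4) in the plans above, which is why the
-- retained rendering carries four fractional digits: 50.0000.
SELECT i, AVG(CAST(50 AS DECIMAL)) AS avg_decimal_ok
FROM over1korc GROUP BY i ORDER BY i LIMIT 10;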

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/tez/vector_cast_constant.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/vector_cast_constant.q.out b/ql/src/test/results/clientpositive/tez/vector_cast_constant.q.out
new file mode 100644
index 0000000..46b13c8
--- /dev/null
+++ b/ql/src/test/results/clientpositive/tez/vector_cast_constant.q.out
@@ -0,0 +1,214 @@
+PREHOOK: query: DROP TABLE over1k
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE over1k
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE over1korc
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE over1korc
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: -- data setup
+CREATE TABLE over1k(t tinyint,
+           si smallint,
+           i int,
+           b bigint,
+           f float,
+           d double,
+           bo boolean,
+           s string,
+           ts timestamp,
+           dec decimal(4,2),
+           bin binary)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@over1k
+POSTHOOK: query: -- data setup
+CREATE TABLE over1k(t tinyint,
+           si smallint,
+           i int,
+           b bigint,
+           f float,
+           d double,
+           bo boolean,
+           s string,
+           ts timestamp,
+           dec decimal(4,2),
+           bin binary)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@over1k
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@over1k
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@over1k
+PREHOOK: query: CREATE TABLE over1korc(t tinyint,
+           si smallint,
+           i int,
+           b bigint,
+           f float,
+           d double,
+           bo boolean,
+           s string,
+           ts timestamp,
+           dec decimal(4,2),
+           bin binary)
+STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@over1korc
+POSTHOOK: query: CREATE TABLE over1korc(t tinyint,
+           si smallint,
+           i int,
+           b bigint,
+           f float,
+           d double,
+           bo boolean,
+           s string,
+           ts timestamp,
+           dec decimal(4,2),
+           bin binary)
+STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@over1korc
+PREHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
+PREHOOK: type: QUERY
+PREHOOK: Input: default@over1k
+PREHOOK: Output: default@over1korc
+POSTHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@over1k
+POSTHOOK: Output: default@over1korc
+POSTHOOK: Lineage: over1korc.b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
+POSTHOOK: Lineage: over1korc.bin SIMPLE [(over1k)over1k.FieldSchema(name:bin, type:binary, comment:null), ]
+POSTHOOK: Lineage: over1korc.bo SIMPLE [(over1k)over1k.FieldSchema(name:bo, type:boolean, comment:null), ]
+POSTHOOK: Lineage: over1korc.d SIMPLE [(over1k)over1k.FieldSchema(name:d, type:double, comment:null), ]
+POSTHOOK: Lineage: over1korc.dec SIMPLE [(over1k)over1k.FieldSchema(name:dec, type:decimal(4,2), comment:null), ]
+POSTHOOK: Lineage: over1korc.f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
+POSTHOOK: Lineage: over1korc.i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
+POSTHOOK: Lineage: over1korc.s SIMPLE [(over1k)over1k.FieldSchema(name:s, type:string, comment:null), ]
+POSTHOOK: Lineage: over1korc.si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
+POSTHOOK: Lineage: over1korc.t SIMPLE [(over1k)over1k.FieldSchema(name:t, type:tinyint, comment:null), ]
+POSTHOOK: Lineage: over1korc.ts SIMPLE [(over1k)over1k.FieldSchema(name:ts, type:timestamp, comment:null), ]
+PREHOOK: query: EXPLAIN SELECT 
+  i,
+  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
+  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
+  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
+  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT 
+  i,
+  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
+  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
+  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
+  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: over1korc
+                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: i (type: int)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+                    Group By Operator
+                      aggregations: avg(50), avg(50.0), avg(50)
+                      keys: _col0 (type: int)
+                      mode: hash
+                      outputColumnNames: _col0, _col1, _col2, _col3
+                      Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: int)
+                        Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col1 (type: struct<count:bigint,sum:double,input:int>), _col2 (type: struct<count:bigint,sum:double,input:double>), _col3 (type: struct<count:bigint,sum:decimal(12,0),input:decimal(10,0)>)
+            Execution mode: vectorized
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: avg(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2)
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: int)
+                  sort order: +
+                  Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
+                  TopN Hash Memory Usage: 0.1
+                  value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: decimal(14,4))
+        Reducer 3 
+            Execution mode: vectorized
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: decimal(14,4))
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
+                Limit
+                  Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 10
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT 
+  i,
+  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
+  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
+  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
+  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@over1korc
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT 
+  i,
+  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
+  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
+  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
+  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@over1korc
+#### A masked pattern was here ####
+65536	50.0	50.0	50.0000
+65537	50.0	50.0	50.0000
+65538	50.0	50.0	50.0000
+65539	50.0	50.0	50.0000
+65540	50.0	50.0	50.0000
+65541	50.0	50.0	50.0000
+65542	50.0	50.0	50.0000
+65543	50.0	50.0	50.0000
+65544	50.0	50.0	50.0000
+65545	50.0	50.0	50.0000
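
One substantive line in the regenerated plan above that the deleted per-JDK
files lacked is "TopN Hash Memory Usage: 0.1". A hedged sketch of the setting
assumed to drive it, hive.limit.pushdown.memory.usage, the fraction of task
memory the top-N hash in the Reduce Output Operator may use:

-- The ORDER BY ... LIMIT pair lets Hive push the limit into the Reduce
-- Output Operator as a top-N hash; 0.1 caps that hash at 10% of memory.
SET hive.limit.pushdown.memory.usage=0.1;
EXPLAIN SELECT i FROM over1korc GROUP BY i ORDER BY i LIMIT 10;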


[09/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
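
Background on the mechanism this commit removes: q-files whose golden output
depended on the running JDK carried a JAVA_VERSION_SPECIFIC_OUTPUT marker, and
one result file was kept per JDK (.q.java1.7.out and .q.java1.8.out). A sketch
of such a q-file header, reconstructed from the deleted files below:

-- SORT_QUERY_RESULTS
-- JAVA_VERSION_SPECIFIC_OUTPUT

EXPLAIN EXTENDED
 FROM src a
 FULL OUTER JOIN srcpart b
 ON (a.key = b.key AND b.ds = '2008-04-08')
 SELECT a.key, a.value, b.key, b.value
 WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25;
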
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.java1.7.out b/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.java1.7.out
deleted file mode 100644
index 68943e1..0000000
--- a/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.java1.7.out
+++ /dev/null
@@ -1,709 +0,0 @@
-PREHOOK: query: -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-PREHOOK: type: QUERY
-POSTHOOK: query: -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 2), Map 3 (PARTITION-LEVEL SORT, 2)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: a
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  GatherStats: false
-                  Select Operator
-                    expressions: key (type: string), value (type: string)
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: string)
-                      null sort order: a
-                      sort order: +
-                      Map-reduce partition columns: _col0 (type: string)
-                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                      tag: 0
-                      value expressions: _col1 (type: string)
-                      auto parallelism: false
-            Path -> Alias:
-#### A masked pattern was here ####
-            Path -> Partition:
-#### A masked pattern was here ####
-                Partition
-                  base file name: src
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.src
-                    numFiles 1
-                    numRows 500
-                    rawDataSize 5312
-                    serialization.ddl struct src { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.src
-                      numFiles 1
-                      numRows 500
-                      rawDataSize 5312
-                      serialization.ddl struct src { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      totalSize 5812
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.src
-                  name: default.src
-            Truncated Path -> Alias:
-              /src [a]
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-                  GatherStats: false
-                  Select Operator
-                    expressions: key (type: string), value (type: string), ds (type: string)
-                    outputColumnNames: _col0, _col1, _col2
-                    Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: string)
-                      null sort order: a
-                      sort order: +
-                      Map-reduce partition columns: _col0 (type: string)
-                      Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-                      tag: 1
-                      value expressions: _col1 (type: string), _col2 (type: string)
-                      auto parallelism: false
-            Path -> Alias:
-#### A masked pattern was here ####
-            Path -> Partition:
-#### A masked pattern was here ####
-                Partition
-                  base file name: hr=11
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                    hr 11
-                  properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-#### A masked pattern was here ####
-                Partition
-                  base file name: hr=12
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                    hr 12
-                  properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-#### A masked pattern was here ####
-                Partition
-                  base file name: hr=11
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-09
-                    hr 11
-                  properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-#### A masked pattern was here ####
-                Partition
-                  base file name: hr=12
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-09
-                    hr 12
-                  properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-            Truncated Path -> Alias:
-              /srcpart/ds=2008-04-08/hr=11 [b]
-              /srcpart/ds=2008-04-08/hr=12 [b]
-              /srcpart/ds=2008-04-09/hr=11 [b]
-              /srcpart/ds=2008-04-09/hr=12 [b]
-        Reducer 2 
-            Needs Tagging: true
-            Reduce Operator Tree:
-              Join Operator
-                condition map:
-                     Outer Join 0 to 1
-                filter mappings:
-                  1 [0, 1]
-                filter predicates:
-                  0 
-                  1 {(VALUE._col1 = '2008-04-08')}
-                keys:
-                  0 _col0 (type: string)
-                  1 _col0 (type: string)
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
-                Filter Operator
-                  isSamplingPred: false
-                  predicate: ((UDFToDouble(_col0) > 10.0) and (UDFToDouble(_col0) < 20.0) and (UDFToDouble(_col2) > 15.0) and (UDFToDouble(_col2) < 25.0)) (type: boolean)
-                  Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    GlobalTableId: 0
-#### A masked pattern was here ####
-                    NumFilesPerFileSink: 1
-                    Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        properties:
-                          columns _col0,_col1,_col2,_col3
-                          columns.types string:string:string:string
-                          escape.delim \
-                          hive.serialization.extend.additional.nesting.levels true
-                          serialization.escape.crlf true
-                          serialization.format 1
-                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    TotalFiles: 1
-                    GatherStats: false
-                    MultiFileSpray: false
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-#### A masked pattern was here ####
-17	val_17	17	val_17
-17	val_17	17	val_17
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-19	val_19	19	val_19
-19	val_19	19	val_19
-PREHOOK: query: EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 2), Map 3 (PARTITION-LEVEL SORT, 2)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: a
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  GatherStats: false
-                  Filter Operator
-                    isSamplingPred: false
-                    predicate: ((UDFToDouble(key) > 15.0) and (UDFToDouble(key) < 25.0)) (type: boolean)
-                    Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: string)
-                        null sort order: a
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
-                        tag: 0
-                        value expressions: _col1 (type: string)
-                        auto parallelism: false
-            Path -> Alias:
-#### A masked pattern was here ####
-            Path -> Partition:
-#### A masked pattern was here ####
-                Partition
-                  base file name: src
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.src
-                    numFiles 1
-                    numRows 500
-                    rawDataSize 5312
-                    serialization.ddl struct src { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.src
-                      numFiles 1
-                      numRows 500
-                      rawDataSize 5312
-                      serialization.ddl struct src { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      totalSize 5812
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.src
-                  name: default.src
-            Truncated Path -> Alias:
-              /src [a]
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-                  GatherStats: false
-                  Filter Operator
-                    isSamplingPred: false
-                    predicate: ((UDFToDouble(key) > 15.0) and (UDFToDouble(key) < 25.0)) (type: boolean)
-                    Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: string)
-                        null sort order: a
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
-                        tag: 1
-                        value expressions: _col1 (type: string)
-                        auto parallelism: false
-            Path -> Alias:
-#### A masked pattern was here ####
-            Path -> Partition:
-#### A masked pattern was here ####
-                Partition
-                  base file name: hr=11
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                    hr 11
-                  properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-#### A masked pattern was here ####
-                Partition
-                  base file name: hr=12
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                    hr 12
-                  properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-            Truncated Path -> Alias:
-              /srcpart/ds=2008-04-08/hr=11 [b]
-              /srcpart/ds=2008-04-08/hr=12 [b]
-        Reducer 2 
-            Needs Tagging: true
-            Reduce Operator Tree:
-              Join Operator
-                condition map:
-                     Right Outer Join0 to 1
-                keys:
-                  0 _col0 (type: string)
-                  1 _col0 (type: string)
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 122 Data size: 1296 Basic stats: COMPLETE Column stats: NONE
-                Filter Operator
-                  isSamplingPred: false
-                  predicate: ((UDFToDouble(_col0) > 10.0) and (UDFToDouble(_col0) < 20.0)) (type: boolean)
-                  Statistics: Num rows: 13 Data size: 138 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    GlobalTableId: 0
-#### A masked pattern was here ####
-                    NumFilesPerFileSink: 1
-                    Statistics: Num rows: 13 Data size: 138 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        properties:
-                          columns _col0,_col1,_col2,_col3
-                          columns.types string:string:string:string
-                          escape.delim \
-                          hive.serialization.extend.additional.nesting.levels true
-                          serialization.escape.crlf true
-                          serialization.format 1
-                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    TotalFiles: 1
-                    GatherStats: false
-                    MultiFileSpray: false
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-17	val_17	17	val_17
-17	val_17	17	val_17
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-19	val_19	19	val_19
-19	val_19	19	val_19
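
A note on the two plans in the deleted java1.7 file above: with a FULL OUTER
JOIN, the ds = '2008-04-08' predicate in the ON clause becomes a join-time
filter (the "filter predicates" entry in Reducer 2), so no partitions are
pruned and all four srcpart partitions appear under Truncated Path -> Alias;
the same predicate in the WHERE clause lets the optimizer prune to the two
2008-04-08 partitions and rewrite the join as a right outer join. A sketch of
the prunable form, assuming the standard src/srcpart test tables:

-- Partition predicate in WHERE rather than ON: only ds=2008-04-08/hr=11
-- and ds=2008-04-08/hr=12 are scanned, per the plan above.
EXPLAIN EXTENDED
 FROM src a
 FULL OUTER JOIN srcpart b
 ON (a.key = b.key)
 SELECT a.key, a.value, b.key, b.value
 WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08';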

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.java1.8.out b/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.java1.8.out
deleted file mode 100644
index c3454ee..0000000
--- a/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.java1.8.out
+++ /dev/null
@@ -1,879 +0,0 @@
-PREHOOK: query: -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-PREHOOK: type: QUERY
-POSTHOOK: query: -- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_FULLOUTERJOIN
-         TOK_TABREF
-            TOK_TABNAME
-               src
-            a
-         TOK_TABREF
-            TOK_TABNAME
-               srcpart
-            b
-         AND
-            =
-               .
-                  TOK_TABLE_OR_COL
-                     a
-                  key
-               .
-                  TOK_TABLE_OR_COL
-                     b
-                  key
-            =
-               .
-                  TOK_TABLE_OR_COL
-                     b
-                  ds
-               '2008-04-08'
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_DIR
-            TOK_TMP_FILE
-      TOK_SELECT
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  a
-               key
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  a
-               value
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  b
-               key
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  b
-               value
-      TOK_WHERE
-         AND
-            AND
-               AND
-                  >
-                     .
-                        TOK_TABLE_OR_COL
-                           a
-                        key
-                     10
-                  <
-                     .
-                        TOK_TABLE_OR_COL
-                           a
-                        key
-                     20
-               >
-                  .
-                     TOK_TABLE_OR_COL
-                        b
-                     key
-                  15
-            <
-               .
-                  TOK_TABLE_OR_COL
-                     b
-                  key
-               25
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 2), Map 3 (PARTITION-LEVEL SORT, 2)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-                  GatherStats: false
-                  Select Operator
-                    expressions: key (type: string), value (type: string), ds (type: string)
-                    outputColumnNames: _col0, _col1, _col2
-                    Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: string)
-                      sort order: +
-                      Map-reduce partition columns: _col0 (type: string)
-                      Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-                      tag: 0
-                      value expressions: _col1 (type: string), _col2 (type: string)
-                      auto parallelism: false
-            Path -> Alias:
-#### A masked pattern was here ####
-            Path -> Partition:
-#### A masked pattern was here ####
-                Partition
-                  base file name: hr=11
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                    hr 11
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-#### A masked pattern was here ####
-                Partition
-                  base file name: hr=12
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                    hr 12
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-#### A masked pattern was here ####
-                Partition
-                  base file name: hr=11
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-09
-                    hr 11
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-#### A masked pattern was here ####
-                Partition
-                  base file name: hr=12
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-09
-                    hr 12
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-            Truncated Path -> Alias:
-              /srcpart/ds=2008-04-08/hr=11 [b]
-              /srcpart/ds=2008-04-08/hr=12 [b]
-              /srcpart/ds=2008-04-09/hr=11 [b]
-              /srcpart/ds=2008-04-09/hr=12 [b]
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: a
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  GatherStats: false
-                  Select Operator
-                    expressions: key (type: string), value (type: string)
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: _col0 (type: string)
-                      sort order: +
-                      Map-reduce partition columns: _col0 (type: string)
-                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                      tag: 1
-                      value expressions: _col1 (type: string)
-                      auto parallelism: false
-            Path -> Alias:
-#### A masked pattern was here ####
-            Path -> Partition:
-#### A masked pattern was here ####
-                Partition
-                  base file name: src
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.src
-                    numFiles 1
-                    numRows 500
-                    rawDataSize 5312
-                    serialization.ddl struct src { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      COLUMN_STATS_ACCURATE true
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.src
-                      numFiles 1
-                      numRows 500
-                      rawDataSize 5312
-                      serialization.ddl struct src { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      totalSize 5812
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.src
-                  name: default.src
-            Truncated Path -> Alias:
-              /src [a]
-        Reducer 2 
-            Needs Tagging: true
-            Reduce Operator Tree:
-              Join Operator
-                condition map:
-                     Outer Join 0 to 1
-                filter mappings:
-                  0 [1, 1]
-                filter predicates:
-                  0 {(VALUE._col1 = '2008-04-08')}
-                  1 
-                keys:
-                  0 _col0 (type: string)
-                  1 _col0 (type: string)
-                outputColumnNames: _col0, _col1, _col3, _col4
-                Statistics: Num rows: 2200 Data size: 23372 Basic stats: COMPLETE Column stats: NONE
-                Filter Operator
-                  isSamplingPred: false
-                  predicate: ((((UDFToDouble(_col3) > 10.0) and (UDFToDouble(_col3) < 20.0)) and (UDFToDouble(_col0) > 15.0)) and (UDFToDouble(_col0) < 25.0)) (type: boolean)
-                  Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: _col3 (type: string), _col4 (type: string), _col0 (type: string), _col1 (type: string)
-                    outputColumnNames: _col0, _col1, _col2, _col3
-                    Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 0
-#### A masked pattern was here ####
-                      NumFilesPerFileSink: 1
-                      Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                      table:
-                          input format: org.apache.hadoop.mapred.TextInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                          properties:
-                            columns _col0,_col1,_col2,_col3
-                            columns.types string:string:string:string
-                            escape.delim \
-                            hive.serialization.extend.additional.nesting.levels true
-                            serialization.format 1
-                            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      TotalFiles: 1
-                      GatherStats: false
-                      MultiFileSpray: false
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key AND b.ds = '2008-04-08')
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
-#### A masked pattern was here ####
-17	val_17	17	val_17
-17	val_17	17	val_17
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-19	val_19	19	val_19
-19	val_19	19	val_19
-PREHOOK: query: EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN EXTENDED
- FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_FULLOUTERJOIN
-         TOK_TABREF
-            TOK_TABNAME
-               src
-            a
-         TOK_TABREF
-            TOK_TABNAME
-               srcpart
-            b
-         =
-            .
-               TOK_TABLE_OR_COL
-                  a
-               key
-            .
-               TOK_TABLE_OR_COL
-                  b
-               key
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_DIR
-            TOK_TMP_FILE
-      TOK_SELECT
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  a
-               key
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  a
-               value
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  b
-               key
-         TOK_SELEXPR
-            .
-               TOK_TABLE_OR_COL
-                  b
-               value
-      TOK_WHERE
-         AND
-            AND
-               AND
-                  AND
-                     >
-                        .
-                           TOK_TABLE_OR_COL
-                              a
-                           key
-                        10
-                     <
-                        .
-                           TOK_TABLE_OR_COL
-                              a
-                           key
-                        20
-                  >
-                     .
-                        TOK_TABLE_OR_COL
-                           b
-                        key
-                     15
-               <
-                  .
-                     TOK_TABLE_OR_COL
-                        b
-                     key
-                  25
-            =
-               .
-                  TOK_TABLE_OR_COL
-                     b
-                  ds
-               '2008-04-08'
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 2), Map 3 (PARTITION-LEVEL SORT, 2)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-                  GatherStats: false
-                  Filter Operator
-                    isSamplingPred: false
-                    predicate: ((UDFToDouble(key) > 15.0) and (UDFToDouble(key) < 25.0)) (type: boolean)
-                    Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: string)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 111 Data size: 1179 Basic stats: COMPLETE Column stats: NONE
-                        tag: 0
-                        value expressions: _col1 (type: string)
-                        auto parallelism: false
-            Path -> Alias:
-#### A masked pattern was here ####
-            Path -> Partition:
-#### A masked pattern was here ####
-                Partition
-                  base file name: hr=11
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                    hr 11
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-#### A masked pattern was here ####
-                Partition
-                  base file name: hr=12
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                    hr 12
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-            Truncated Path -> Alias:
-              /srcpart/ds=2008-04-08/hr=11 [b]
-              /srcpart/ds=2008-04-08/hr=12 [b]
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: a
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  GatherStats: false
-                  Filter Operator
-                    isSamplingPred: false
-                    predicate: ((UDFToDouble(key) > 15.0) and (UDFToDouble(key) < 25.0)) (type: boolean)
-                    Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: string)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
-                        tag: 1
-                        value expressions: _col1 (type: string)
-                        auto parallelism: false
-            Path -> Alias:
-#### A masked pattern was here ####
-            Path -> Partition:
-#### A masked pattern was here ####
-                Partition
-                  base file name: src
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.src
-                    numFiles 1
-                    numRows 500
-                    rawDataSize 5312
-                    serialization.ddl struct src { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      COLUMN_STATS_ACCURATE true
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.src
-                      numFiles 1
-                      numRows 500
-                      rawDataSize 5312
-                      serialization.ddl struct src { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      totalSize 5812
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.src
-                  name: default.src
-            Truncated Path -> Alias:
-              /src [a]
-        Reducer 2 
-            Needs Tagging: true
-            Reduce Operator Tree:
-              Join Operator
-                condition map:
-                     Left Outer Join0 to 1
-                keys:
-                  0 _col0 (type: string)
-                  1 _col0 (type: string)
-                outputColumnNames: _col0, _col1, _col3, _col4
-                Statistics: Num rows: 122 Data size: 1296 Basic stats: COMPLETE Column stats: NONE
-                Filter Operator
-                  isSamplingPred: false
-                  predicate: ((UDFToDouble(_col3) > 10.0) and (UDFToDouble(_col3) < 20.0)) (type: boolean)
-                  Statistics: Num rows: 13 Data size: 138 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: _col3 (type: string), _col4 (type: string), _col0 (type: string), _col1 (type: string)
-                    outputColumnNames: _col0, _col1, _col2, _col3
-                    Statistics: Num rows: 13 Data size: 138 Basic stats: COMPLETE Column stats: NONE
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 0
-#### A masked pattern was here ####
-                      NumFilesPerFileSink: 1
-                      Statistics: Num rows: 13 Data size: 138 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                      table:
-                          input format: org.apache.hadoop.mapred.TextInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                          properties:
-                            columns _col0,_col1,_col2,_col3
-                            columns.types string:string:string:string
-                            escape.delim \
-                            hive.serialization.extend.additional.nesting.levels true
-                            serialization.format 1
-                            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      TotalFiles: 1
-                      GatherStats: false
-                      MultiFileSpray: false
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: FROM 
-  src a
- FULL OUTER JOIN 
-  srcpart b 
- ON (a.key = b.key)
- SELECT a.key, a.value, b.key, b.value
- WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 AND b.ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-17	val_17	17	val_17
-17	val_17	17	val_17
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-18	val_18	18	val_18
-19	val_19	19	val_19
-19	val_19	19	val_19


[31/34] hive git commit: HIVE-13409: Fix JDK8 test failures related to COLUMN_STATS_ACCURATE (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
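
Every hunk below makes the same mechanical change: the expected value of the
COLUMN_STATS_ACCURATE table property is rewritten from
{"COLUMN_STATS":{...},"BASIC_STATS":"true"} to
{"BASIC_STATS":"true","COLUMN_STATS":{...}}, i.e. with the JSON keys in sorted
order. The underlying issue is that iterating a plain HashMap yields different
key orders on JDK7 and JDK8, so golden files recorded under one JDK failed
string comparison under the other; emitting the keys in a deterministic
(alphabetical) order makes the output JDK-independent. The following is a
minimal illustrative sketch of that idea only, not the actual Hive patch; the
class and the toSortedJson helper are invented for this example, and the nested
COLUMN_STATS object is flattened to a string for brevity.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;

public class DeterministicStatsJson {

  // Hypothetical helper: serialize a map of stats flags as a JSON object
  // with keys emitted in sorted order, so the rendered string does not
  // depend on the JDK's HashMap iteration order.
  static String toSortedJson(Map<String, String> stats) {
    Map<String, String> sorted = new TreeMap<>(stats); // sorts keys alphabetically
    StringBuilder sb = new StringBuilder("{");
    boolean first = true;
    for (Map.Entry<String, String> e : sorted.entrySet()) {
      if (!first) {
        sb.append(",");
      }
      sb.append("\"").append(e.getKey()).append("\":\"")
        .append(e.getValue()).append("\"");
      first = false;
    }
    return sb.append("}").toString();
  }

  public static void main(String[] args) {
    Map<String, String> stats = new LinkedHashMap<>();
    stats.put("COLUMN_STATS", "...");  // inserted first, like the old output
    stats.put("BASIC_STATS", "true");
    // Prints {"BASIC_STATS":"true","COLUMN_STATS":"..."} on any JDK,
    // matching the key order on the '+' side of the hunks below.
    System.out.println(toSortedJson(stats));
  }
}

Because the sorted order is stable across JDKs, the golden files regenerated in
the diffs below only need to be updated once.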
http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/join34.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join34.q.out b/ql/src/test/results/clientpositive/spark/join34.q.out
index 235d36a..ebd9c89 100644
--- a/ql/src/test/results/clientpositive/spark/join34.q.out
+++ b/ql/src/test/results/clientpositive/spark/join34.q.out
@@ -78,7 +78,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -98,7 +98,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -150,7 +150,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -170,7 +170,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -222,7 +222,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -242,7 +242,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/join35.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join35.q.out b/ql/src/test/results/clientpositive/spark/join35.q.out
index 7b873c6..d14dadf 100644
--- a/ql/src/test/results/clientpositive/spark/join35.q.out
+++ b/ql/src/test/results/clientpositive/spark/join35.q.out
@@ -86,7 +86,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -106,7 +106,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -164,7 +164,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -184,7 +184,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -236,7 +236,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -256,7 +256,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/join9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join9.q.out b/ql/src/test/results/clientpositive/spark/join9.q.out
index 4119855..05aa50b 100644
--- a/ql/src/test/results/clientpositive/spark/join9.q.out
+++ b/ql/src/test/results/clientpositive/spark/join9.q.out
@@ -64,7 +64,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -135,7 +135,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -155,7 +155,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/join_map_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join_map_ppr.q.out b/ql/src/test/results/clientpositive/spark/join_map_ppr.q.out
index 4d6d39d..5d7cecf 100644
--- a/ql/src/test/results/clientpositive/spark/join_map_ppr.q.out
+++ b/ql/src/test/results/clientpositive/spark/join_map_ppr.q.out
@@ -62,7 +62,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -82,7 +82,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -129,7 +129,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -149,7 +149,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -247,7 +247,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -731,7 +731,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/load_dyn_part8.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/load_dyn_part8.q.out b/ql/src/test/results/clientpositive/spark/load_dyn_part8.q.out
index b528357..ee70e71 100644
--- a/ql/src/test/results/clientpositive/spark/load_dyn_part8.q.out
+++ b/ql/src/test/results/clientpositive/spark/load_dyn_part8.q.out
@@ -151,7 +151,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -197,7 +197,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -243,7 +243,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -289,7 +289,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/louter_join_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/louter_join_ppr.q.out b/ql/src/test/results/clientpositive/spark/louter_join_ppr.q.out
index 3d90dc4..6cfde3f 100644
--- a/ql/src/test/results/clientpositive/spark/louter_join_ppr.q.out
+++ b/ql/src/test/results/clientpositive/spark/louter_join_ppr.q.out
@@ -63,7 +63,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -83,7 +83,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -138,7 +138,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -184,7 +184,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -370,7 +370,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -416,7 +416,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -462,7 +462,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -508,7 +508,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -582,7 +582,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -602,7 +602,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -778,7 +778,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -798,7 +798,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -853,7 +853,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -899,7 +899,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1081,7 +1081,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1127,7 +1127,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1199,7 +1199,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1219,7 +1219,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
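
The hunks above (and those that follow for the spark test outputs) all make the same mechanical change: the JSON value of the COLUMN_STATS_ACCURATE table property is rewritten so its keys appear in alphabetical order ("BASIC_STATS" before "COLUMN_STATS"). The order matters only because these .q.out golden files are compared textually, and map iteration order is not guaranteed to be stable across JDK versions. Purely as an illustration (not the actual patch -- the class StatsJsonDemo, the helper toStableJson, and the use of Jackson's ObjectMapper here are assumptions for the sketch), one way to render such a property deterministically is to copy the flag map into a sorted map before serializing:

    import java.util.LinkedHashMap;
    import java.util.Map;
    import java.util.TreeMap;

    import com.fasterxml.jackson.databind.ObjectMapper;

    // Sketch: serialize a stats-flag map with keys in natural (alphabetical)
    // order so the rendered JSON string is identical on every JDK.
    public class StatsJsonDemo {

      static String toStableJson(Map<String, Object> flags) throws Exception {
        // A TreeMap iterates its entries in sorted key order, and Jackson
        // writes map entries in iteration order, so the output is stable.
        return new ObjectMapper().writeValueAsString(new TreeMap<String, Object>(flags));
      }

      public static void main(String[] args) throws Exception {
        Map<String, String> columnStats = new TreeMap<String, String>();
        columnStats.put("key", "true");
        columnStats.put("value", "true");

        Map<String, Object> flags = new LinkedHashMap<String, Object>();
        flags.put("COLUMN_STATS", columnStats);   // inserted first...
        flags.put("BASIC_STATS", "true");         // ...but sorted on output

        // Prints: {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
        System.out.println(toStableJson(flags));
      }
    }

Run either way, the sketch prints the same string regardless of insertion order, which is the stability the regenerated golden files below rely on.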

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/mapjoin_mapjoin.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/mapjoin_mapjoin.q.out b/ql/src/test/results/clientpositive/spark/mapjoin_mapjoin.q.out
index aea47f7..1900ed6 100644
--- a/ql/src/test/results/clientpositive/spark/mapjoin_mapjoin.q.out
+++ b/ql/src/test/results/clientpositive/spark/mapjoin_mapjoin.q.out
@@ -50,7 +50,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -70,7 +70,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -120,7 +120,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -140,7 +140,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -237,7 +237,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -283,7 +283,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -329,7 +329,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -375,7 +375,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/optimize_nullscan.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/optimize_nullscan.q.out b/ql/src/test/results/clientpositive/spark/optimize_nullscan.q.out
index 4aecb73..ec43c12 100644
--- a/ql/src/test/results/clientpositive/spark/optimize_nullscan.q.out
+++ b/ql/src/test/results/clientpositive/spark/optimize_nullscan.q.out
@@ -181,7 +181,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -201,7 +201,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -255,7 +255,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -300,7 +300,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -345,7 +345,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -390,7 +390,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -556,7 +556,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -576,7 +576,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -630,7 +630,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -676,7 +676,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -722,7 +722,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -768,7 +768,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -945,7 +945,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -965,7 +965,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -1019,7 +1019,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1064,7 +1064,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1109,7 +1109,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1154,7 +1154,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1334,7 +1334,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1354,7 +1354,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -1400,7 +1400,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1420,7 +1420,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -1549,7 +1549,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1569,7 +1569,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -1616,7 +1616,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1636,7 +1636,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -1755,7 +1755,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1775,7 +1775,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/pcr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/pcr.q.out b/ql/src/test/results/clientpositive/spark/pcr.q.out
index cd16787..cbebbdd 100644
--- a/ql/src/test/results/clientpositive/spark/pcr.q.out
+++ b/ql/src/test/results/clientpositive/spark/pcr.q.out
@@ -3942,7 +3942,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -4077,7 +4077,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -4123,7 +4123,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -4262,7 +4262,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -4308,7 +4308,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/ppd_join_filter.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/ppd_join_filter.q.out b/ql/src/test/results/clientpositive/spark/ppd_join_filter.q.out
index 6b1cadf..fbba885 100644
--- a/ql/src/test/results/clientpositive/spark/ppd_join_filter.q.out
+++ b/ql/src/test/results/clientpositive/spark/ppd_join_filter.q.out
@@ -67,7 +67,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -87,7 +87,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -141,7 +141,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -161,7 +161,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -354,7 +354,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -374,7 +374,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -428,7 +428,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -448,7 +448,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -641,7 +641,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -661,7 +661,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -715,7 +715,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -735,7 +735,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -928,7 +928,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -948,7 +948,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -1002,7 +1002,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1022,7 +1022,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/router_join_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/router_join_ppr.q.out b/ql/src/test/results/clientpositive/spark/router_join_ppr.q.out
index 1149f16..9629768 100644
--- a/ql/src/test/results/clientpositive/spark/router_join_ppr.q.out
+++ b/ql/src/test/results/clientpositive/spark/router_join_ppr.q.out
@@ -63,7 +63,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -83,7 +83,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -138,7 +138,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -184,7 +184,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -230,7 +230,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -276,7 +276,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -473,7 +473,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -519,7 +519,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -591,7 +591,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -611,7 +611,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -778,7 +778,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -798,7 +798,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -853,7 +853,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -899,7 +899,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1085,7 +1085,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1131,7 +1131,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1203,7 +1203,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1223,7 +1223,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
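
Since every file in this commit changes in exactly the same way, one aside (nothing in this patch does this): a harness that parsed the property instead of diffing it textually would have been immune to the reordering, because Jackson's tree model compares object nodes by key and value rather than by position. A minimal sketch, assuming only that jackson-databind is on the classpath:

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    // Sketch: semantic comparison of the two COLUMN_STATS_ACCURATE spellings.
    // ObjectNode.equals() ignores key order, so old and new values are equal.
    public class JsonOrderCompare {
      public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode before = mapper.readTree(
            "{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}");
        JsonNode after = mapper.readTree(
            "{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}");
        System.out.println(before.equals(after)); // prints: true
      }
    }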

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/sample1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/sample1.q.out b/ql/src/test/results/clientpositive/spark/sample1.q.out
index eb9d5f6..4bd5c8c 100644
--- a/ql/src/test/results/clientpositive/spark/sample1.q.out
+++ b/ql/src/test/results/clientpositive/spark/sample1.q.out
@@ -85,7 +85,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/sample2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/sample2.q.out b/ql/src/test/results/clientpositive/spark/sample2.q.out
index b13f818..fe9e2f5 100644
--- a/ql/src/test/results/clientpositive/spark/sample2.q.out
+++ b/ql/src/test/results/clientpositive/spark/sample2.q.out
@@ -83,7 +83,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count 2
                     bucket_field_name key
                     columns key,value
@@ -104,7 +104,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count 2
                       bucket_field_name key
                       columns key,value

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/sample4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/sample4.q.out b/ql/src/test/results/clientpositive/spark/sample4.q.out
index 69066c1..987a445 100644
--- a/ql/src/test/results/clientpositive/spark/sample4.q.out
+++ b/ql/src/test/results/clientpositive/spark/sample4.q.out
@@ -83,7 +83,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count 2
                     bucket_field_name key
                     columns key,value
@@ -104,7 +104,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count 2
                       bucket_field_name key
                       columns key,value

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/sample5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/sample5.q.out b/ql/src/test/results/clientpositive/spark/sample5.q.out
index 819939c..77477ba 100644
--- a/ql/src/test/results/clientpositive/spark/sample5.q.out
+++ b/ql/src/test/results/clientpositive/spark/sample5.q.out
@@ -84,7 +84,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count 2
                     bucket_field_name key
                     columns key,value
@@ -105,7 +105,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count 2
                       bucket_field_name key
                       columns key,value

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/sample6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/sample6.q.out b/ql/src/test/results/clientpositive/spark/sample6.q.out
index bf06004..2ed7d7a 100644
--- a/ql/src/test/results/clientpositive/spark/sample6.q.out
+++ b/ql/src/test/results/clientpositive/spark/sample6.q.out
@@ -81,7 +81,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count 2
                     bucket_field_name key
                     columns key,value
@@ -102,7 +102,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count 2
                       bucket_field_name key
                       columns key,value
@@ -472,7 +472,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count 2
                     bucket_field_name key
                     columns key,value
@@ -493,7 +493,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count 2
                       bucket_field_name key
                       columns key,value
@@ -847,7 +847,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count 2
                     bucket_field_name key
                     columns key,value
@@ -868,7 +868,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count 2
                       bucket_field_name key
                       columns key,value
@@ -1475,7 +1475,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count 2
                     bucket_field_name key
                     columns key,value
@@ -1496,7 +1496,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count 2
                       bucket_field_name key
                       columns key,value
@@ -1946,7 +1946,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count 2
                     bucket_field_name key
                     columns key,value
@@ -1967,7 +1967,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count 2
                       bucket_field_name key
                       columns key,value
@@ -2404,7 +2404,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count 4
                     bucket_field_name key
                     columns key,value
@@ -2425,7 +2425,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count 4
                       bucket_field_name key
                       columns key,value
@@ -2450,7 +2450,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count 4
                     bucket_field_name key
                     columns key,value
@@ -2471,7 +2471,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count 4
                       bucket_field_name key
                       columns key,value
@@ -2709,7 +2709,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count 4
                     bucket_field_name key
                     columns key,value
@@ -2730,7 +2730,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count 4
                       bucket_field_name key
                       columns key,value

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/sample7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/sample7.q.out b/ql/src/test/results/clientpositive/spark/sample7.q.out
index a821c76..784000d 100644
--- a/ql/src/test/results/clientpositive/spark/sample7.q.out
+++ b/ql/src/test/results/clientpositive/spark/sample7.q.out
@@ -82,7 +82,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count 2
                     bucket_field_name key
                     columns key,value
@@ -103,7 +103,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count 2
                       bucket_field_name key
                       columns key,value


[30/34] hive git commit: HIVE-13409: Fix JDK8 test failures related to COLUMN_STATS_ACCURATE (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
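All of the COLUMN_STATS_ACCURATE churn in these golden files comes from JSON map-key ordering: the property value is serialized from a map, and HashMap iteration order changed between JDK7 and JDK8, so the same stats rendered as {"COLUMN_STATS":...,"BASIC_STATS":...} on one JDK and {"BASIC_STATS":...,"COLUMN_STATS":...} on the other. Below is a minimal, hypothetical sketch of the general remedy (sort map entries at serialization time so the output is JDK-independent); the class name and the use of Jackson's ORDER_MAP_ENTRIES_BY_KEYS are illustrative assumptions here, not necessarily what the HIVE-13409 patch itself does.

import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;

// Hypothetical demo class, not part of the Hive codebase.
public class DeterministicStatsJson {
  public static void main(String[] args) throws Exception {
    // Stats flags collected into HashMaps, whose iteration order is
    // unspecified and happens to differ between JDK7 and JDK8.
    Map<String, String> columnStats = new HashMap<>();
    columnStats.put("key", "true");
    columnStats.put("value", "true");
    Map<String, Object> accurate = new HashMap<>();
    accurate.put("COLUMN_STATS", columnStats);
    accurate.put("BASIC_STATS", "true");

    // Sorting map entries by key at serialization time yields the same
    // string on any JDK, including for nested maps:
    // {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
    ObjectMapper mapper = new ObjectMapper();
    mapper.enable(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS);
    System.out.println(mapper.writeValueAsString(accurate));
  }
}

The key-sorted form matches the "+" lines in the hunks below, which is why every updated expectation now puts "BASIC_STATS" before "COLUMN_STATS".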
http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/sample8.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/sample8.q.out b/ql/src/test/results/clientpositive/spark/sample8.q.out
index e847fa5..59807de 100644
--- a/ql/src/test/results/clientpositive/spark/sample8.q.out
+++ b/ql/src/test/results/clientpositive/spark/sample8.q.out
@@ -57,7 +57,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -126,7 +126,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -172,7 +172,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -218,7 +218,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -264,7 +264,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/stats0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/stats0.q.out b/ql/src/test/results/clientpositive/spark/stats0.q.out
index 0b14e21..491b4d0 100644
--- a/ql/src/test/results/clientpositive/spark/stats0.q.out
+++ b/ql/src/test/results/clientpositive/spark/stats0.q.out
@@ -74,7 +74,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -94,7 +94,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -1388,7 +1388,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1408,7 +1408,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/stats_only_null.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/stats_only_null.q.out b/ql/src/test/results/clientpositive/spark/stats_only_null.q.out
index 032b659..ec65619 100644
--- a/ql/src/test/results/clientpositive/spark/stats_only_null.q.out
+++ b/ql/src/test/results/clientpositive/spark/stats_only_null.q.out
@@ -230,7 +230,7 @@ Database:           	default
 Table:              	stats_null_part     	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"}}
 	numFiles            	1                   
 	numRows             	6                   
 	rawDataSize         	71                  
@@ -271,7 +271,7 @@ Database:           	default
 Table:              	stats_null_part     	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"}}
 	numFiles            	1                   
 	numRows             	4                   
 	rawDataSize         	49                  

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/subquery_multiinsert.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/subquery_multiinsert.q.out b/ql/src/test/results/clientpositive/spark/subquery_multiinsert.q.out
index d6df85a..91e6cfb 100644
--- a/ql/src/test/results/clientpositive/spark/subquery_multiinsert.q.out
+++ b/ql/src/test/results/clientpositive/spark/subquery_multiinsert.q.out
@@ -73,8 +73,8 @@ STAGE PLANS:
     Spark
       Edges:
         Reducer 2 <- Map 10 (PARTITION-LEVEL SORT, 1), Reducer 9 (PARTITION-LEVEL SORT, 1)
-        Reducer 3 <- Map 7 (PARTITION-LEVEL SORT, 4), Reducer 2 (PARTITION-LEVEL SORT, 4)
-        Reducer 5 <- Map 11 (PARTITION-LEVEL SORT, 4), Map 6 (PARTITION-LEVEL SORT, 4)
+        Reducer 3 <- Map 7 (PARTITION-LEVEL SORT, 2), Reducer 2 (PARTITION-LEVEL SORT, 2)
+        Reducer 5 <- Map 11 (PARTITION-LEVEL SORT, 2), Map 6 (PARTITION-LEVEL SORT, 2)
         Reducer 9 <- Map 8 (GROUP, 1)
         Reducer 4 <- Reducer 3 (SORT, 1)
 #### A masked pattern was here ####

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/transform_ppr1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/transform_ppr1.q.out b/ql/src/test/results/clientpositive/spark/transform_ppr1.q.out
index 57cb338..0dfd7d0 100644
--- a/ql/src/test/results/clientpositive/spark/transform_ppr1.q.out
+++ b/ql/src/test/results/clientpositive/spark/transform_ppr1.q.out
@@ -79,7 +79,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -125,7 +125,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -171,7 +171,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -217,7 +217,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/transform_ppr2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/transform_ppr2.q.out b/ql/src/test/results/clientpositive/spark/transform_ppr2.q.out
index 2dfbd1c..3959df6 100644
--- a/ql/src/test/results/clientpositive/spark/transform_ppr2.q.out
+++ b/ql/src/test/results/clientpositive/spark/transform_ppr2.q.out
@@ -81,7 +81,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -127,7 +127,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out b/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out
index 0459d93..5762865 100644
--- a/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out
+++ b/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out
@@ -119,7 +119,7 @@ STAGE PLANS:
   Stage: Stage-1
     Spark
       Edges:
-        Reducer 2 <- Map 1 (GROUP, 4)
+        Reducer 2 <- Map 1 (GROUP, 2)
         Reducer 3 <- Reducer 2 (SORT, 1)
 #### A masked pattern was here ####
       Vertices:

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/stats0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/stats0.q.out b/ql/src/test/results/clientpositive/stats0.q.out
index bbe38c1..97d66e7 100644
--- a/ql/src/test/results/clientpositive/stats0.q.out
+++ b/ql/src/test/results/clientpositive/stats0.q.out
@@ -71,7 +71,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -91,7 +91,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -1384,7 +1384,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1404,7 +1404,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/stats_invalidation.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/stats_invalidation.q.out b/ql/src/test/results/clientpositive/stats_invalidation.q.out
index d24fdc3..d822f4f 100644
--- a/ql/src/test/results/clientpositive/stats_invalidation.q.out
+++ b/ql/src/test/results/clientpositive/stats_invalidation.q.out
@@ -44,7 +44,7 @@ Retention:          	0
 #### A masked pattern was here ####
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 	numFiles            	1                   
 	numRows             	500                 
 	rawDataSize         	5312                

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/stats_only_null.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/stats_only_null.q.out b/ql/src/test/results/clientpositive/stats_only_null.q.out
index 032f6c8..b47fe64 100644
--- a/ql/src/test/results/clientpositive/stats_only_null.q.out
+++ b/ql/src/test/results/clientpositive/stats_only_null.q.out
@@ -218,7 +218,7 @@ Database:           	default
 Table:              	stats_null_part     	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"}}
 	numFiles            	1                   
 	numRows             	6                   
 	rawDataSize         	71                  
@@ -259,7 +259,7 @@ Database:           	default
 Table:              	stats_null_part     	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"}}
 	numFiles            	1                   
 	numRows             	4                   
 	rawDataSize         	49                  

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/tez/bucket3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/bucket3.q.out b/ql/src/test/results/clientpositive/tez/bucket3.q.out
index 1532edc..250d03d 100644
--- a/ql/src/test/results/clientpositive/tez/bucket3.q.out
+++ b/ql/src/test/results/clientpositive/tez/bucket3.q.out
@@ -59,7 +59,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -79,7 +79,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/tez/bucket4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/bucket4.q.out b/ql/src/test/results/clientpositive/tez/bucket4.q.out
index 4291e44..b14c672 100644
--- a/ql/src/test/results/clientpositive/tez/bucket4.q.out
+++ b/ql/src/test/results/clientpositive/tez/bucket4.q.out
@@ -56,7 +56,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -76,7 +76,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/tez/ctas.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/ctas.q.out b/ql/src/test/results/clientpositive/tez/ctas.q.out
index d92a446..9dffc0b 100644
--- a/ql/src/test/results/clientpositive/tez/ctas.q.out
+++ b/ql/src/test/results/clientpositive/tez/ctas.q.out
@@ -742,7 +742,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -762,7 +762,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/tez/disable_merge_for_bucketing.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/disable_merge_for_bucketing.q.out b/ql/src/test/results/clientpositive/tez/disable_merge_for_bucketing.q.out
index fb71214..c1717e3 100644
--- a/ql/src/test/results/clientpositive/tez/disable_merge_for_bucketing.q.out
+++ b/ql/src/test/results/clientpositive/tez/disable_merge_for_bucketing.q.out
@@ -55,7 +55,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -75,7 +75,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/tez/mapjoin_mapjoin.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/mapjoin_mapjoin.q.out b/ql/src/test/results/clientpositive/tez/mapjoin_mapjoin.q.out
index 316c914..2c2b2cf 100644
--- a/ql/src/test/results/clientpositive/tez/mapjoin_mapjoin.q.out
+++ b/ql/src/test/results/clientpositive/tez/mapjoin_mapjoin.q.out
@@ -96,7 +96,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -142,7 +142,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -188,7 +188,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -234,7 +234,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -307,7 +307,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -327,7 +327,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -378,7 +378,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -398,7 +398,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out b/ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out
index 2cae5ce..b4423d4 100644
--- a/ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out
+++ b/ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out
@@ -183,7 +183,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -203,7 +203,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -257,7 +257,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -302,7 +302,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -347,7 +347,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -392,7 +392,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -560,7 +560,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -580,7 +580,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -634,7 +634,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -680,7 +680,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -726,7 +726,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -772,7 +772,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -952,7 +952,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -972,7 +972,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -1026,7 +1026,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1071,7 +1071,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1116,7 +1116,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1161,7 +1161,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1344,7 +1344,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1364,7 +1364,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -1410,7 +1410,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1430,7 +1430,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -1566,7 +1566,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1586,7 +1586,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -1633,7 +1633,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1653,7 +1653,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'
@@ -1774,7 +1774,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -1794,7 +1794,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       columns key,value
                       columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/tez/sample1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/sample1.q.out b/ql/src/test/results/clientpositive/tez/sample1.q.out
index 2120a1ff..882621b 100644
--- a/ql/src/test/results/clientpositive/tez/sample1.q.out
+++ b/ql/src/test/results/clientpositive/tez/sample1.q.out
@@ -86,7 +86,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/tez/schema_evol_stats.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/schema_evol_stats.q.out b/ql/src/test/results/clientpositive/tez/schema_evol_stats.q.out
index d396a61..af541aa 100644
--- a/ql/src/test/results/clientpositive/tez/schema_evol_stats.q.out
+++ b/ql/src/test/results/clientpositive/tez/schema_evol_stats.q.out
@@ -109,7 +109,7 @@ Database:           	default
 Table:              	partitioned1        	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"}}
 	numFiles            	1                   
 	numRows             	4                   
 	rawDataSize         	40                  
@@ -150,7 +150,7 @@ Database:           	default
 Table:              	partitioned1        	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"}}
 	numFiles            	1                   
 	numRows             	4                   
 	rawDataSize         	56                  
@@ -305,7 +305,7 @@ Database:           	default
 Table:              	partitioned1        	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"}}
 	numFiles            	1                   
 	numRows             	4                   
 	rawDataSize         	384                 
@@ -346,7 +346,7 @@ Database:           	default
 Table:              	partitioned1        	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"}}
 	numFiles            	1                   
 	numRows             	4                   
 	rawDataSize         	732                 

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/tez/stats_only_null.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/stats_only_null.q.out b/ql/src/test/results/clientpositive/tez/stats_only_null.q.out
index f4cc11e..8c17509 100644
--- a/ql/src/test/results/clientpositive/tez/stats_only_null.q.out
+++ b/ql/src/test/results/clientpositive/tez/stats_only_null.q.out
@@ -232,7 +232,7 @@ Database:           	default
 Table:              	stats_null_part     	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"}}
 	numFiles            	1                   
 	numRows             	6                   
 	rawDataSize         	71                  
@@ -273,7 +273,7 @@ Database:           	default
 Table:              	stats_null_part     	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\"}}
 	numFiles            	1                   
 	numRows             	4                   
 	rawDataSize         	49                  

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/tez/transform_ppr1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/transform_ppr1.q.out b/ql/src/test/results/clientpositive/tez/transform_ppr1.q.out
index 5d7374f..50deff6 100644
--- a/ql/src/test/results/clientpositive/tez/transform_ppr1.q.out
+++ b/ql/src/test/results/clientpositive/tez/transform_ppr1.q.out
@@ -80,7 +80,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -126,7 +126,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -172,7 +172,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -218,7 +218,7 @@ STAGE PLANS:
                     ds 2008-04-09
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/tez/transform_ppr2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/transform_ppr2.q.out b/ql/src/test/results/clientpositive/tez/transform_ppr2.q.out
index 4d74124..2b1abb9 100644
--- a/ql/src/test/results/clientpositive/tez/transform_ppr2.q.out
+++ b/ql/src/test/results/clientpositive/tez/transform_ppr2.q.out
@@ -82,7 +82,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'
@@ -128,7 +128,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     columns key,value
                     columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/transform_ppr1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/transform_ppr1.q.out b/ql/src/test/results/clientpositive/transform_ppr1.q.out
index 8c58139..f15646a 100644
--- a/ql/src/test/results/clientpositive/transform_ppr1.q.out
+++ b/ql/src/test/results/clientpositive/transform_ppr1.q.out
@@ -74,7 +74,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -120,7 +120,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -166,7 +166,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -212,7 +212,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/transform_ppr2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/transform_ppr2.q.out b/ql/src/test/results/clientpositive/transform_ppr2.q.out
index 8e36abd..db99985 100644
--- a/ql/src/test/results/clientpositive/transform_ppr2.q.out
+++ b/ql/src/test/results/clientpositive/transform_ppr2.q.out
@@ -76,7 +76,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -122,7 +122,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/udf_explode.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udf_explode.q.out b/ql/src/test/results/clientpositive/udf_explode.q.out
index ea12e80..bd68e96 100644
--- a/ql/src/test/results/clientpositive/udf_explode.q.out
+++ b/ql/src/test/results/clientpositive/udf_explode.q.out
@@ -82,7 +82,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -102,7 +102,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -266,7 +266,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -286,7 +286,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/udtf_explode.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udtf_explode.q.out b/ql/src/test/results/clientpositive/udtf_explode.q.out
index e067a0a..4f8bd5e 100644
--- a/ql/src/test/results/clientpositive/udtf_explode.q.out
+++ b/ql/src/test/results/clientpositive/udtf_explode.q.out
@@ -79,7 +79,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -99,7 +99,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -331,7 +331,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -351,7 +351,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/union_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/union_ppr.q.out b/ql/src/test/results/clientpositive/union_ppr.q.out
index c5b1193..9763679 100644
--- a/ql/src/test/results/clientpositive/union_ppr.q.out
+++ b/ql/src/test/results/clientpositive/union_ppr.q.out
@@ -90,7 +90,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -136,7 +136,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'


[18/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
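
Background for this removal: the .q.java1.7.out files deleted below (marked
JAVA_VERSION_SPECIFIC_OUTPUT in the test comments) existed because parts of
the golden output, such as the "Skewed Value to Truncated Path" map, are
rendered in HashMap iteration order, which is unspecified and in practice
differs between JDK 7 and JDK 8. A small self-contained Java sketch of the
effect (class name and map contents are illustrative, not Hive code):

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class MapOrderDemo {
  public static void main(String[] args) {
    Map<String, String> skewed = new HashMap<>();
    skewed.put("[484, val_484]", "/ds=2008-04-08/hr=b1/key=484/value=val_484");
    skewed.put("[103, val_103]", "/ds=2008-04-08/hr=b1/key=103/value=val_103");
    // toString() reflects HashMap iteration order, an implementation detail
    // that changed between JDK releases, so golden files embedding it could
    // not be shared across JDKs.
    System.out.println(skewed);
    // A sorted view gives one rendering that is stable on every JDK.
    System.out.println(new TreeMap<>(skewed));
  }
}

Once the output is rendered deterministically, one result file per test
suffices and the per-JDK copies can be dropped, as the diffs below do.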
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_5.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_5.q.out
new file mode 100644
index 0000000..09cb847
--- /dev/null
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_5.q.out
@@ -0,0 +1,504 @@
+PREHOOK: query: -- list bucketing DML: multiple skewed columns. 2 stages
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- SORT_QUERY_RESULTS
+
+-- create a skewed table
+create table list_bucketing_dynamic_part (key String, value String) 
+partitioned by (ds String, hr String) 
+skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103')) 
+stored as DIRECTORIES
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@list_bucketing_dynamic_part
+POSTHOOK: query: -- list bucketing DML: multiple skewed columns. 2 stages
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- SORT_QUERY_RESULTS
+
+-- create a skewed table
+create table list_bucketing_dynamic_part (key String, value String) 
+partitioned by (ds String, hr String) 
+skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103')) 
+stored as DIRECTORIES
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@list_bucketing_dynamic_part
+PREHOOK: query: -- list bucketing DML
+explain extended
+insert overwrite table list_bucketing_dynamic_part partition (ds='2008-04-08', hr) select key, value, hr from srcpart where ds='2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: -- list bucketing DML
+explain extended
+insert overwrite table list_bucketing_dynamic_part partition (ds='2008-04-08', hr) select key, value, hr from srcpart where ds='2008-04-08'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: srcpart
+            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string), hr (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Static Partition Specification: ds=2008-04-08/
+                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns key,value
+                      columns.comments 
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.list_bucketing_dynamic_part
+                      partition_columns ds/hr
+                      partition_columns.types string:string
+                      serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.list_bucketing_dynamic_part
+                TotalFiles: 1
+                GatherStats: true
+                MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=11
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=12
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+      Truncated Path -> Alias:
+        /srcpart/ds=2008-04-08/hr=11 [srcpart]
+        /srcpart/ds=2008-04-08/hr=12 [srcpart]
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_dynamic_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.list_bucketing_dynamic_part
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds='2008-04-08', hr) select key, value, hr from srcpart where ds='2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
+POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds='2008-04-08', hr) select key, value, hr from srcpart where ds='2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=11
+POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=12
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- check DML result
+desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='11')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: query: -- check DML result
+desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='11')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 11]    	 
+Database:           	default             	 
+Table:              	list_bucketing_dynamic_part	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	3                   
+	numRows             	500                 
+	rawDataSize         	5312                
+	totalSize           	5812                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key, value]        	 
+Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=11/key=484/value=val_484, [103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=11/key=103/value=val_103}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='12')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='12')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 12]    	 
+Database:           	default             	 
+Table:              	list_bucketing_dynamic_part	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	3                   
+	numRows             	500                 
+	rawDataSize         	5312                
+	totalSize           	5812                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key, value]        	 
+Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=12/key=484/value=val_484, [103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=12/key=103/value=val_103}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: select count(1) from srcpart where ds='2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from srcpart where ds='2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+1000
+PREHOOK: query: select count(1) from list_bucketing_dynamic_part where ds='2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_dynamic_part
+PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=11
+PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from list_bucketing_dynamic_part where ds='2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+1000
+PREHOOK: query: select key, value from srcpart where ds='2008-04-08' and key = "103" and value ="val_103"
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select key, value from srcpart where ds='2008-04-08' and key = "103" and value ="val_103"
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+103	val_103
+103	val_103
+103	val_103
+103	val_103
+PREHOOK: query: explain extended
+select key, value, ds, hr from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103"
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select key, value, ds, hr from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103"
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Partition Description:
+          Partition
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_dynamic_part
+              numFiles 3
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_dynamic_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.list_bucketing_dynamic_part
+            name: default.list_bucketing_dynamic_part
+          Partition
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_dynamic_part
+              numFiles 3
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_dynamic_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.list_bucketing_dynamic_part
+            name: default.list_bucketing_dynamic_part
+      Processor Tree:
+        TableScan
+          alias: list_bucketing_dynamic_part
+          Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+          GatherStats: false
+          Filter Operator
+            isSamplingPred: false
+            predicate: ((key = '103') and (value = 'val_103')) (type: boolean)
+            Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: '103' (type: string), 'val_103' (type: string), '2008-04-08' (type: string), hr (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+              ListSink
+
+PREHOOK: query: select key, value, ds, hr from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103"
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_dynamic_part
+PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=11
+PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select key, value, ds, hr from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103"
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+103	val_103	2008-04-08	11
+103	val_103	2008-04-08	11
+103	val_103	2008-04-08	12
+103	val_103	2008-04-08	12
+PREHOOK: query: -- clean up resources
+drop table list_bucketing_dynamic_part
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@list_bucketing_dynamic_part
+PREHOOK: Output: default@list_bucketing_dynamic_part
+POSTHOOK: query: -- clean up resources
+drop table list_bucketing_dynamic_part
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: Output: default@list_bucketing_dynamic_part

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.7.out
deleted file mode 100644
index c022618..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.7.out
+++ /dev/null
@@ -1,1007 +0,0 @@
-PREHOOK: query: -- list bucketing DML: dynamic partition. multiple skewed columns. merge.
--- The following explains merge example used in this test case
--- DML will generate 2 partitions
--- ds=2008-04-08/hr=a1
--- ds=2008-04-08/hr=b1
--- without merge, each partition has more files
--- ds=2008-04-08/hr=a1 has 2 files
--- ds=2008-04-08/hr=b1 has 6 files
--- with merge, each partition has fewer files
--- ds=2008-04-08/hr=a1 has 1 file
--- ds=2008-04-08/hr=b1 has 4 files
--- The following shows file size and name in each directory
--- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 155 000000_0
--- 155 000001_0
--- with merge
--- 254 000000_0
--- hr=b1/key=103/value=val_103:
--- without merge
--- 99 000000_0
--- 99 000001_0
--- with merge
--- 142 000001_0
--- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 5181 000000_0
--- 5181 000001_0
--- with merge
--- 5181 000000_0
--- 5181 000001_0
--- hr=b1/key=484/value=val_484
--- without merge
--- 87 000000_0
--- 87 000001_0
--- with merge
--- 118 000002_0 
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- create a skewed table
-create table list_bucketing_dynamic_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- list bucketing DML: dynamic partition. multiple skewed columns. merge.
--- The following explains merge example used in this test case
--- DML will generate 2 partitions
--- ds=2008-04-08/hr=a1
--- ds=2008-04-08/hr=b1
--- without merge, each partition has more files
--- ds=2008-04-08/hr=a1 has 2 files
--- ds=2008-04-08/hr=b1 has 6 files
--- with merge, each partition has fewer files
--- ds=2008-04-08/hr=a1 has 1 file
--- ds=2008-04-08/hr=b1 has 4 files
--- The following shows file size and name in each directory
--- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 155 000000_0
--- 155 000001_0
--- with merge
--- 254 000000_0
--- hr=b1/key=103/value=val_103:
--- without merge
--- 99 000000_0
--- 99 000001_0
--- with merge
--- 142 000001_0
--- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 5181 000000_0
--- 5181 000001_0
--- with merge
--- 5181 000000_0
--- 5181 000001_0
--- hr=b1/key=484/value=val_484
--- without merge
--- 87 000000_0
--- 87 000001_0
--- with merge
--- 118 000002_0 
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- create a skewed table
-create table list_bucketing_dynamic_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_dynamic_part
-PREHOOK: query: -- list bucketing DML without merge. use bucketizing to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML without merge. use bucketizing to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: srcpart
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string), if(((UDFToDouble(key) % 100.0) = 0.0), 'a1', 'b1') (type: string)
-              outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/
-                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_dynamic_part
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_dynamic_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [srcpart]
-        /srcpart/ds=2008-04-08/hr=12 [srcpart]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_dynamic_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
-POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_dynamic_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_dynamic_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-ds=2008-04-08/hr=a1
-ds=2008-04-08/hr=b1
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, a1]    	 
-Database:           	default             	 
-Table:              	list_bucketing_dynamic_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	2                   
-	numRows             	16                  
-	rawDataSize         	136                 
-	totalSize           	310                 
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, b1]    	 
-Database:           	default             	 
-Table:              	list_bucketing_dynamic_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	6                   
-	numRows             	984                 
-	rawDataSize         	9488                
-	totalSize           	10734               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103, [484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: -- list bucketing DML with merge. use bucketizing to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML with merge. use bucketizing to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
-  Stage-4
-  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
-  Stage-2 depends on stages: Stage-0
-  Stage-3
-  Stage-5
-  Stage-6 depends on stages: Stage-5
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: srcpart
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string), if(((UDFToDouble(key) % 100.0) = 0.0), 'a1', 'b1') (type: string)
-              outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/
-                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_dynamic_part
-                      partition_columns hr
-                      partition_columns.types string
-                      serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_dynamic_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [srcpart]
-        /srcpart/ds=2008-04-08/hr=12 [srcpart]
-
-  Stage: Stage-7
-    Conditional Operator
-
-  Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns hr
-                partition_columns.types string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_dynamic_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-  Stage: Stage-3
-    Merge File Operator
-      Map Operator Tree:
-          RCFile Merge Operator
-      merge level: block
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_dynamic_part
-              partition_columns hr
-              partition_columns.types string
-              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns hr
-                partition_columns.types string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_dynamic_part
-            name: default.list_bucketing_dynamic_part
-      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-5
-    Merge File Operator
-      Map Operator Tree:
-          RCFile Merge Operator
-      merge level: block
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_dynamic_part
-              partition_columns hr
-              partition_columns.types string
-              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns hr
-                partition_columns.types string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_dynamic_part
-            name: default.list_bucketing_dynamic_part
-      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
-POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_dynamic_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_dynamic_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-ds=2008-04-08/hr=a1
-ds=2008-04-08/hr=b1
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, a1]    	 
-Database:           	default             	 
-Table:              	list_bucketing_dynamic_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	1                   
-	numRows             	16                  
-	rawDataSize         	136                 
-	totalSize           	254                 
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, b1]    	 
-Database:           	default             	 
-Table:              	list_bucketing_dynamic_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	4                   
-	numRows             	984                 
-	rawDataSize         	9488                
-	totalSize           	10622               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103, [484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-1000
-PREHOOK: query: select count(*) from list_bucketing_dynamic_part
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from list_bucketing_dynamic_part
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-1000
-PREHOOK: query: explain extended
-select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Partition Description:
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr a1
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_dynamic_part
-              numFiles 1
-              numRows 16
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 136
-              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 254
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_dynamic_part
-            name: default.list_bucketing_dynamic_part
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr b1
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_dynamic_part
-              numFiles 4
-              numRows 984
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 9488
-              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 10622
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_dynamic_part
-            name: default.list_bucketing_dynamic_part
-      Processor Tree:
-        TableScan
-          alias: list_bucketing_dynamic_part
-          Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
-          GatherStats: false
-          Filter Operator
-            isSamplingPred: false
-            predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
-            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-              ListSink
-
-PREHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-484	val_484	2008-04-08	b1
-484	val_484	2008-04-08	b1
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	12
-PREHOOK: query: -- clean up
-drop table list_bucketing_dynamic_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Output: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- clean up
-drop table list_bucketing_dynamic_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Output: default@list_bucketing_dynamic_part


[19/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_4.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_4.q.out
new file mode 100644
index 0000000..5f0406a
--- /dev/null
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_4.q.out
@@ -0,0 +1,811 @@
+PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- SORT_QUERY_RESULTS
+
+-- list bucketing DML: static partition. multiple skewed columns. merge.
+-- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+--  5263 000000_0
+--  5263 000001_0
+-- ds=2008-04-08/hr=11/key=103/value=val_103:
+-- 99 000000_0
+-- 99 000001_0
+-- after merge
+-- 142 000000_0
+-- ds=2008-04-08/hr=11/key=484/value=val_484:
+-- 87 000000_0
+-- 87 000001_0
+-- after merge
+-- 118 000001_0
+
+-- create a skewed table
+create table list_bucketing_static_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
+    stored as DIRECTORIES
+    STORED AS RCFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@list_bucketing_static_part
+POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- SORT_QUERY_RESULTS
+
+-- list bucketing DML: static partition. multiple skewed columns. merge.
+-- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+--  5263 000000_0
+--  5263 000001_0
+-- ds=2008-04-08/hr=11/key=103/value=val_103:
+-- 99 000000_0
+-- 99 000001_0
+-- after merge
+-- 142 000000_0
+-- ds=2008-04-08/hr=11/key=484/value=val_484:
+-- 87 000000_0
+-- 87 000001_0
+-- after merge
+-- 118 000001_0
+
+-- create a skewed table
+create table list_bucketing_static_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
+    stored as DIRECTORIES
+    STORED AS RCFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@list_bucketing_static_part
+PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: srcpart
+            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Static Partition Specification: ds=2008-04-08/hr=11/
+                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns key,value
+                      columns.comments 
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.list_bucketing_static_part
+                      partition_columns ds/hr
+                      partition_columns.types string:string
+                      serialization.ddl struct list_bucketing_static_part { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.list_bucketing_static_part
+                TotalFiles: 1
+                GatherStats: true
+                MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=11
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=12
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+      Truncated Path -> Alias:
+        /srcpart/ds=2008-04-08/hr=11 [srcpart]
+        /srcpart/ds=2008-04-08/hr=12 [srcpart]
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 11
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@list_bucketing_static_part
+ds=2008-04-08/hr=11
+PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_static_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 11]    	 
+Database:           	default             	 
+Table:              	list_bucketing_static_part	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	6                   
+	numRows             	1000                
+	rawDataSize         	9624                
+	totalSize           	10898               
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key, value]        	 
+Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484/value=val_484, [103, val_103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103/value=val_103}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
+  Stage-4
+  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
+  Stage-2 depends on stages: Stage-0
+  Stage-3
+  Stage-5
+  Stage-6 depends on stages: Stage-5
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: srcpart
+            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Static Partition Specification: ds=2008-04-08/hr=11/
+                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns key,value
+                      columns.comments 
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.list_bucketing_static_part
+                      partition_columns.types string:string
+                      serialization.ddl struct list_bucketing_static_part { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.list_bucketing_static_part
+                TotalFiles: 1
+                GatherStats: true
+                MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=11
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=12
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+      Truncated Path -> Alias:
+        /srcpart/ds=2008-04-08/hr=11 [srcpart]
+        /srcpart/ds=2008-04-08/hr=12 [srcpart]
+
+  Stage: Stage-7
+    Conditional Operator
+
+  Stage: Stage-4
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 11
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+  Stage: Stage-3
+    Merge File Operator
+      Map Operator Tree:
+          RCFile Merge Operator
+      merge level: block
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_static_part
+              partition_columns.types string:string
+              serialization.ddl struct list_bucketing_static_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+            name: default.list_bucketing_static_part
+      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+      Truncated Path -> Alias:
+#### A masked pattern was here ####
+
+  Stage: Stage-5
+    Merge File Operator
+      Map Operator Tree:
+          RCFile Merge Operator
+      merge level: block
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_static_part
+              partition_columns.types string:string
+              serialization.ddl struct list_bucketing_static_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+            name: default.list_bucketing_static_part
+      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+      Truncated Path -> Alias:
+#### A masked pattern was here ####
+
+  Stage: Stage-6
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@list_bucketing_static_part
+ds=2008-04-08/hr=11
+PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_static_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 11]    	 
+Database:           	default             	 
+Table:              	list_bucketing_static_part	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	4                   
+	numRows             	1000                
+	rawDataSize         	9624                
+	totalSize           	10786               
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key, value]        	 
+Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484/value=val_484, [103, val_103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103/value=val_103}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+1000
+PREHOOK: query: select count(*) from list_bucketing_static_part
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from list_bucketing_static_part
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+1000
+PREHOOK: query: explain extended
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Partition Description:
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_static_part
+              numFiles 4
+              numRows 1000
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 9624
+              serialization.ddl struct list_bucketing_static_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              totalSize 10786
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+            name: default.list_bucketing_static_part
+      Processor Tree:
+        TableScan
+          alias: list_bucketing_static_part
+          Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
+          GatherStats: false
+          Filter Operator
+            isSamplingPred: false
+            predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
+            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: '484' (type: string), 'val_484' (type: string), '2008-04-08' (type: string), '11' (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
+              ListSink
+
+PREHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+484	val_484	2008-04-08	11
+484	val_484	2008-04-08	11
+PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+484	val_484	2008-04-08	11
+484	val_484	2008-04-08	12
+PREHOOK: query: -- clean up
+drop table list_bucketing_static_part
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Output: default@list_bucketing_static_part
+POSTHOOK: query: -- clean up
+drop table list_bucketing_static_part
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Output: default@list_bucketing_static_part

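A note on why these golden files churn at all: the COLUMN_STATS_ACCURATE parameter shown above is now a small JSON map (for example {\"BASIC_STATS\":\"true\"}) rather than a bare boolean, and JSON rendered from a Java map is only byte-stable if the map iterates deterministically. HashMap iteration order is unspecified and changed between JDK 7 and JDK 8, which is exactly what made these .q.out files JDK-sensitive. The following is a minimal, illustrative sketch (the class name and entry values are made up for this example, not Hive code) of the effect and of how a sorted map removes it:

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class StatsPropertyOrder {
    public static void main(String[] args) {
        // HashMap iteration order is unspecified and can differ across JDKs,
        // so text rendered straight from it is not byte-stable.
        Map<String, String> unstable = new HashMap<>();
        unstable.put("COLUMN_STATS", "{\"key\":\"true\",\"value\":\"true\"}");
        unstable.put("BASIC_STATS", "true");
        System.out.println("HashMap (JVM-dependent order): " + unstable);

        // A sorted map has exactly one iteration order on every JVM,
        // which is what makes a single golden file possible.
        Map<String, String> stable = new TreeMap<>(unstable);
        System.out.println("TreeMap (deterministic order): " + stable);
    }
}

Sorting the keys puts BASIC_STATS before COLUMN_STATS on any JVM, matching the ordering visible in the regenerated outputs above.
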
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_5.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_5.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_5.q.java1.7.out
deleted file mode 100644
index a0947b2..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_5.q.java1.7.out
+++ /dev/null
@@ -1,506 +0,0 @@
-PREHOOK: query: -- list bucketing DML: multiple skewed columns. 2 stages
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- create a skewed table
-create table list_bucketing_dynamic_part (key String, value String) 
-partitioned by (ds String, hr String) 
-skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103')) 
-stored as DIRECTORIES
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- list bucketing DML: multiple skewed columns. 2 stages
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- create a skewed table
-create table list_bucketing_dynamic_part (key String, value String) 
-partitioned by (ds String, hr String) 
-skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103')) 
-stored as DIRECTORIES
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_dynamic_part
-PREHOOK: query: -- list bucketing DML
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds='2008-04-08', hr) select key, value, hr from srcpart where ds='2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds='2008-04-08', hr) select key, value, hr from srcpart where ds='2008-04-08'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: srcpart
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string), hr (type: string)
-              outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/
-                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_dynamic_part
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.list_bucketing_dynamic_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [srcpart]
-        /srcpart/ds=2008-04-08/hr=12 [srcpart]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.list_bucketing_dynamic_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds='2008-04-08', hr) select key, value, hr from srcpart where ds='2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
-POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds='2008-04-08', hr) select key, value, hr from srcpart where ds='2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=11
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=12
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- check DML result
-desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_dynamic_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	3                   
-	numRows             	500                 
-	rawDataSize         	5312                
-	totalSize           	5812                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=11/key=103/value=val_103, [484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=11/key=484/value=val_484}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='12')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='12')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 12]    	 
-Database:           	default             	 
-Table:              	list_bucketing_dynamic_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	3                   
-	numRows             	500                 
-	rawDataSize         	5312                
-	totalSize           	5812                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=12/key=103/value=val_103, [484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=12/key=484/value=val_484}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: select count(1) from srcpart where ds='2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds='2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-1000
-PREHOOK: query: select count(1) from list_bucketing_dynamic_part where ds='2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=11
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from list_bucketing_dynamic_part where ds='2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-1000
-PREHOOK: query: select key, value from srcpart where ds='2008-04-08' and key = "103" and value ="val_103"
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select key, value from srcpart where ds='2008-04-08' and key = "103" and value ="val_103"
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-103	val_103
-103	val_103
-103	val_103
-103	val_103
-PREHOOK: query: explain extended
-select key, value, ds, hr from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103"
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select key, value, ds, hr from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103"
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Partition Description:
-          Partition
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_dynamic_part
-              numFiles 3
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.list_bucketing_dynamic_part
-            name: default.list_bucketing_dynamic_part
-          Partition
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_dynamic_part
-              numFiles 3
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.list_bucketing_dynamic_part
-            name: default.list_bucketing_dynamic_part
-      Processor Tree:
-        TableScan
-          alias: list_bucketing_dynamic_part
-          Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-          GatherStats: false
-          Filter Operator
-            isSamplingPred: false
-            predicate: ((key = '103') and (value = 'val_103')) (type: boolean)
-            Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: '103' (type: string), 'val_103' (type: string), '2008-04-08' (type: string), hr (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-              ListSink
-
-PREHOOK: query: select key, value, ds, hr from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103"
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=11
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select key, value, ds, hr from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103"
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-103	val_103	2008-04-08	11
-103	val_103	2008-04-08	11
-103	val_103	2008-04-08	12
-103	val_103	2008-04-08	12
-PREHOOK: query: -- clean up resources
-drop table list_bucketing_dynamic_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Output: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- clean up resources
-drop table list_bucketing_dynamic_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Output: default@list_bucketing_dynamic_part

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_5.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_5.q.java1.8.out b/ql/src/test/results/clientpositive/list_bucket_dml_5.q.java1.8.out
deleted file mode 100644
index 1c33382..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_5.q.java1.8.out
+++ /dev/null
@@ -1,617 +0,0 @@
-PREHOOK: query: -- list bucketing DML: multiple skewed columns. 2 stages
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- create a skewed table
-create table list_bucketing_dynamic_part (key String, value String) 
-partitioned by (ds String, hr String) 
-skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103')) 
-stored as DIRECTORIES
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- list bucketing DML: multiple skewed columns. 2 stages
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- create a skewed table
-create table list_bucketing_dynamic_part (key String, value String) 
-partitioned by (ds String, hr String) 
-skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103')) 
-stored as DIRECTORIES
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_dynamic_part
-PREHOOK: query: -- list bucketing DML
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds='2008-04-08', hr) select key, value, hr from srcpart where ds='2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds='2008-04-08', hr) select key, value, hr from srcpart where ds='2008-04-08'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            srcpart
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_TAB
-            TOK_TABNAME
-               list_bucketing_dynamic_part
-            TOK_PARTSPEC
-               TOK_PARTVAL
-                  ds
-                  '2008-04-08'
-               TOK_PARTVAL
-                  hr
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               key
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               value
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               hr
-      TOK_WHERE
-         =
-            TOK_TABLE_OR_COL
-               ds
-            '2008-04-08'
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: srcpart
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string), hr (type: string)
-              outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/
-                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_dynamic_part
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.list_bucketing_dynamic_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [$hdt$_0:srcpart]
-        /srcpart/ds=2008-04-08/hr=12 [$hdt$_0:srcpart]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.list_bucketing_dynamic_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds='2008-04-08', hr) select key, value, hr from srcpart where ds='2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
-POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds='2008-04-08', hr) select key, value, hr from srcpart where ds='2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=11
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=12
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- check DML result
-desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_dynamic_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	3                   
-	numRows             	500                 
-	rawDataSize         	5312                
-	totalSize           	5812                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=11/key=484/value=val_484, [103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=11/key=103/value=val_103}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='12')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='12')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 12]    	 
-Database:           	default             	 
-Table:              	list_bucketing_dynamic_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	3                   
-	numRows             	500                 
-	rawDataSize         	5312                
-	totalSize           	5812                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
-InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=12/key=484/value=val_484, [103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=12/key=103/value=val_103}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: select count(1) from srcpart where ds='2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds='2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-1000
-PREHOOK: query: select count(1) from list_bucketing_dynamic_part where ds='2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=11
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from list_bucketing_dynamic_part where ds='2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-1000
-PREHOOK: query: select key, value from srcpart where ds='2008-04-08' and key = "103" and value ="val_103"
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select key, value from srcpart where ds='2008-04-08' and key = "103" and value ="val_103"
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-103	val_103
-103	val_103
-103	val_103
-103	val_103
-PREHOOK: query: explain extended
-select key, value, ds, hr from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103"
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select key, value, ds, hr from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103"
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            list_bucketing_dynamic_part
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_DIR
-            TOK_TMP_FILE
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               key
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               value
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               ds
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               hr
-      TOK_WHERE
-         and
-            and
-               =
-                  TOK_TABLE_OR_COL
-                     ds
-                  '2008-04-08'
-               =
-                  TOK_TABLE_OR_COL
-                     key
-                  "103"
-            =
-               TOK_TABLE_OR_COL
-                  value
-               "val_103"
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: list_bucketing_dynamic_part
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: ((key = '103') and (value = 'val_103')) (type: boolean)
-              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: '103' (type: string), 'val_103' (type: string), '2008-04-08' (type: string), hr (type: string)
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        columns _col0,_col1,_col2,_col3
-                        columns.types string:string:string:string
-                        escape.delim \
-                        hive.serialization.extend.nesting.levels true
-                        serialization.format 1
-                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  TotalFiles: 1
-                  GatherStats: false
-                  MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: value=val_103
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_dynamic_part
-              numFiles 3
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.list_bucketing_dynamic_part
-            name: default.list_bucketing_dynamic_part
-#### A masked pattern was here ####
-          Partition
-            base file name: value=val_103
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_dynamic_part
-              numFiles 3
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.list_bucketing_dynamic_part
-            name: default.list_bucketing_dynamic_part
-      Truncated Path -> Alias:
-        /list_bucketing_dynamic_part/ds=2008-04-08/hr=11/key=103/value=val_103 [list_bucketing_dynamic_part]
-        /list_bucketing_dynamic_part/ds=2008-04-08/hr=12/key=103/value=val_103 [list_bucketing_dynamic_part]
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: select key, value, ds, hr from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103"
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=11
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select key, value, ds, hr from list_bucketing_dynamic_part where ds='2008-04-08' and key = "103" and value ="val_103"
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-103	val_103	2008-04-08	11
-103	val_103	2008-04-08	11
-103	val_103	2008-04-08	12
-103	val_103	2008-04-08	12
-PREHOOK: query: -- clean up resources
-drop table list_bucketing_dynamic_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Output: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- clean up resources
-drop table list_bucketing_dynamic_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Output: default@list_bucketing_dynamic_part

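The deleted golden files above record COLUMN_STATS_ACCURATE as a bare "true", while the updated files in this series record it as a JSON object with a fixed key order ("BASIC_STATS" before "COLUMN_STATS"). The underlying issue is a JDK behavior, not a Hive one: text rendered from a HashMap follows its iteration order, which changed between JDK 7 and JDK 8, so outputs built that way diverged per JVM. A minimal, self-contained Java sketch of that behavior (illustrative only, not Hive code; the class name and entries are made up for the example):

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class StatsKeyOrder {
    public static void main(String[] args) {
        // HashMap iteration order is unspecified and changed between JDK 7
        // and JDK 8, so text rendered from it is not stable across JVMs.
        Map<String, String> stats = new HashMap<>();
        stats.put("COLUMN_STATS", "{\"key\":\"true\",\"value\":\"true\"}");
        stats.put("BASIC_STATS", "true");
        System.out.println(stats);

        // A TreeMap iterates in sorted key order, so BASIC_STATS always
        // prints before COLUMN_STATS, regardless of the JVM.
        System.out.println(new TreeMap<>(stats));
    }
}

Rendering the property from a sorted map is one way to get the stable key ordering that the regenerated .q.out files in this series expect.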

[24/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/join0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join0.q.out b/ql/src/test/results/clientpositive/join0.q.out
new file mode 100644
index 0000000..59122e2
--- /dev/null
+++ b/ql/src/test/results/clientpositive/join0.q.out
@@ -0,0 +1,238 @@
+Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+EXPLAIN
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+PREHOOK: type: QUERY
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+EXPLAIN
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key < 10) (type: boolean)
+              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: string), value (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: string), _col1 (type: string)
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key < 10) (type: boolean)
+              Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: string), value (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  sort order: 
+                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: string), _col1 (type: string)
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          keys:
+            0 
+            1 
+          outputColumnNames: _col0, _col1, _col2, _col3
+          Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            Reduce Output Operator
+              key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
+              sort order: ++++
+              Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
+          outputColumnNames: _col0, _col1, _col2, _col3
+          Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: EXPLAIN FORMATTED
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN FORMATTED
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+POSTHOOK: type: QUERY
+{"STAGE DEPENDENCIES":{"Stage-1":{"ROOT STAGE":"TRUE"},"Stage-2":{"DEPENDENT STAGES":"Stage-1"},"Stage-0":{"DEPENDENT STAGES":"Stage-2"}},"STAGE PLANS":{"Stage-1":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"alias:":"src","Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE","children":{"Filter Operator":{"predicate:":"(key < 10) (type: boolean)","Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","children":{"Select Operator":{"expressions:":"key (type: string), value (type: string)","outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","children":{"Reduce Output Operator":{"sort order:":"","Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: string), _col1 (type: string)"}}}}}}}},{"TableScan":{"alias:":"src","Statistics:":"Num rows: 500 Data size: 5312 Basic stats: COM
 PLETE Column stats: NONE","children":{"Filter Operator":{"predicate:":"(key < 10) (type: boolean)","Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","children":{"Select Operator":{"expressions:":"key (type: string), value (type: string)","outputColumnNames:":["_col0","_col1"],"Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","children":{"Reduce Output Operator":{"sort order:":"","Statistics:":"Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE","value expressions:":"_col0 (type: string), _col1 (type: string)"}}}}}}}}],"Reduce Operator Tree:":{"Join Operator":{"condition map:":[{"":"Inner Join 0 to 1"}],"keys:":{},"outputColumnNames:":["_col0","_col1","_col2","_col3"],"Statistics:":"Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE","children":{"File Output Operator":{"compressed:":"false","table:":{"input format:":"org.apache.hadoop.mapred.SequenceFileInputFormat","outp
 ut format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe"}}}}}}},"Stage-2":{"Map Reduce":{"Map Operator Tree:":[{"TableScan":{"children":{"Reduce Output Operator":{"key expressions:":"_col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)","sort order:":"++++","Statistics:":"Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE"}}}}],"Reduce Operator Tree:":{"Select Operator":{"expressions:":"KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)","outputColumnNames:":["_col0","_col1","_col2","_col3"],"Statistics:":"Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE","children":{"File Output Operator":{"compressed:":"false","Statistics:":"Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE","table:":{"input format:":"org.apache.hadoo
 p.mapred.SequenceFileInputFormat","output format:":"org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat","serde:":"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"}}}}}}},"Stage-0":{"Fetch Operator":{"limit:":"-1","Processor Tree:":{"ListSink":{}}}}}}
+Warning: Shuffle Join JOIN[8][tables = [src1, src2]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	2	val_2
+0	val_0	2	val_2
+0	val_0	2	val_2
+0	val_0	4	val_4
+0	val_0	4	val_4
+0	val_0	4	val_4
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	8	val_8
+0	val_0	8	val_8
+0	val_0	8	val_8
+0	val_0	9	val_9
+0	val_0	9	val_9
+0	val_0	9	val_9
+2	val_2	0	val_0
+2	val_2	0	val_0
+2	val_2	0	val_0
+2	val_2	2	val_2
+2	val_2	4	val_4
+2	val_2	5	val_5
+2	val_2	5	val_5
+2	val_2	5	val_5
+2	val_2	8	val_8
+2	val_2	9	val_9
+4	val_4	0	val_0
+4	val_4	0	val_0
+4	val_4	0	val_0
+4	val_4	2	val_2
+4	val_4	4	val_4
+4	val_4	5	val_5
+4	val_4	5	val_5
+4	val_4	5	val_5
+4	val_4	8	val_8
+4	val_4	9	val_9
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	2	val_2
+5	val_5	2	val_2
+5	val_5	2	val_2
+5	val_5	4	val_4
+5	val_5	4	val_4
+5	val_5	4	val_4
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	8	val_8
+5	val_5	8	val_8
+5	val_5	8	val_8
+5	val_5	9	val_9
+5	val_5	9	val_9
+5	val_5	9	val_9
+8	val_8	0	val_0
+8	val_8	0	val_0
+8	val_8	0	val_0
+8	val_8	2	val_2
+8	val_8	4	val_4
+8	val_8	5	val_5
+8	val_8	5	val_5
+8	val_8	5	val_5
+8	val_8	8	val_8
+8	val_8	9	val_9
+9	val_9	0	val_0
+9	val_9	0	val_0
+9	val_9	0	val_0
+9	val_9	2	val_2
+9	val_9	4	val_4
+9	val_9	5	val_5
+9	val_9	5	val_5
+9	val_9	5	val_5
+9	val_9	8	val_8
+9	val_9	9	val_9

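The EXPLAIN FORMATTED block in the new join0.q.out above is emitted as a single JSON document whose top-level keys ("STAGE DEPENDENCIES", "STAGE PLANS") mirror the sections of the plain-text plan, so a test or tool can inspect the plan structurally instead of scraping text. A hedged sketch of such an inspection, assuming the Jackson library is on the classpath (the class name, variable names, and the abbreviated JSON literal are made up for the example):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ReadExplainFormatted {
    public static void main(String[] args) throws Exception {
        // Stand-in for the single-line JSON that EXPLAIN FORMATTED prints
        // (abbreviated; see the join0.q.out hunk above for the real shape).
        String explainJson =
            "{\"STAGE DEPENDENCIES\":{\"Stage-1\":{\"ROOT STAGE\":\"TRUE\"}},\"STAGE PLANS\":{}}";
        JsonNode plan = new ObjectMapper().readTree(explainJson);
        // Top-level keys mirror the sections of the text EXPLAIN output.
        System.out.println(plan.get("STAGE DEPENDENCIES").get("Stage-1"));
    }
}
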
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_10.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_10.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_10.q.java1.7.out
deleted file mode 100644
index 8447e86..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_10.q.java1.7.out
+++ /dev/null
@@ -1,361 +0,0 @@
-PREHOOK: query: -- run this test case in minimr to ensure it works in cluster
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key) on ('484','51','103')
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- run this test case in minimr to ensure it works in cluster
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key) on ('484','51','103')
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_static_part
-PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from src
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from src
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
-  Stage-4
-  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
-  Stage-2 depends on stages: Stage-0
-  Stage-3
-  Stage-5
-  Stage-6 depends on stages: Stage-5
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/hr=11/
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_static_part
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_static_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_static_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numRows 500
-              rawDataSize 5312
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numRows 500
-                rawDataSize 5312
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-      Truncated Path -> Alias:
-        /src [src]
-
-  Stage: Stage-7
-    Conditional Operator
-
-  Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-  Stage: Stage-3
-    Merge File Operator
-      Map Operator Tree:
-          RCFile Merge Operator
-      merge level: block
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              partition_columns.types string:string
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-5
-    Merge File Operator
-      Map Operator Tree:
-          RCFile Merge Operator
-      merge level: block
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              partition_columns.types string:string
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_static_part
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_static_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	4                   
-	numRows             	500                 
-	rawDataSize         	4812                
-	totalSize           	5520                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key]               	 
-Skewed Values:      	[[484], [51], [103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484, [103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103, [51]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=51}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_10.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_10.q.java1.8.out b/ql/src/test/results/clientpositive/list_bucket_dml_10.q.java1.8.out
deleted file mode 100644
index d1b9598..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_10.q.java1.8.out
+++ /dev/null
@@ -1,389 +0,0 @@
-PREHOOK: query: -- run this test case in minimr to ensure it works in cluster
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key) on ('484','51','103')
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- run this test case in minimr to ensure it works in cluster
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key) on ('484','51','103')
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_static_part
-PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from src
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from src
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            src
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_TAB
-            TOK_TABNAME
-               list_bucketing_static_part
-            TOK_PARTSPEC
-               TOK_PARTVAL
-                  ds
-                  '2008-04-08'
-               TOK_PARTVAL
-                  hr
-                  '11'
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               key
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               value
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
-  Stage-4
-  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
-  Stage-2 depends on stages: Stage-0
-  Stage-3
-  Stage-5
-  Stage-6 depends on stages: Stage-5
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/hr=11/
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_static_part
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_static_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_static_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numRows 500
-              rawDataSize 5312
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE true
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numRows 500
-                rawDataSize 5312
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-      Truncated Path -> Alias:
-        /src [src]
-
-  Stage: Stage-7
-    Conditional Operator
-
-  Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-  Stage: Stage-3
-    Merge File Operator
-      Map Operator Tree:
-          RCFile Merge Operator
-      merge level: block
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              partition_columns.types string:string
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-5
-    Merge File Operator
-      Map Operator Tree:
-          RCFile Merge Operator
-      merge level: block
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              partition_columns.types string:string
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_static_part
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_static_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	4                   
-	numRows             	0                   
-	rawDataSize         	0                   
-	totalSize           	5520                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key]               	 
-Skewed Values:      	[[484], [51], [103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103, [51]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=51, [484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   

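The two deletions above remove the per-JDK golden files that tests marked with the JAVA_VERSION_SPECIFIC_OUTPUT comment were compared against; with the JDK-sensitive output made deterministic, a single .q.out (added below) suffices for both JVMs. As a hedged sketch only -- this is not Hive's actual test-harness code -- per-JDK selection can be done by probing for a version-suffixed file and falling back to the shared one:

import java.io.File;

public class GoldenFilePicker {
    // Illustrative only: pick a per-JDK golden file when one exists,
    // otherwise fall back to the shared .q.out file.
    static File expectedOutput(File resultsDir, String qfile) {
        String v = System.getProperty("java.specification.version"); // e.g. "1.7" or "1.8"
        File perJdk = new File(resultsDir, qfile + ".q.java" + v + ".out");
        return perJdk.exists() ? perJdk : new File(resultsDir, qfile + ".q.out");
    }

    public static void main(String[] args) {
        File dir = new File("ql/src/test/results/clientpositive");
        System.out.println(expectedOutput(dir, "list_bucket_dml_10"));
    }
}

Dropping the suffixed files removes the need for any such selection logic: after this change every test reads the one shared golden file.
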
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_10.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_10.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_10.q.out
new file mode 100644
index 0000000..d4681b7
--- /dev/null
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_10.q.out
@@ -0,0 +1,359 @@
+PREHOOK: query: -- run this test case in minimr to ensure it works in cluster
+
+-- list bucketing DML: static partition. multiple skewed columns.
+-- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+--  5263 000000_0
+--  5263 000001_0
+-- ds=2008-04-08/hr=11/key=103/value=val_103:
+-- 99 000000_0
+-- 99 000001_0
+-- ds=2008-04-08/hr=11/key=484/value=val_484:
+-- 87 000000_0
+-- 87 000001_0
+
+-- create a skewed table
+create table list_bucketing_static_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (key) on ('484','51','103')
+    stored as DIRECTORIES
+    STORED AS RCFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@list_bucketing_static_part
+POSTHOOK: query: -- run this test case in minimr to ensure it works in cluster
+
+-- list bucketing DML: static partition. multiple skewed columns.
+-- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+--  5263 000000_0
+--  5263 000001_0
+-- ds=2008-04-08/hr=11/key=103/value=val_103:
+-- 99 000000_0
+-- 99 000001_0
+-- ds=2008-04-08/hr=11/key=484/value=val_484:
+-- 87 000000_0
+-- 87 000001_0
+
+-- create a skewed table
+create table list_bucketing_static_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (key) on ('484','51','103')
+    stored as DIRECTORIES
+    STORED AS RCFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@list_bucketing_static_part
+PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from src
+PREHOOK: type: QUERY
+POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from src
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
+  Stage-4
+  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
+  Stage-2 depends on stages: Stage-0
+  Stage-3
+  Stage-5
+  Stage-6 depends on stages: Stage-5
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Static Partition Specification: ds=2008-04-08/hr=11/
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns key,value
+                      columns.comments 
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.list_bucketing_static_part
+                      partition_columns.types string:string
+                      serialization.ddl struct list_bucketing_static_part { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.list_bucketing_static_part
+                TotalFiles: 1
+                GatherStats: true
+                MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numRows 500
+              rawDataSize 5312
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numRows 500
+                rawDataSize 5312
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Truncated Path -> Alias:
+        /src [src]
+
+  Stage: Stage-7
+    Conditional Operator
+
+  Stage: Stage-4
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 11
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+  Stage: Stage-3
+    Merge File Operator
+      Map Operator Tree:
+          RCFile Merge Operator
+      merge level: block
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_static_part
+              partition_columns.types string:string
+              serialization.ddl struct list_bucketing_static_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+            name: default.list_bucketing_static_part
+      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+      Truncated Path -> Alias:
+#### A masked pattern was here ####
+
+  Stage: Stage-5
+    Merge File Operator
+      Map Operator Tree:
+          RCFile Merge Operator
+      merge level: block
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_static_part
+              partition_columns.types string:string
+              serialization.ddl struct list_bucketing_static_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+            name: default.list_bucketing_static_part
+      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+      Truncated Path -> Alias:
+#### A masked pattern was here ####
+
+  Stage: Stage-6
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
+select key, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
+select key, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@list_bucketing_static_part
+ds=2008-04-08/hr=11
+PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_static_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 11]    	 
+Database:           	default             	 
+Table:              	list_bucketing_static_part	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	4                   
+	numRows             	500                 
+	rawDataSize         	4812                
+	totalSize           	5520                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key]               	 
+Skewed Values:      	[[484], [51], [103]]	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103, [51]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=51, [484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
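For context on what this golden file pins down: because the partition is skewed by key on ('484','51','103') and stored as directories, a point lookup on one of those keys can be answered from that key's subdirectory alone. A minimal sketch of such a probe, assuming the table and partition created above (this query is illustrative and not part of the diff):

-- illustrative only; assumes the skewed static partition loaded above
select key, value
from list_bucketing_static_part
where ds = '2008-04-08' and hr = '11' and key = '484';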

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_11.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_11.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_11.q.java1.7.out
deleted file mode 100644
index b58d17c..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_11.q.java1.7.out
+++ /dev/null
@@ -1,329 +0,0 @@
-PREHOOK: query: -- Ensure it works if skewed column is not the first column in the table columns
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. one skewed column, multiple skewed values.
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (value) on ('val_466','val_287','val_82')
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- Ensure it works if skewed column is not the first column in the table columns
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. one skewed column, multiple skewed values.
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (value) on ('val_466','val_287','val_82')
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_static_part
-PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from src
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from src
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/hr=11/
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_static_part
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_static_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_static_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numRows 500
-              rawDataSize 5312
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numRows 500
-                rawDataSize 5312
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-      Truncated Path -> Alias:
-        /src [src]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_static_part
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_static_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	4                   
-	numRows             	500                 
-	rawDataSize         	4812                
-	totalSize           	5522                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[value]             	 
-Skewed Values:      	[[val_466], [val_287], [val_82]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[val_82]=/list_bucketing_static_part/ds=2008-04-08/hr=11/value=val_82, [val_287]=/list_bucketing_static_part/ds=2008-04-08/hr=11/value=val_287, [val_466]=/list_bucketing_static_part/ds=2008-04-08/hr=11/value=val_466}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: explain extended
-select key, value from list_bucketing_static_part where ds='2008-04-08' and hr='11' and value = "val_466"
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select key, value from list_bucketing_static_part where ds='2008-04-08' and hr='11' and value = "val_466"
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Partition Description:
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              numFiles 4
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 4812
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 5522
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      Processor Tree:
-        TableScan
-          alias: list_bucketing_static_part
-          Statistics: Num rows: 500 Data size: 4812 Basic stats: COMPLETE Column stats: NONE
-          GatherStats: false
-          Filter Operator
-            isSamplingPred: false
-            predicate: (value = 'val_466') (type: boolean)
-            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: key (type: string), 'val_466' (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-              ListSink
-
-PREHOOK: query: select key, value from list_bucketing_static_part where ds='2008-04-08' and hr='11' and value = "val_466"
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select key, value from list_bucketing_static_part where ds='2008-04-08' and hr='11' and value = "val_466"
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-466	val_466
-466	val_466
-466	val_466
-PREHOOK: query: drop table list_bucketing_static_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: drop table list_bucketing_static_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Output: default@list_bucketing_static_part
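Before this file disappears, note how the result above lines up with the layout: the three 466/val_466 rows were read from the value=val_466 subdirectory listed under 'Skewed Value to Truncated Path'. As a hedged contrast (a hypothetical query, assuming the table as it stood before the drop above), a value outside the skew list would be served from the default list-bucketing directory instead, with no subdirectory pruning:

-- hypothetical; 'val_27' is not among the skewed values here
select key, value
from list_bucketing_static_part
where ds = '2008-04-08' and hr = '11' and value = 'val_27';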


[17/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
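The per-JDK golden files being deleted here (see the diffs above and below) carry the same tags in their headers, which is what made version-specific outputs exist in the first place:

-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
-- JAVA_VERSION_SPECIFIC_OUTPUT

Once the JDK-sensitive pieces of the output are deterministic, a single .q.out per test presumably suffices, which is what this cleanup assumes.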
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.8.out b/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.8.out
deleted file mode 100644
index 1960d41..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.8.out
+++ /dev/null
@@ -1,1119 +0,0 @@
-PREHOOK: query: -- list bucketing DML: dynamic partition. multiple skewed columns. merge.
--- The following explains the merge example used in this test case
--- DML will generate 2 partitions
--- ds=2008-04-08/hr=a1
--- ds=2008-04-08/hr=b1
--- without merge, each partition has more files
--- ds=2008-04-08/hr=a1 has 2 files
--- ds=2008-04-08/hr=b1 has 6 files
--- with merge, each partition has fewer files
--- ds=2008-04-08/hr=a1 has 1 file
--- ds=2008-04-08/hr=b1 has 4 files
--- The following shows file size and name in each directory
--- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 155 000000_0
--- 155 000001_0
--- with merge
--- 254 000000_0
--- hr=b1/key=103/value=val_103:
--- without merge
--- 99 000000_0
--- 99 000001_0
--- with merge
--- 142 000001_0
--- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 5181 000000_0
--- 5181 000001_0
--- with merge
--- 5181 000000_0
--- 5181 000001_0
--- hr=b1/key=484/value=val_484
--- without merge
--- 87 000000_0
--- 87 000001_0
--- with merge
--- 118 000002_0 
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- create a skewed table
-create table list_bucketing_dynamic_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- list bucketing DML: dynamic partition. multiple skewed columns. merge.
--- The following explains the merge example used in this test case
--- DML will generate 2 partitions
--- ds=2008-04-08/hr=a1
--- ds=2008-04-08/hr=b1
--- without merge, each partition has more files
--- ds=2008-04-08/hr=a1 has 2 files
--- ds=2008-04-08/hr=b1 has 6 files
--- with merge, each partition has fewer files
--- ds=2008-04-08/hr=a1 has 1 file
--- ds=2008-04-08/hr=b1 has 4 files
--- The following shows file size and name in each directory
--- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 155 000000_0
--- 155 000001_0
--- with merge
--- 254 000000_0
--- hr=b1/key=103/value=val_103:
--- without merge
--- 99 000000_0
--- 99 000001_0
--- with merge
--- 142 000001_0
--- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 5181 000000_0
--- 5181 000001_0
--- with merge
--- 5181 000000_0
--- 5181 000001_0
--- hr=b1/key=484/value=val_484
--- without merge
--- 87 000000_0
--- 87 000001_0
--- with merge
--- 118 000002_0 
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- create a skewed table
-create table list_bucketing_dynamic_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_dynamic_part
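The test below runs the same dynamic-partition DML twice, first without and then with small-file merging. A hedged sketch of the knobs that typically differ between the two runs (the actual set commands are outside this excerpt and are assumed here):

-- assumed settings for the 'with merge' run
set hive.merge.mapfiles=true;
set hive.merge.mapredfiles=true;
insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08';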
-PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            srcpart
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_TAB
-            TOK_TABNAME
-               list_bucketing_dynamic_part
-            TOK_PARTSPEC
-               TOK_PARTVAL
-                  ds
-                  '2008-04-08'
-               TOK_PARTVAL
-                  hr
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               key
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               value
-         TOK_SELEXPR
-            TOK_FUNCTION
-               if
-               ==
-                  %
-                     TOK_TABLE_OR_COL
-                        key
-                     100
-                  0
-               'a1'
-               'b1'
-      TOK_WHERE
-         =
-            TOK_TABLE_OR_COL
-               ds
-            '2008-04-08'
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: srcpart
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string), if(((UDFToDouble(key) % 100.0) = 0.0), 'a1', 'b1') (type: string)
-              outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/
-                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_dynamic_part
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_dynamic_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [srcpart]
-        /srcpart/ds=2008-04-08/hr=12 [srcpart]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_dynamic_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
-POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_dynamic_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_dynamic_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-ds=2008-04-08/hr=a1
-ds=2008-04-08/hr=b1
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, a1]    	 
-Database:           	default             	 
-Table:              	list_bucketing_dynamic_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	2                   
-	numRows             	16                  
-	rawDataSize         	136                 
-	totalSize           	310                 
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
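A hedged reading of the hr=a1 numbers above: numFiles 2 matches one writer per scanned source partition (hr=11 and hr=12), and numRows 16 is the count of src keys divisible by 100, taken twice: 0 occurs three times, 100 and 200 twice each, and 400 once, i.e. 8 rows per source partition. A hypothetical cross-check, which should return 16 on this dataset:

select count(*) from srcpart where ds = '2008-04-08' and key % 100 == 0;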
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, b1]    	 
-Database:           	default             	 
-Table:              	list_bucketing_dynamic_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	6                   
-	numRows             	984                 
-	rawDataSize         	9488                
-	totalSize           	10734               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484, [103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            srcpart
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_TAB
-            TOK_TABNAME
-               list_bucketing_dynamic_part
-            TOK_PARTSPEC
-               TOK_PARTVAL
-                  ds
-                  '2008-04-08'
-               TOK_PARTVAL
-                  hr
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               key
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               value
-         TOK_SELEXPR
-            TOK_FUNCTION
-               if
-               ==
-                  %
-                     TOK_TABLE_OR_COL
-                        key
-                     100
-                  0
-               'a1'
-               'b1'
-      TOK_WHERE
-         =
-            TOK_TABLE_OR_COL
-               ds
-            '2008-04-08'
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
-  Stage-4
-  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
-  Stage-2 depends on stages: Stage-0
-  Stage-3
-  Stage-5
-  Stage-6 depends on stages: Stage-5
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: srcpart
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string), if(((UDFToDouble(key) % 100.0) = 0.0), 'a1', 'b1') (type: string)
-              outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/
-                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_dynamic_part
-                      partition_columns hr
-                      partition_columns.types string
-                      serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_dynamic_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [srcpart]
-        /srcpart/ds=2008-04-08/hr=12 [srcpart]
-
-  Stage: Stage-7
-    Conditional Operator
-
-  Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns hr
-                partition_columns.types string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_dynamic_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-  Stage: Stage-3
-    Merge File Operator
-      Map Operator Tree:
-          RCFile Merge Operator
-      merge level: block
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_dynamic_part
-              partition_columns hr
-              partition_columns.types string
-              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns hr
-                partition_columns.types string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_dynamic_part
-            name: default.list_bucketing_dynamic_part
-      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-5
-    Merge File Operator
-      Map Operator Tree:
-          RCFile Merge Operator
-      merge level: block
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_dynamic_part
-              partition_columns hr
-              partition_columns.types string
-              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns hr
-                partition_columns.types string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_dynamic_part
-            name: default.list_bucketing_dynamic_part
-      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
-POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_dynamic_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_dynamic_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-ds=2008-04-08/hr=a1
-ds=2008-04-08/hr=b1
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, a1]    	 
-Database:           	default             	 
-Table:              	list_bucketing_dynamic_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	16                  
-	rawDataSize         	136                 
-	totalSize           	254                 
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, b1]    	 
-Database:           	default             	 
-Table:              	list_bucketing_dynamic_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	4                   
-	numRows             	984                 
-	rawDataSize         	9488                
-	totalSize           	10622               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484, [103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-1000
-PREHOOK: query: select count(*) from list_bucketing_dynamic_part
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from list_bucketing_dynamic_part
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-1000
-PREHOOK: query: explain extended
-select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            list_bucketing_dynamic_part
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_DIR
-            TOK_TMP_FILE
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_ALLCOLREF
-      TOK_WHERE
-         and
-            =
-               TOK_TABLE_OR_COL
-                  key
-               '484'
-            =
-               TOK_TABLE_OR_COL
-                  value
-               'val_484'
-
-
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Partition Description:
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr a1
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_dynamic_part
-              numFiles 1
-              numRows 16
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 136
-              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 254
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_dynamic_part
-            name: default.list_bucketing_dynamic_part
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr b1
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_dynamic_part
-              numFiles 4
-              numRows 984
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 9488
-              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 10622
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_dynamic_part
-            name: default.list_bucketing_dynamic_part
-      Processor Tree:
-        TableScan
-          alias: list_bucketing_dynamic_part
-          Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
-          GatherStats: false
-          Filter Operator
-            isSamplingPred: false
-            predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
-            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-              ListSink
-
-PREHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-484	val_484	2008-04-08	b1
-484	val_484	2008-04-08	b1
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	12
-PREHOOK: query: -- clean up
-drop table list_bucketing_dynamic_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Output: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- clean up
-drop table list_bucketing_dynamic_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Output: default@list_bucketing_dynamic_part
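
Context note on the diff above: the `desc formatted` output reports Skewed Columns [key, value], Skewed Values [[484, val_484], [51, val_14], [103, val_103]] and "Stored As SubDirectories: Yes", so the table under test is a list-bucketed table. The defining DDL is not part of this hunk; a minimal HiveQL sketch of how such a table is typically declared (column names and skew values taken from the output above, the rest assumed):

    -- sketch only: route rows with the frequent (key, value) pairs
    -- into their own subdirectories, everything else into a default dir
    create table list_bucketing_dynamic_part (key string, value string)
      partitioned by (ds string, hr string)
      skewed by (key, value) on (('484','val_484'), ('51','val_14'), ('103','val_103'))
      stored as directories
      stored as rcfile;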


[27/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/avro_nullable_fields.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/avro_nullable_fields.q.java1.7.out b/ql/src/test/results/clientpositive/avro_nullable_fields.q.java1.7.out
deleted file mode 100644
index 52b09d4..0000000
--- a/ql/src/test/results/clientpositive/avro_nullable_fields.q.java1.7.out
+++ /dev/null
@@ -1,179 +0,0 @@
-PREHOOK: query: -- Verify that nullable fields properly work
-
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-CREATE TABLE test_serializer(string1 STRING,
-                             int1 INT,
-                             tinyint1 TINYINT,
-                             smallint1 SMALLINT,
-                             bigint1 BIGINT,
-                             boolean1 BOOLEAN,
-                             float1 FLOAT,
-                             double1 DOUBLE,
-                             list1 ARRAY<STRING>,
-                             map1 MAP<STRING,INT>,
-                             struct1 STRUCT<sint:INT,sboolean:BOOLEAN,sstring:STRING>,
-                             enum1 STRING,
-                             nullableint INT,
-                             bytes1 BINARY,
-                             fixed1 BINARY)
- ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY ':' MAP KEYS TERMINATED BY '#' LINES TERMINATED BY '\n'
- STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@test_serializer
-POSTHOOK: query: -- Verify that nullable fields properly work
-
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-CREATE TABLE test_serializer(string1 STRING,
-                             int1 INT,
-                             tinyint1 TINYINT,
-                             smallint1 SMALLINT,
-                             bigint1 BIGINT,
-                             boolean1 BOOLEAN,
-                             float1 FLOAT,
-                             double1 DOUBLE,
-                             list1 ARRAY<STRING>,
-                             map1 MAP<STRING,INT>,
-                             struct1 STRUCT<sint:INT,sboolean:BOOLEAN,sstring:STRING>,
-                             enum1 STRING,
-                             nullableint INT,
-                             bytes1 BINARY,
-                             fixed1 BINARY)
- ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY ':' MAP KEYS TERMINATED BY '#' LINES TERMINATED BY '\n'
- STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@test_serializer
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/csv.txt' INTO TABLE test_serializer
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@test_serializer
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/csv.txt' INTO TABLE test_serializer
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@test_serializer
-PREHOOK: query: CREATE TABLE as_avro
-  ROW FORMAT
-  SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
-  STORED AS
-  INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat'
-  OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat'
-  TBLPROPERTIES (
-    'avro.schema.literal'='{
-      "namespace": "com.howdy",
-      "name": "some_schema",
-      "type": "record",
-      "fields": [
-        { "name": "string1", "type": ["null", "string"] },
-        { "name": "int1", "type": ["null", "int"] },
-        { "name": "tinyint1", "type": ["null", "int"] },
-        { "name": "smallint1", "type": ["null", "int"] },
-        { "name": "bigint1", "type": ["null", "long"] },
-        { "name": "boolean1", "type": ["null", "boolean"] },
-        { "name": "float1", "type": ["null", "float"] },
-        { "name": "double1", "type": ["null", "double"] },
-        { "name": "list1", "type": ["null", {"type": "array", "items": "string"}] },
-        { "name": "map1", "type": ["null", {"type": "map", "values": "int"}] },
-        { "name": "struct1", "type": ["null", {"type": "record", "name": "struct1_name", "fields": [
-          { "name": "sInt", "type": "int" },
-          { "name": "sBoolean", "type": "boolean" },
-          { "name": "sString", "type": "string" }
-        ]}] },
-        { "name": "enum1", "type": ["null", {"type": "enum", "name": "enum1_values", "symbols": ["BLUE", "RED", "GREEN"]}] },
-        { "name": "nullableint", "type": ["null", "int"] },
-        { "name": "bytes1", "type": ["null", "bytes"] },
-        { "name": "fixed1", "type": ["null", {"type": "fixed", "name": "threebytes", "size": 3}] }
-      ]
-    }'
-  )
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@as_avro
-POSTHOOK: query: CREATE TABLE as_avro
-  ROW FORMAT
-  SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
-  STORED AS
-  INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat'
-  OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat'
-  TBLPROPERTIES (
-    'avro.schema.literal'='{
-      "namespace": "com.howdy",
-      "name": "some_schema",
-      "type": "record",
-      "fields": [
-        { "name": "string1", "type": ["null", "string"] },
-        { "name": "int1", "type": ["null", "int"] },
-        { "name": "tinyint1", "type": ["null", "int"] },
-        { "name": "smallint1", "type": ["null", "int"] },
-        { "name": "bigint1", "type": ["null", "long"] },
-        { "name": "boolean1", "type": ["null", "boolean"] },
-        { "name": "float1", "type": ["null", "float"] },
-        { "name": "double1", "type": ["null", "double"] },
-        { "name": "list1", "type": ["null", {"type": "array", "items": "string"}] },
-        { "name": "map1", "type": ["null", {"type": "map", "values": "int"}] },
-        { "name": "struct1", "type": ["null", {"type": "record", "name": "struct1_name", "fields": [
-          { "name": "sInt", "type": "int" },
-          { "name": "sBoolean", "type": "boolean" },
-          { "name": "sString", "type": "string" }
-        ]}] },
-        { "name": "enum1", "type": ["null", {"type": "enum", "name": "enum1_values", "symbols": ["BLUE", "RED", "GREEN"]}] },
-        { "name": "nullableint", "type": ["null", "int"] },
-        { "name": "bytes1", "type": ["null", "bytes"] },
-        { "name": "fixed1", "type": ["null", {"type": "fixed", "name": "threebytes", "size": 3}] }
-      ]
-    }'
-  )
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@as_avro
-PREHOOK: query: INSERT OVERWRITE TABLE as_avro SELECT * FROM test_serializer
-PREHOOK: type: QUERY
-PREHOOK: Input: default@test_serializer
-PREHOOK: Output: default@as_avro
-POSTHOOK: query: INSERT OVERWRITE TABLE as_avro SELECT * FROM test_serializer
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@test_serializer
-POSTHOOK: Output: default@as_avro
-POSTHOOK: Lineage: as_avro.bigint1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:bigint1, type:bigint, comment:null), ]
-POSTHOOK: Lineage: as_avro.boolean1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:boolean1, type:boolean, comment:null), ]
-POSTHOOK: Lineage: as_avro.bytes1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:bytes1, type:binary, comment:null), ]
-POSTHOOK: Lineage: as_avro.double1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:double1, type:double, comment:null), ]
-POSTHOOK: Lineage: as_avro.enum1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:enum1, type:string, comment:null), ]
-POSTHOOK: Lineage: as_avro.fixed1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:fixed1, type:binary, comment:null), ]
-POSTHOOK: Lineage: as_avro.float1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:float1, type:float, comment:null), ]
-POSTHOOK: Lineage: as_avro.int1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:int1, type:int, comment:null), ]
-POSTHOOK: Lineage: as_avro.list1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:list1, type:array<string>, comment:null), ]
-POSTHOOK: Lineage: as_avro.map1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:map1, type:map<string,int>, comment:null), ]
-POSTHOOK: Lineage: as_avro.nullableint SIMPLE [(test_serializer)test_serializer.FieldSchema(name:nullableint, type:int, comment:null), ]
-POSTHOOK: Lineage: as_avro.smallint1 EXPRESSION [(test_serializer)test_serializer.FieldSchema(name:smallint1, type:smallint, comment:null), ]
-POSTHOOK: Lineage: as_avro.string1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:string1, type:string, comment:null), ]
-POSTHOOK: Lineage: as_avro.struct1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:struct1, type:struct<sint:int,sboolean:boolean,sstring:string>, comment:null), ]
-POSTHOOK: Lineage: as_avro.tinyint1 EXPRESSION [(test_serializer)test_serializer.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
-PREHOOK: query: SELECT * FROM as_avro
-PREHOOK: type: QUERY
-PREHOOK: Input: default@as_avro
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM as_avro
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@as_avro
-#### A masked pattern was here ####
-why hello there	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Bob":31,"Control":86}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-another record	98	4	101	9999999	false	99.89	9.0E-8	["beta"]	{"Earth":101}	{"sint":1134,"sboolean":false,"sstring":"wazzup"}	RED	NULL		ef
-third record	45	5	102	999999999	true	89.99	9.0E-14	["alpha","gamma"]	{"Earth":237,"Bob":723}	{"sint":102,"sboolean":false,"sstring":"BNL"}	GREEN	NULL		hi
-NULL	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Bob":31,"Control":86}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	NULL	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Bob":31,"Control":86}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	42	NULL	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Bob":31,"Control":86}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	42	3	NULL	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Bob":31,"Control":86}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	42	3	100	NULL	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Bob":31,"Control":86}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	42	3	100	1412341	NULL	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Bob":31,"Control":86}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	42	3	100	1412341	true	NULL	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Bob":31,"Control":86}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	42	3	100	1412341	true	42.43	NULL	["alpha","beta","gamma"]	{"Earth":42,"Bob":31,"Control":86}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	42	3	100	1412341	true	42.43	85.23423424	NULL	{"Earth":42,"Bob":31,"Control":86}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	NULL	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Bob":31,"Control":86}	NULL	BLUE	72		bc
-string	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Bob":31,"Control":86}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	NULL	72		bc
-string	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Bob":31,"Control":86}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	NULL		bc
-string	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Bob":31,"Control":86}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72	NULL	bc
-string	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Bob":31,"Control":86}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		NULL
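
A note on why this golden file was JDK-version specific: the rows above and the java1.8 copy deleted next differ only in the iteration order of the map-typed column (e.g. {"Earth":42,"Bob":31,"Control":86} here versus {"Earth":42,"Control":86,"Bob":31} below), a side effect of the HashMap changes in JDK8. A test that must stay stable across JDKs can instead project the map deterministically; a hedged sketch using Hive's built-in map_keys and sort_array UDFs:

    -- emit map keys in a JDK-independent, sorted order
    SELECT sort_array(map_keys(map1)) FROM as_avro;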

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/avro_nullable_fields.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/avro_nullable_fields.q.java1.8.out b/ql/src/test/results/clientpositive/avro_nullable_fields.q.java1.8.out
deleted file mode 100644
index 3690f7b..0000000
--- a/ql/src/test/results/clientpositive/avro_nullable_fields.q.java1.8.out
+++ /dev/null
@@ -1,179 +0,0 @@
-PREHOOK: query: -- Verify that nullable fields properly work
-
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-CREATE TABLE test_serializer(string1 STRING,
-                             int1 INT,
-                             tinyint1 TINYINT,
-                             smallint1 SMALLINT,
-                             bigint1 BIGINT,
-                             boolean1 BOOLEAN,
-                             float1 FLOAT,
-                             double1 DOUBLE,
-                             list1 ARRAY<STRING>,
-                             map1 MAP<STRING,INT>,
-                             struct1 STRUCT<sint:INT,sboolean:BOOLEAN,sstring:STRING>,
-                             enum1 STRING,
-                             nullableint INT,
-                             bytes1 BINARY,
-                             fixed1 BINARY)
- ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY ':' MAP KEYS TERMINATED BY '#' LINES TERMINATED BY '\n'
- STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@test_serializer
-POSTHOOK: query: -- Verify that nullable fields properly work
-
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-CREATE TABLE test_serializer(string1 STRING,
-                             int1 INT,
-                             tinyint1 TINYINT,
-                             smallint1 SMALLINT,
-                             bigint1 BIGINT,
-                             boolean1 BOOLEAN,
-                             float1 FLOAT,
-                             double1 DOUBLE,
-                             list1 ARRAY<STRING>,
-                             map1 MAP<STRING,INT>,
-                             struct1 STRUCT<sint:INT,sboolean:BOOLEAN,sstring:STRING>,
-                             enum1 STRING,
-                             nullableint INT,
-                             bytes1 BINARY,
-                             fixed1 BINARY)
- ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY ':' MAP KEYS TERMINATED BY '#' LINES TERMINATED BY '\n'
- STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@test_serializer
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/csv.txt' INTO TABLE test_serializer
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@test_serializer
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/csv.txt' INTO TABLE test_serializer
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@test_serializer
-PREHOOK: query: CREATE TABLE as_avro
-  ROW FORMAT
-  SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
-  STORED AS
-  INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat'
-  OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat'
-  TBLPROPERTIES (
-    'avro.schema.literal'='{
-      "namespace": "com.howdy",
-      "name": "some_schema",
-      "type": "record",
-      "fields": [
-        { "name": "string1", "type": ["null", "string"] },
-        { "name": "int1", "type": ["null", "int"] },
-        { "name": "tinyint1", "type": ["null", "int"] },
-        { "name": "smallint1", "type": ["null", "int"] },
-        { "name": "bigint1", "type": ["null", "long"] },
-        { "name": "boolean1", "type": ["null", "boolean"] },
-        { "name": "float1", "type": ["null", "float"] },
-        { "name": "double1", "type": ["null", "double"] },
-        { "name": "list1", "type": ["null", {"type": "array", "items": "string"}] },
-        { "name": "map1", "type": ["null", {"type": "map", "values": "int"}] },
-        { "name": "struct1", "type": ["null", {"type": "record", "name": "struct1_name", "fields": [
-          { "name": "sInt", "type": "int" },
-          { "name": "sBoolean", "type": "boolean" },
-          { "name": "sString", "type": "string" }
-        ]}] },
-        { "name": "enum1", "type": ["null", {"type": "enum", "name": "enum1_values", "symbols": ["BLUE", "RED", "GREEN"]}] },
-        { "name": "nullableint", "type": ["null", "int"] },
-        { "name": "bytes1", "type": ["null", "bytes"] },
-        { "name": "fixed1", "type": ["null", {"type": "fixed", "name": "threebytes", "size": 3}] }
-      ]
-    }'
-  )
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@as_avro
-POSTHOOK: query: CREATE TABLE as_avro
-  ROW FORMAT
-  SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
-  STORED AS
-  INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat'
-  OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat'
-  TBLPROPERTIES (
-    'avro.schema.literal'='{
-      "namespace": "com.howdy",
-      "name": "some_schema",
-      "type": "record",
-      "fields": [
-        { "name": "string1", "type": ["null", "string"] },
-        { "name": "int1", "type": ["null", "int"] },
-        { "name": "tinyint1", "type": ["null", "int"] },
-        { "name": "smallint1", "type": ["null", "int"] },
-        { "name": "bigint1", "type": ["null", "long"] },
-        { "name": "boolean1", "type": ["null", "boolean"] },
-        { "name": "float1", "type": ["null", "float"] },
-        { "name": "double1", "type": ["null", "double"] },
-        { "name": "list1", "type": ["null", {"type": "array", "items": "string"}] },
-        { "name": "map1", "type": ["null", {"type": "map", "values": "int"}] },
-        { "name": "struct1", "type": ["null", {"type": "record", "name": "struct1_name", "fields": [
-          { "name": "sInt", "type": "int" },
-          { "name": "sBoolean", "type": "boolean" },
-          { "name": "sString", "type": "string" }
-        ]}] },
-        { "name": "enum1", "type": ["null", {"type": "enum", "name": "enum1_values", "symbols": ["BLUE", "RED", "GREEN"]}] },
-        { "name": "nullableint", "type": ["null", "int"] },
-        { "name": "bytes1", "type": ["null", "bytes"] },
-        { "name": "fixed1", "type": ["null", {"type": "fixed", "name": "threebytes", "size": 3}] }
-      ]
-    }'
-  )
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@as_avro
-PREHOOK: query: INSERT OVERWRITE TABLE as_avro SELECT * FROM test_serializer
-PREHOOK: type: QUERY
-PREHOOK: Input: default@test_serializer
-PREHOOK: Output: default@as_avro
-POSTHOOK: query: INSERT OVERWRITE TABLE as_avro SELECT * FROM test_serializer
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@test_serializer
-POSTHOOK: Output: default@as_avro
-POSTHOOK: Lineage: as_avro.bigint1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:bigint1, type:bigint, comment:null), ]
-POSTHOOK: Lineage: as_avro.boolean1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:boolean1, type:boolean, comment:null), ]
-POSTHOOK: Lineage: as_avro.bytes1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:bytes1, type:binary, comment:null), ]
-POSTHOOK: Lineage: as_avro.double1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:double1, type:double, comment:null), ]
-POSTHOOK: Lineage: as_avro.enum1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:enum1, type:string, comment:null), ]
-POSTHOOK: Lineage: as_avro.fixed1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:fixed1, type:binary, comment:null), ]
-POSTHOOK: Lineage: as_avro.float1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:float1, type:float, comment:null), ]
-POSTHOOK: Lineage: as_avro.int1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:int1, type:int, comment:null), ]
-POSTHOOK: Lineage: as_avro.list1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:list1, type:array<string>, comment:null), ]
-POSTHOOK: Lineage: as_avro.map1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:map1, type:map<string,int>, comment:null), ]
-POSTHOOK: Lineage: as_avro.nullableint SIMPLE [(test_serializer)test_serializer.FieldSchema(name:nullableint, type:int, comment:null), ]
-POSTHOOK: Lineage: as_avro.smallint1 EXPRESSION [(test_serializer)test_serializer.FieldSchema(name:smallint1, type:smallint, comment:null), ]
-POSTHOOK: Lineage: as_avro.string1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:string1, type:string, comment:null), ]
-POSTHOOK: Lineage: as_avro.struct1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:struct1, type:struct<sint:int,sboolean:boolean,sstring:string>, comment:null), ]
-POSTHOOK: Lineage: as_avro.tinyint1 EXPRESSION [(test_serializer)test_serializer.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
-PREHOOK: query: SELECT * FROM as_avro
-PREHOOK: type: QUERY
-PREHOOK: Input: default@as_avro
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM as_avro
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@as_avro
-#### A masked pattern was here ####
-why hello there	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-another record	98	4	101	9999999	false	99.89	9.0E-8	["beta"]	{"Earth":101}	{"sint":1134,"sboolean":false,"sstring":"wazzup"}	RED	NULL		ef
-third record	45	5	102	999999999	true	89.99	9.0E-14	["alpha","gamma"]	{"Earth":237,"Bob":723}	{"sint":102,"sboolean":false,"sstring":"BNL"}	GREEN	NULL		hi
-NULL	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	NULL	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	42	NULL	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	42	3	NULL	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	42	3	100	NULL	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	42	3	100	1412341	NULL	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	42	3	100	1412341	true	NULL	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	42	3	100	1412341	true	42.43	NULL	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	42	3	100	1412341	true	42.43	85.23423424	NULL	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	NULL	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
-string	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	NULL	BLUE	72		bc
-string	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	NULL	72		bc
-string	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	NULL		bc
-string	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72	NULL	bc
-string	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		NULL

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/avro_nullable_fields.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/avro_nullable_fields.q.out b/ql/src/test/results/clientpositive/avro_nullable_fields.q.out
new file mode 100644
index 0000000..2272b34
--- /dev/null
+++ b/ql/src/test/results/clientpositive/avro_nullable_fields.q.out
@@ -0,0 +1,177 @@
+PREHOOK: query: -- Verify that nullable fields properly work
+
+
+CREATE TABLE test_serializer(string1 STRING,
+                             int1 INT,
+                             tinyint1 TINYINT,
+                             smallint1 SMALLINT,
+                             bigint1 BIGINT,
+                             boolean1 BOOLEAN,
+                             float1 FLOAT,
+                             double1 DOUBLE,
+                             list1 ARRAY<STRING>,
+                             map1 MAP<STRING,INT>,
+                             struct1 STRUCT<sint:INT,sboolean:BOOLEAN,sstring:STRING>,
+                             enum1 STRING,
+                             nullableint INT,
+                             bytes1 BINARY,
+                             fixed1 BINARY)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY ':' MAP KEYS TERMINATED BY '#' LINES TERMINATED BY '\n'
+ STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_serializer
+POSTHOOK: query: -- Verify that nullable fields properly work
+
+
+CREATE TABLE test_serializer(string1 STRING,
+                             int1 INT,
+                             tinyint1 TINYINT,
+                             smallint1 SMALLINT,
+                             bigint1 BIGINT,
+                             boolean1 BOOLEAN,
+                             float1 FLOAT,
+                             double1 DOUBLE,
+                             list1 ARRAY<STRING>,
+                             map1 MAP<STRING,INT>,
+                             struct1 STRUCT<sint:INT,sboolean:BOOLEAN,sstring:STRING>,
+                             enum1 STRING,
+                             nullableint INT,
+                             bytes1 BINARY,
+                             fixed1 BINARY)
+ ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY ':' MAP KEYS TERMINATED BY '#' LINES TERMINATED BY '\n'
+ STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_serializer
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/csv.txt' INTO TABLE test_serializer
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@test_serializer
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/csv.txt' INTO TABLE test_serializer
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@test_serializer
+PREHOOK: query: CREATE TABLE as_avro
+  ROW FORMAT
+  SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
+  STORED AS
+  INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat'
+  OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat'
+  TBLPROPERTIES (
+    'avro.schema.literal'='{
+      "namespace": "com.howdy",
+      "name": "some_schema",
+      "type": "record",
+      "fields": [
+        { "name": "string1", "type": ["null", "string"] },
+        { "name": "int1", "type": ["null", "int"] },
+        { "name": "tinyint1", "type": ["null", "int"] },
+        { "name": "smallint1", "type": ["null", "int"] },
+        { "name": "bigint1", "type": ["null", "long"] },
+        { "name": "boolean1", "type": ["null", "boolean"] },
+        { "name": "float1", "type": ["null", "float"] },
+        { "name": "double1", "type": ["null", "double"] },
+        { "name": "list1", "type": ["null", {"type": "array", "items": "string"}] },
+        { "name": "map1", "type": ["null", {"type": "map", "values": "int"}] },
+        { "name": "struct1", "type": ["null", {"type": "record", "name": "struct1_name", "fields": [
+          { "name": "sInt", "type": "int" },
+          { "name": "sBoolean", "type": "boolean" },
+          { "name": "sString", "type": "string" }
+        ]}] },
+        { "name": "enum1", "type": ["null", {"type": "enum", "name": "enum1_values", "symbols": ["BLUE", "RED", "GREEN"]}] },
+        { "name": "nullableint", "type": ["null", "int"] },
+        { "name": "bytes1", "type": ["null", "bytes"] },
+        { "name": "fixed1", "type": ["null", {"type": "fixed", "name": "threebytes", "size": 3}] }
+      ]
+    }'
+  )
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@as_avro
+POSTHOOK: query: CREATE TABLE as_avro
+  ROW FORMAT
+  SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
+  STORED AS
+  INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat'
+  OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat'
+  TBLPROPERTIES (
+    'avro.schema.literal'='{
+      "namespace": "com.howdy",
+      "name": "some_schema",
+      "type": "record",
+      "fields": [
+        { "name": "string1", "type": ["null", "string"] },
+        { "name": "int1", "type": ["null", "int"] },
+        { "name": "tinyint1", "type": ["null", "int"] },
+        { "name": "smallint1", "type": ["null", "int"] },
+        { "name": "bigint1", "type": ["null", "long"] },
+        { "name": "boolean1", "type": ["null", "boolean"] },
+        { "name": "float1", "type": ["null", "float"] },
+        { "name": "double1", "type": ["null", "double"] },
+        { "name": "list1", "type": ["null", {"type": "array", "items": "string"}] },
+        { "name": "map1", "type": ["null", {"type": "map", "values": "int"}] },
+        { "name": "struct1", "type": ["null", {"type": "record", "name": "struct1_name", "fields": [
+          { "name": "sInt", "type": "int" },
+          { "name": "sBoolean", "type": "boolean" },
+          { "name": "sString", "type": "string" }
+        ]}] },
+        { "name": "enum1", "type": ["null", {"type": "enum", "name": "enum1_values", "symbols": ["BLUE", "RED", "GREEN"]}] },
+        { "name": "nullableint", "type": ["null", "int"] },
+        { "name": "bytes1", "type": ["null", "bytes"] },
+        { "name": "fixed1", "type": ["null", {"type": "fixed", "name": "threebytes", "size": 3}] }
+      ]
+    }'
+  )
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@as_avro
+PREHOOK: query: INSERT OVERWRITE TABLE as_avro SELECT * FROM test_serializer
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_serializer
+PREHOOK: Output: default@as_avro
+POSTHOOK: query: INSERT OVERWRITE TABLE as_avro SELECT * FROM test_serializer
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_serializer
+POSTHOOK: Output: default@as_avro
+POSTHOOK: Lineage: as_avro.bigint1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:bigint1, type:bigint, comment:null), ]
+POSTHOOK: Lineage: as_avro.boolean1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:boolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: as_avro.bytes1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:bytes1, type:binary, comment:null), ]
+POSTHOOK: Lineage: as_avro.double1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:double1, type:double, comment:null), ]
+POSTHOOK: Lineage: as_avro.enum1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:enum1, type:string, comment:null), ]
+POSTHOOK: Lineage: as_avro.fixed1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:fixed1, type:binary, comment:null), ]
+POSTHOOK: Lineage: as_avro.float1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:float1, type:float, comment:null), ]
+POSTHOOK: Lineage: as_avro.int1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:int1, type:int, comment:null), ]
+POSTHOOK: Lineage: as_avro.list1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:list1, type:array<string>, comment:null), ]
+POSTHOOK: Lineage: as_avro.map1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:map1, type:map<string,int>, comment:null), ]
+POSTHOOK: Lineage: as_avro.nullableint SIMPLE [(test_serializer)test_serializer.FieldSchema(name:nullableint, type:int, comment:null), ]
+POSTHOOK: Lineage: as_avro.smallint1 EXPRESSION [(test_serializer)test_serializer.FieldSchema(name:smallint1, type:smallint, comment:null), ]
+POSTHOOK: Lineage: as_avro.string1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:string1, type:string, comment:null), ]
+POSTHOOK: Lineage: as_avro.struct1 SIMPLE [(test_serializer)test_serializer.FieldSchema(name:struct1, type:struct<sint:int,sboolean:boolean,sstring:string>, comment:null), ]
+POSTHOOK: Lineage: as_avro.tinyint1 EXPRESSION [(test_serializer)test_serializer.FieldSchema(name:tinyint1, type:tinyint, comment:null), ]
+PREHOOK: query: SELECT * FROM as_avro
+PREHOOK: type: QUERY
+PREHOOK: Input: default@as_avro
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM as_avro
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@as_avro
+#### A masked pattern was here ####
+why hello there	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
+another record	98	4	101	9999999	false	99.89	9.0E-8	["beta"]	{"Earth":101}	{"sint":1134,"sboolean":false,"sstring":"wazzup"}	RED	NULL		ef
+third record	45	5	102	999999999	true	89.99	9.0E-14	["alpha","gamma"]	{"Earth":237,"Bob":723}	{"sint":102,"sboolean":false,"sstring":"BNL"}	GREEN	NULL		hi
+NULL	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
+string	NULL	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
+string	42	NULL	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
+string	42	3	NULL	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
+string	42	3	100	NULL	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
+string	42	3	100	1412341	NULL	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
+string	42	3	100	1412341	true	NULL	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
+string	42	3	100	1412341	true	42.43	NULL	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
+string	42	3	100	1412341	true	42.43	85.23423424	NULL	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
+string	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	NULL	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		bc
+string	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	NULL	BLUE	72		bc
+string	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	NULL	72		bc
+string	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	NULL		bc
+string	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72	NULL	bc
+string	42	3	100	1412341	true	42.43	85.23423424	["alpha","beta","gamma"]	{"Earth":42,"Control":86,"Bob":31}	{"sint":17,"sboolean":true,"sstring":"Abe Linkedin"}	BLUE	72		NULL
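
The consolidated golden file above standardizes on the JDK8 map ordering. Note also that every field in the avro.schema.literal is a ["null", T] union, which is what lets Hive surface missing values as the NULL cells in the rows above; a small illustrative query (assumed for illustration, not part of the test):

    -- rows whose Avro union resolved to the null branch for nullableint
    SELECT string1, int1 FROM as_avro WHERE nullableint IS NULL;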

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/avro_timestamp.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/avro_timestamp.q.java1.7.out b/ql/src/test/results/clientpositive/avro_timestamp.q.java1.7.out
deleted file mode 100644
index d2d3b7c..0000000
--- a/ql/src/test/results/clientpositive/avro_timestamp.q.java1.7.out
+++ /dev/null
@@ -1,134 +0,0 @@
-PREHOOK: query: -- Exclude test on Windows due to space character being escaped in Hive paths on Windows.
--- EXCLUDE_OS_WINDOWS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE avro_timestamp_staging
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- Exclude test on Windows due to space character being escaped in Hive paths on Windows.
--- EXCLUDE_OS_WINDOWS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE avro_timestamp_staging
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE avro_timestamp
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE avro_timestamp
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE avro_timestamp_casts
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE avro_timestamp_casts
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE avro_timestamp_staging (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
-   ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-   COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
-   STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@avro_timestamp_staging
-POSTHOOK: query: CREATE TABLE avro_timestamp_staging (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
-   ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-   COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
-   STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@avro_timestamp_staging
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_timestamp.txt' OVERWRITE INTO TABLE avro_timestamp_staging
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@avro_timestamp_staging
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_timestamp.txt' OVERWRITE INTO TABLE avro_timestamp_staging
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@avro_timestamp_staging
-PREHOOK: query: CREATE TABLE avro_timestamp (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
-  PARTITIONED BY (p1 int, p2 timestamp)
-  ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-  COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
-  STORED AS AVRO
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@avro_timestamp
-POSTHOOK: query: CREATE TABLE avro_timestamp (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
-  PARTITIONED BY (p1 int, p2 timestamp)
-  ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-  COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
-  STORED AS AVRO
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@avro_timestamp
-PREHOOK: query: INSERT OVERWRITE TABLE avro_timestamp PARTITION(p1=2, p2='2014-09-26 07:08:09.123') SELECT * FROM avro_timestamp_staging
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_timestamp_staging
-PREHOOK: Output: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-POSTHOOK: query: INSERT OVERWRITE TABLE avro_timestamp PARTITION(p1=2, p2='2014-09-26 07:08:09.123') SELECT * FROM avro_timestamp_staging
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_timestamp_staging
-POSTHOOK: Output: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-POSTHOOK: Lineage: avro_timestamp PARTITION(p1=2,p2=2014-09-26 07:08:09.123).d SIMPLE [(avro_timestamp_staging)avro_timestamp_staging.FieldSchema(name:d, type:timestamp, comment:null), ]
-POSTHOOK: Lineage: avro_timestamp PARTITION(p1=2,p2=2014-09-26 07:08:09.123).l1 SIMPLE [(avro_timestamp_staging)avro_timestamp_staging.FieldSchema(name:l1, type:array<timestamp>, comment:null), ]
-POSTHOOK: Lineage: avro_timestamp PARTITION(p1=2,p2=2014-09-26 07:08:09.123).m1 SIMPLE [(avro_timestamp_staging)avro_timestamp_staging.FieldSchema(name:m1, type:map<string,timestamp>, comment:null), ]
-PREHOOK: query: SELECT * FROM avro_timestamp
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_timestamp
-PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM avro_timestamp
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_timestamp
-POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-2012-02-21 07:08:09.123	{"foo":"1980-12-16 07:08:09.123","bar":"1998-05-07 07:08:09.123"}	["2011-09-04 07:08:09.123","2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-2014-02-11 07:08:09.123	{"baz":"1981-12-16 07:08:09.123"}	["2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-1947-02-11 07:08:09.123	{"baz":"1921-12-16 07:08:09.123"}	["2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-8200-02-11 07:08:09.123	{"baz":"6981-12-16 07:08:09.123"}	["1039-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-PREHOOK: query: SELECT d, COUNT(d) FROM avro_timestamp GROUP BY d
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_timestamp
-PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT d, COUNT(d) FROM avro_timestamp GROUP BY d
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_timestamp
-POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-1947-02-11 07:08:09.123	1
-2012-02-21 07:08:09.123	1
-2014-02-11 07:08:09.123	1
-8200-02-11 07:08:09.123	1
-PREHOOK: query: SELECT * FROM avro_timestamp WHERE d!='1947-02-11 07:08:09.123'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_timestamp
-PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM avro_timestamp WHERE d!='1947-02-11 07:08:09.123'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_timestamp
-POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-2012-02-21 07:08:09.123	{"foo":"1980-12-16 07:08:09.123","bar":"1998-05-07 07:08:09.123"}	["2011-09-04 07:08:09.123","2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-2014-02-11 07:08:09.123	{"baz":"1981-12-16 07:08:09.123"}	["2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-8200-02-11 07:08:09.123	{"baz":"6981-12-16 07:08:09.123"}	["1039-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-PREHOOK: query: SELECT * FROM avro_timestamp WHERE d<'2014-12-21 07:08:09.123'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_timestamp
-PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM avro_timestamp WHERE d<'2014-12-21 07:08:09.123'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_timestamp
-POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-2012-02-21 07:08:09.123	{"foo":"1980-12-16 07:08:09.123","bar":"1998-05-07 07:08:09.123"}	["2011-09-04 07:08:09.123","2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-2014-02-11 07:08:09.123	{"baz":"1981-12-16 07:08:09.123"}	["2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-1947-02-11 07:08:09.123	{"baz":"1921-12-16 07:08:09.123"}	["2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-PREHOOK: query: SELECT * FROM avro_timestamp WHERE d>'8000-12-01 07:08:09.123'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_timestamp
-PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM avro_timestamp WHERE d>'8000-12-01 07:08:09.123'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_timestamp
-POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-8200-02-11 07:08:09.123	{"baz":"6981-12-16 07:08:09.123"}	["1039-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/avro_timestamp.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/avro_timestamp.q.java1.8.out b/ql/src/test/results/clientpositive/avro_timestamp.q.java1.8.out
deleted file mode 100644
index 4c38347..0000000
--- a/ql/src/test/results/clientpositive/avro_timestamp.q.java1.8.out
+++ /dev/null
@@ -1,134 +0,0 @@
-PREHOOK: query: -- Exclude test on Windows due to space character being escaped in Hive paths on Windows.
--- EXCLUDE_OS_WINDOWS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE avro_timestamp_staging
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- Exclude test on Windows due to space character being escaped in Hive paths on Windows.
--- EXCLUDE_OS_WINDOWS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE avro_timestamp_staging
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE avro_timestamp
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE avro_timestamp
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE avro_timestamp_casts
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE avro_timestamp_casts
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE avro_timestamp_staging (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
-   ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-   COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
-   STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@avro_timestamp_staging
-POSTHOOK: query: CREATE TABLE avro_timestamp_staging (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
-   ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-   COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
-   STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@avro_timestamp_staging
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_timestamp.txt' OVERWRITE INTO TABLE avro_timestamp_staging
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@avro_timestamp_staging
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_timestamp.txt' OVERWRITE INTO TABLE avro_timestamp_staging
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@avro_timestamp_staging
-PREHOOK: query: CREATE TABLE avro_timestamp (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
-  PARTITIONED BY (p1 int, p2 timestamp)
-  ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-  COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
-  STORED AS AVRO
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@avro_timestamp
-POSTHOOK: query: CREATE TABLE avro_timestamp (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
-  PARTITIONED BY (p1 int, p2 timestamp)
-  ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-  COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
-  STORED AS AVRO
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@avro_timestamp
-PREHOOK: query: INSERT OVERWRITE TABLE avro_timestamp PARTITION(p1=2, p2='2014-09-26 07:08:09.123') SELECT * FROM avro_timestamp_staging
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_timestamp_staging
-PREHOOK: Output: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-POSTHOOK: query: INSERT OVERWRITE TABLE avro_timestamp PARTITION(p1=2, p2='2014-09-26 07:08:09.123') SELECT * FROM avro_timestamp_staging
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_timestamp_staging
-POSTHOOK: Output: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-POSTHOOK: Lineage: avro_timestamp PARTITION(p1=2,p2=2014-09-26 07:08:09.123).d SIMPLE [(avro_timestamp_staging)avro_timestamp_staging.FieldSchema(name:d, type:timestamp, comment:null), ]
-POSTHOOK: Lineage: avro_timestamp PARTITION(p1=2,p2=2014-09-26 07:08:09.123).l1 SIMPLE [(avro_timestamp_staging)avro_timestamp_staging.FieldSchema(name:l1, type:array<timestamp>, comment:null), ]
-POSTHOOK: Lineage: avro_timestamp PARTITION(p1=2,p2=2014-09-26 07:08:09.123).m1 SIMPLE [(avro_timestamp_staging)avro_timestamp_staging.FieldSchema(name:m1, type:map<string,timestamp>, comment:null), ]
-PREHOOK: query: SELECT * FROM avro_timestamp
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_timestamp
-PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM avro_timestamp
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_timestamp
-POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-2012-02-21 07:08:09.123	{"bar":"1998-05-07 07:08:09.123","foo":"1980-12-16 07:08:09.123"}	["2011-09-04 07:08:09.123","2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-2014-02-11 07:08:09.123	{"baz":"1981-12-16 07:08:09.123"}	["2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-1947-02-11 07:08:09.123	{"baz":"1921-12-16 07:08:09.123"}	["2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-8200-02-11 07:08:09.123	{"baz":"6981-12-16 07:08:09.123"}	["1039-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-PREHOOK: query: SELECT d, COUNT(d) FROM avro_timestamp GROUP BY d
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_timestamp
-PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT d, COUNT(d) FROM avro_timestamp GROUP BY d
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_timestamp
-POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-1947-02-11 07:08:09.123	1
-2012-02-21 07:08:09.123	1
-2014-02-11 07:08:09.123	1
-8200-02-11 07:08:09.123	1
-PREHOOK: query: SELECT * FROM avro_timestamp WHERE d!='1947-02-11 07:08:09.123'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_timestamp
-PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM avro_timestamp WHERE d!='1947-02-11 07:08:09.123'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_timestamp
-POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-2012-02-21 07:08:09.123	{"bar":"1998-05-07 07:08:09.123","foo":"1980-12-16 07:08:09.123"}	["2011-09-04 07:08:09.123","2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-2014-02-11 07:08:09.123	{"baz":"1981-12-16 07:08:09.123"}	["2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-8200-02-11 07:08:09.123	{"baz":"6981-12-16 07:08:09.123"}	["1039-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-PREHOOK: query: SELECT * FROM avro_timestamp WHERE d<'2014-12-21 07:08:09.123'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_timestamp
-PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM avro_timestamp WHERE d<'2014-12-21 07:08:09.123'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_timestamp
-POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-2012-02-21 07:08:09.123	{"bar":"1998-05-07 07:08:09.123","foo":"1980-12-16 07:08:09.123"}	["2011-09-04 07:08:09.123","2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-2014-02-11 07:08:09.123	{"baz":"1981-12-16 07:08:09.123"}	["2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-1947-02-11 07:08:09.123	{"baz":"1921-12-16 07:08:09.123"}	["2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
-PREHOOK: query: SELECT * FROM avro_timestamp WHERE d>'8000-12-01 07:08:09.123'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@avro_timestamp
-PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM avro_timestamp WHERE d>'8000-12-01 07:08:09.123'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@avro_timestamp
-POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
-#### A masked pattern was here ####
-8200-02-11 07:08:09.123	{"baz":"6981-12-16 07:08:09.123"}	["1039-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123

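(Aside: the java1.7 and java1.8 golden files deleted above differ only in the iteration order of the map-typed column: the java1.7 output prints {"foo":...,"bar":...} while the java1.8 output prints {"bar":...,"foo":...}, presumably because the rendered map is hash-ordered and HashMap iteration order changed between those JDKs. A minimal hypothetical demo, not part of this commit, that shows why the same data can print in two orders:

import java.util.HashMap;
import java.util.Map;

// Hypothetical demo: HashMap iteration order is unspecified and changed
// between JDK7 and JDK8, so printing the same two-entry map can yield
// {foo=..., bar=...} on one JDK and {bar=..., foo=...} on the other.
public class MapOrderDemo {
  public static void main(String[] args) {
    Map<String, String> m = new HashMap<>();
    m.put("foo", "1980-12-16 07:08:09.123");
    m.put("bar", "1998-05-07 07:08:09.123");
    System.out.println(m); // order depends on the JDK's HashMap implementation
  }
}

With a single, version-independent ordering in the merged avro_timestamp.q.out below, the JAVA_VERSION_SPECIFIC_OUTPUT marker is no longer needed.)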
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/avro_timestamp.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/avro_timestamp.q.out b/ql/src/test/results/clientpositive/avro_timestamp.q.out
new file mode 100644
index 0000000..868807a
--- /dev/null
+++ b/ql/src/test/results/clientpositive/avro_timestamp.q.out
@@ -0,0 +1,132 @@
+PREHOOK: query: -- Exclude test on Windows due to space character being escaped in Hive paths on Windows.
+-- EXCLUDE_OS_WINDOWS
+
+DROP TABLE avro_timestamp_staging
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: -- Exclude test on Windows due to space character being escaped in Hive paths on Windows.
+-- EXCLUDE_OS_WINDOWS
+
+DROP TABLE avro_timestamp_staging
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE avro_timestamp
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE avro_timestamp
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: DROP TABLE avro_timestamp_casts
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE avro_timestamp_casts
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE avro_timestamp_staging (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
+   ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+   COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+   STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@avro_timestamp_staging
+POSTHOOK: query: CREATE TABLE avro_timestamp_staging (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
+   ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+   COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+   STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@avro_timestamp_staging
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_timestamp.txt' OVERWRITE INTO TABLE avro_timestamp_staging
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@avro_timestamp_staging
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/avro_timestamp.txt' OVERWRITE INTO TABLE avro_timestamp_staging
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@avro_timestamp_staging
+PREHOOK: query: CREATE TABLE avro_timestamp (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
+  PARTITIONED BY (p1 int, p2 timestamp)
+  ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+  COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+  STORED AS AVRO
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@avro_timestamp
+POSTHOOK: query: CREATE TABLE avro_timestamp (d timestamp, m1 map<string, timestamp>, l1 array<timestamp>)
+  PARTITIONED BY (p1 int, p2 timestamp)
+  ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+  COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+  STORED AS AVRO
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@avro_timestamp
+PREHOOK: query: INSERT OVERWRITE TABLE avro_timestamp PARTITION(p1=2, p2='2014-09-26 07:08:09.123') SELECT * FROM avro_timestamp_staging
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_timestamp_staging
+PREHOOK: Output: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+POSTHOOK: query: INSERT OVERWRITE TABLE avro_timestamp PARTITION(p1=2, p2='2014-09-26 07:08:09.123') SELECT * FROM avro_timestamp_staging
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_timestamp_staging
+POSTHOOK: Output: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+POSTHOOK: Lineage: avro_timestamp PARTITION(p1=2,p2=2014-09-26 07:08:09.123).d SIMPLE [(avro_timestamp_staging)avro_timestamp_staging.FieldSchema(name:d, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: avro_timestamp PARTITION(p1=2,p2=2014-09-26 07:08:09.123).l1 SIMPLE [(avro_timestamp_staging)avro_timestamp_staging.FieldSchema(name:l1, type:array<timestamp>, comment:null), ]
+POSTHOOK: Lineage: avro_timestamp PARTITION(p1=2,p2=2014-09-26 07:08:09.123).m1 SIMPLE [(avro_timestamp_staging)avro_timestamp_staging.FieldSchema(name:m1, type:map<string,timestamp>, comment:null), ]
+PREHOOK: query: SELECT * FROM avro_timestamp
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_timestamp
+PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_timestamp
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_timestamp
+POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123	{"bar":"1998-05-07 07:08:09.123","foo":"1980-12-16 07:08:09.123"}	["2011-09-04 07:08:09.123","2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
+2014-02-11 07:08:09.123	{"baz":"1981-12-16 07:08:09.123"}	["2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
+1947-02-11 07:08:09.123	{"baz":"1921-12-16 07:08:09.123"}	["2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
+8200-02-11 07:08:09.123	{"baz":"6981-12-16 07:08:09.123"}	["1039-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
+PREHOOK: query: SELECT d, COUNT(d) FROM avro_timestamp GROUP BY d
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_timestamp
+PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT d, COUNT(d) FROM avro_timestamp GROUP BY d
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_timestamp
+POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+1947-02-11 07:08:09.123	1
+2012-02-21 07:08:09.123	1
+2014-02-11 07:08:09.123	1
+8200-02-11 07:08:09.123	1
+PREHOOK: query: SELECT * FROM avro_timestamp WHERE d!='1947-02-11 07:08:09.123'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_timestamp
+PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_timestamp WHERE d!='1947-02-11 07:08:09.123'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_timestamp
+POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123	{"bar":"1998-05-07 07:08:09.123","foo":"1980-12-16 07:08:09.123"}	["2011-09-04 07:08:09.123","2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
+2014-02-11 07:08:09.123	{"baz":"1981-12-16 07:08:09.123"}	["2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
+8200-02-11 07:08:09.123	{"baz":"6981-12-16 07:08:09.123"}	["1039-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
+PREHOOK: query: SELECT * FROM avro_timestamp WHERE d<'2014-12-21 07:08:09.123'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_timestamp
+PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_timestamp WHERE d<'2014-12-21 07:08:09.123'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_timestamp
+POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123	{"bar":"1998-05-07 07:08:09.123","foo":"1980-12-16 07:08:09.123"}	["2011-09-04 07:08:09.123","2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
+2014-02-11 07:08:09.123	{"baz":"1981-12-16 07:08:09.123"}	["2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
+1947-02-11 07:08:09.123	{"baz":"1921-12-16 07:08:09.123"}	["2011-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123
+PREHOOK: query: SELECT * FROM avro_timestamp WHERE d>'8000-12-01 07:08:09.123'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@avro_timestamp
+PREHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM avro_timestamp WHERE d>'8000-12-01 07:08:09.123'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@avro_timestamp
+POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
+#### A masked pattern was here ####
+8200-02-11 07:08:09.123	{"baz":"6981-12-16 07:08:09.123"}	["1039-09-05 07:08:09.123"]	2	2014-09-26 07:08:09.123


[33/34] hive git commit: HIVE-13409: Fix JDK8 test failures related to COLUMN_STATS_ACCURATE (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
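(Aside: every hunk below rewrites the COLUMN_STATS_ACCURATE property from {"COLUMN_STATS":{...},"BASIC_STATS":"true"} to {"BASIC_STATS":"true","COLUMN_STATS":{...}}, i.e. to alphabetical key order, which is stable across JDKs. A minimal sketch of the idea, with an assumed helper that is not Hive's actual serializer: writing the flags through a TreeMap makes the JSON key order deterministic, so JDK7 and JDK8 produce identical golden output.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;

public class StatsJsonDemo {
  // Assumed helper (not Hive's API): render a possibly nested map as JSON
  // with keys in natural (alphabetical) order.
  static String toSortedJson(Map<String, Object> m) {
    StringBuilder sb = new StringBuilder("{");
    boolean first = true;
    for (Map.Entry<String, Object> e : new TreeMap<>(m).entrySet()) {
      if (!first) {
        sb.append(",");
      }
      first = false;
      sb.append("\"").append(e.getKey()).append("\":");
      Object v = e.getValue();
      if (v instanceof Map) {
        @SuppressWarnings("unchecked")
        Map<String, Object> nested = (Map<String, Object>) v;
        sb.append(toSortedJson(nested));
      } else {
        sb.append("\"").append(v).append("\"");
      }
    }
    return sb.append("}").toString();
  }

  public static void main(String[] args) {
    Map<String, Object> columnStats = new LinkedHashMap<>();
    columnStats.put("key", "true");
    columnStats.put("value", "true");
    Map<String, Object> accurate = new LinkedHashMap<>();
    accurate.put("COLUMN_STATS", columnStats);
    accurate.put("BASIC_STATS", "true");
    // Always prints, on any JDK:
    // {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
    System.out.println(toSortedJson(accurate));
  }
}

Each .q.out diff that follows records exactly this reordering in the golden output.)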
http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/join33.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join33.q.out b/ql/src/test/results/clientpositive/join33.q.out
index 8653c2f..bebb007 100644
--- a/ql/src/test/results/clientpositive/join33.q.out
+++ b/ql/src/test/results/clientpositive/join33.q.out
@@ -159,7 +159,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -179,7 +179,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -203,7 +203,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -223,7 +223,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -250,7 +250,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/join34.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join34.q.out b/ql/src/test/results/clientpositive/join34.q.out
index bb23644..365992b 100644
--- a/ql/src/test/results/clientpositive/join34.q.out
+++ b/ql/src/test/results/clientpositive/join34.q.out
@@ -197,7 +197,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -217,7 +217,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -241,7 +241,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -261,7 +261,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/join35.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join35.q.out b/ql/src/test/results/clientpositive/join35.q.out
index b1732ec..2c2681f 100644
--- a/ql/src/test/results/clientpositive/join35.q.out
+++ b/ql/src/test/results/clientpositive/join35.q.out
@@ -82,7 +82,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -102,7 +102,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -324,7 +324,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -344,7 +344,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -435,7 +435,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -455,7 +455,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/join9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join9.q.out b/ql/src/test/results/clientpositive/join9.q.out
index 180d46c..efddb5d 100644
--- a/ql/src/test/results/clientpositive/join9.q.out
+++ b/ql/src/test/results/clientpositive/join9.q.out
@@ -77,7 +77,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -97,7 +97,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -124,7 +124,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/join_map_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join_map_ppr.q.out b/ql/src/test/results/clientpositive/join_map_ppr.q.out
index 928d4fb..e44ceac 100644
--- a/ql/src/test/results/clientpositive/join_map_ppr.q.out
+++ b/ql/src/test/results/clientpositive/join_map_ppr.q.out
@@ -148,7 +148,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -716,7 +716,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/list_bucket_dml_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_1.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_1.q.out
index 2b7ebb2..1d43bc0 100644
--- a/ql/src/test/results/clientpositive/list_bucket_dml_1.q.out
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_1.q.out
@@ -88,7 +88,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -134,7 +134,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/list_bucket_dml_14.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_14.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_14.q.out
index 5016855..ebbbb26 100644
--- a/ql/src/test/results/clientpositive/list_bucket_dml_14.q.out
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_14.q.out
@@ -85,7 +85,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -105,7 +105,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/list_bucket_dml_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_3.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_3.q.out
index 548815a..385e113 100644
--- a/ql/src/test/results/clientpositive/list_bucket_dml_3.q.out
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_3.q.out
@@ -82,7 +82,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -128,7 +128,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/list_bucket_dml_7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_7.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_7.q.out
index b8757de..224ecc2 100644
--- a/ql/src/test/results/clientpositive/list_bucket_dml_7.q.out
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_7.q.out
@@ -114,7 +114,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -160,7 +160,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -422,7 +422,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -468,7 +468,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/load_dyn_part8.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/load_dyn_part8.q.out b/ql/src/test/results/clientpositive/load_dyn_part8.q.out
index 757e7dd..a8247f5 100644
--- a/ql/src/test/results/clientpositive/load_dyn_part8.q.out
+++ b/ql/src/test/results/clientpositive/load_dyn_part8.q.out
@@ -148,7 +148,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -194,7 +194,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -240,7 +240,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -286,7 +286,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/louter_join_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/louter_join_ppr.q.out b/ql/src/test/results/clientpositive/louter_join_ppr.q.out
index fd127ec..c1319f8 100644
--- a/ql/src/test/results/clientpositive/louter_join_ppr.q.out
+++ b/ql/src/test/results/clientpositive/louter_join_ppr.q.out
@@ -79,7 +79,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -99,7 +99,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -126,7 +126,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -172,7 +172,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -371,7 +371,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -391,7 +391,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -418,7 +418,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -464,7 +464,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -510,7 +510,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -556,7 +556,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -770,7 +770,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -790,7 +790,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -817,7 +817,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -863,7 +863,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1058,7 +1058,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1078,7 +1078,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -1105,7 +1105,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1151,7 +1151,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out b/ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out
index 85bd14b..17a1cde 100644
--- a/ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out
+++ b/ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out
@@ -129,7 +129,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -149,7 +149,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -173,7 +173,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -193,7 +193,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -220,7 +220,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -266,7 +266,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -312,7 +312,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -358,7 +358,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/offset_limit_global_optimizer.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/offset_limit_global_optimizer.q.out b/ql/src/test/results/clientpositive/offset_limit_global_optimizer.q.out
index 7d8655f..a3cc93e 100644
--- a/ql/src/test/results/clientpositive/offset_limit_global_optimizer.q.out
+++ b/ql/src/test/results/clientpositive/offset_limit_global_optimizer.q.out
@@ -58,7 +58,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -190,7 +190,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -236,7 +236,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -282,7 +282,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -328,7 +328,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -463,7 +463,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -509,7 +509,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -555,7 +555,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -601,7 +601,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -746,7 +746,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -792,7 +792,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -838,7 +838,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -884,7 +884,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1609,7 +1609,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1741,7 +1741,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1787,7 +1787,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1833,7 +1833,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1879,7 +1879,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -2014,7 +2014,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -2060,7 +2060,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -2106,7 +2106,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -2152,7 +2152,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -2297,7 +2297,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -2343,7 +2343,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -2389,7 +2389,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -2435,7 +2435,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/optimize_nullscan.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/optimize_nullscan.q.out b/ql/src/test/results/clientpositive/optimize_nullscan.q.out
index 4a693d6..b045cc5 100644
--- a/ql/src/test/results/clientpositive/optimize_nullscan.q.out
+++ b/ql/src/test/results/clientpositive/optimize_nullscan.q.out
@@ -64,7 +64,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -84,7 +84,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -252,7 +252,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -297,7 +297,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -342,7 +342,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -387,7 +387,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -518,7 +518,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -538,7 +538,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -660,7 +660,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -680,7 +680,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -861,7 +861,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -907,7 +907,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -953,7 +953,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -999,7 +999,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1140,7 +1140,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1185,7 +1185,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1230,7 +1230,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1275,7 +1275,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1404,7 +1404,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1424,7 +1424,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -1558,7 +1558,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1578,7 +1578,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -1720,7 +1720,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1740,7 +1740,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -1834,7 +1834,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1854,7 +1854,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -1967,7 +1967,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1987,7 +1987,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/partition_coltype_literals.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/partition_coltype_literals.q.out b/ql/src/test/results/clientpositive/partition_coltype_literals.q.out
index b7a09d2..06c178c 100644
--- a/ql/src/test/results/clientpositive/partition_coltype_literals.q.out
+++ b/ql/src/test/results/clientpositive/partition_coltype_literals.q.out
@@ -373,7 +373,7 @@ Database:           	default
 Table:              	partcoltypenum      	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 #### A masked pattern was here ####
 	numFiles            	2                   
 	numRows             	30                  
@@ -424,7 +424,7 @@ Database:           	default
 Table:              	partcoltypenum      	 
 #### A masked pattern was here ####
 Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"},\"BASIC_STATS\":\"true\"}
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
 #### A masked pattern was here ####
 	numFiles            	2                   
 	numRows             	30                  

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/pcr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/pcr.q.out b/ql/src/test/results/clientpositive/pcr.q.out
index 9daddfb..9fb1481 100644
--- a/ql/src/test/results/clientpositive/pcr.q.out
+++ b/ql/src/test/results/clientpositive/pcr.q.out
@@ -4635,7 +4635,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -4764,7 +4764,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -4810,7 +4810,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -4943,7 +4943,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -4989,7 +4989,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/pcs.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/pcs.q.out b/ql/src/test/results/clientpositive/pcs.q.out
index 8b99401..0045c1c 100644
--- a/ql/src/test/results/clientpositive/pcs.q.out
+++ b/ql/src/test/results/clientpositive/pcs.q.out
@@ -120,7 +120,7 @@ STAGE PLANS:
             partition values:
               ds 2000-04-08
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 
@@ -165,7 +165,7 @@ STAGE PLANS:
             partition values:
               ds 2000-04-09
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 
@@ -310,7 +310,7 @@ STAGE PLANS:
             partition values:
               ds 2000-04-08
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 
@@ -355,7 +355,7 @@ STAGE PLANS:
             partition values:
               ds 2000-04-09
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 
@@ -472,7 +472,7 @@ STAGE PLANS:
             partition values:
               ds 2000-04-08
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 
@@ -517,7 +517,7 @@ STAGE PLANS:
             partition values:
               ds 2000-04-09
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 
@@ -626,7 +626,7 @@ STAGE PLANS:
             partition values:
               ds 2000-04-08
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 
@@ -671,7 +671,7 @@ STAGE PLANS:
             partition values:
               ds 2000-04-09
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 
@@ -827,7 +827,7 @@ STAGE PLANS:
             partition values:
               ds 2000-04-08
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 
@@ -872,7 +872,7 @@ STAGE PLANS:
             partition values:
               ds 2000-04-09
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 
@@ -1164,7 +1164,7 @@ STAGE PLANS:
             partition values:
               ds 2000-04-08
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 
@@ -1303,7 +1303,7 @@ STAGE PLANS:
             partition values:
               ds 2000-04-08
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 
@@ -1348,7 +1348,7 @@ STAGE PLANS:
             partition values:
               ds 2000-04-09
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 
@@ -1413,7 +1413,7 @@ STAGE PLANS:
             partition values:
               ds 2000-04-08
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 
@@ -1456,7 +1456,7 @@ STAGE PLANS:
             partition values:
               ds 2000-04-09
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 
@@ -1499,7 +1499,7 @@ STAGE PLANS:
             partition values:
               ds 2000-04-10
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 
@@ -1586,7 +1586,7 @@ STAGE PLANS:
             partition values:
               ds 2000-04-08
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 
@@ -1629,7 +1629,7 @@ STAGE PLANS:
             partition values:
               ds 2000-04-09
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 
@@ -1672,7 +1672,7 @@ STAGE PLANS:
             partition values:
               ds 2000-04-10
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/ppd_join_filter.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/ppd_join_filter.q.out b/ql/src/test/results/clientpositive/ppd_join_filter.q.out
index d8e5009..b63161b 100644
--- a/ql/src/test/results/clientpositive/ppd_join_filter.q.out
+++ b/ql/src/test/results/clientpositive/ppd_join_filter.q.out
@@ -65,7 +65,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -85,7 +85,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -202,7 +202,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -222,7 +222,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -387,7 +387,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -407,7 +407,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -524,7 +524,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -544,7 +544,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -709,7 +709,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -729,7 +729,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -846,7 +846,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -866,7 +866,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -1031,7 +1031,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1051,7 +1051,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -1168,7 +1168,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -1188,7 +1188,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'

http://git-wip-us.apache.org/repos/asf/hive/blob/cdb872a1/ql/src/test/results/clientpositive/ppd_vc.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/ppd_vc.q.out b/ql/src/test/results/clientpositive/ppd_vc.q.out
index a82a709..21181ac 100644
--- a/ql/src/test/results/clientpositive/ppd_vc.q.out
+++ b/ql/src/test/results/clientpositive/ppd_vc.q.out
@@ -62,7 +62,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -108,7 +108,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -154,7 +154,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -200,7 +200,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -370,7 +370,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.TextInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -390,7 +390,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
@@ -417,7 +417,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -463,7 +463,7 @@ STAGE PLANS:
               ds 2008-04-08
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -509,7 +509,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 11
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
@@ -555,7 +555,7 @@ STAGE PLANS:
               ds 2008-04-09
               hr 12
             properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
               columns key,value
               columns.comments 'default','default'
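
Editor's note: every hunk above makes the same mechanical change — the keys inside the
serialized COLUMN_STATS_ACCURATE parameter now appear in sorted order (BASIC_STATS before
COLUMN_STATS). The root cause is that HashMap iteration order is unspecified and changed
between JDK7 and JDK8, so golden files captured on one JDK stopped matching output produced
on the other. Below is a minimal, self-contained sketch of the issue, assuming plain JDK
collections rather than Hive's actual stats serializer; the class name, map names, and the
TreeMap fix are illustrative only, not the patch itself:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;

    public class StatsJsonOrder {
        public static void main(String[] args) {
            // Nested layout mirroring the COLUMN_STATS_ACCURATE value in the
            // hunks above (the real parameter is JSON-serialized by Hive).
            Map<String, Object> columnStats = new HashMap<>();
            columnStats.put("key", "true");
            columnStats.put("value", "true");

            Map<String, Object> accurate = new HashMap<>();
            accurate.put("COLUMN_STATS", columnStats);
            accurate.put("BASIC_STATS", "true");

            // HashMap guarantees no ordering, and its iteration order differs
            // across JDK releases, so rendering it directly is nondeterministic.
            System.out.println(accurate);

            // Copying into a TreeMap sorts keys, pinning the rendering to
            // {BASIC_STATS=..., COLUMN_STATS=...} on every JDK.
            System.out.println(new TreeMap<>(accurate));
        }
    }

Sorting the keys at serialization time is one way to make such output stable; the updated
expected files above simply reflect the now-deterministic, alphabetized ordering.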


[13/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_9.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_9.q.out
new file mode 100644
index 0000000..81f3af3
--- /dev/null
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_9.q.out
@@ -0,0 +1,811 @@
+PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- SORT_QUERY_RESULTS
+
+-- list bucketing DML: static partition. multiple skewed columns. merge.
+-- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+--  5263 000000_0
+--  5263 000001_0
+-- ds=2008-04-08/hr=11/key=103:
+-- 99 000000_0
+-- 99 000001_0
+-- after merge
+-- 142 000000_0
+-- ds=2008-04-08/hr=11/key=484:
+-- 87 000000_0
+-- 87 000001_0
+-- after merge
+-- 118 000001_0
+
+-- create a skewed table
+create table list_bucketing_static_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (key) on ('484','103')
+    stored as DIRECTORIES
+    STORED AS RCFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@list_bucketing_static_part
+POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- SORT_QUERY_RESULTS
+
+-- list bucketing DML: static partition. multiple skewed columns. merge.
+-- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+--  5263 000000_0
+--  5263 000001_0
+-- ds=2008-04-08/hr=11/key=103:
+-- 99 000000_0
+-- 99 000001_0
+-- after merge
+-- 142 000000_0
+-- ds=2008-04-08/hr=11/key=484:
+-- 87 000000_0
+-- 87 000001_0
+-- after merge
+-- 118 000001_0
+
+-- create a skewed table
+create table list_bucketing_static_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (key) on ('484','103')
+    stored as DIRECTORIES
+    STORED AS RCFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@list_bucketing_static_part
+PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: srcpart
+            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Static Partition Specification: ds=2008-04-08/hr=11/
+                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns key,value
+                      columns.comments 
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.list_bucketing_static_part
+                      partition_columns ds/hr
+                      partition_columns.types string:string
+                      serialization.ddl struct list_bucketing_static_part { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.list_bucketing_static_part
+                TotalFiles: 1
+                GatherStats: true
+                MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=11
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=12
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+      Truncated Path -> Alias:
+        /srcpart/ds=2008-04-08/hr=11 [srcpart]
+        /srcpart/ds=2008-04-08/hr=12 [srcpart]
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 11
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@list_bucketing_static_part
+ds=2008-04-08/hr=11
+PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_static_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 11]    	 
+Database:           	default             	 
+Table:              	list_bucketing_static_part	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	6                   
+	numRows             	1000                
+	rawDataSize         	9624                
+	totalSize           	10898               
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key]               	 
+Skewed Values:      	[[484], [103]]      	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103, [484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
+  Stage-4
+  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
+  Stage-2 depends on stages: Stage-0
+  Stage-3
+  Stage-5
+  Stage-6 depends on stages: Stage-5
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: srcpart
+            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Static Partition Specification: ds=2008-04-08/hr=11/
+                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns key,value
+                      columns.comments 
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.list_bucketing_static_part
+                      partition_columns.types string:string
+                      serialization.ddl struct list_bucketing_static_part { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.list_bucketing_static_part
+                TotalFiles: 1
+                GatherStats: true
+                MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=11
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=12
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+      Truncated Path -> Alias:
+        /srcpart/ds=2008-04-08/hr=11 [srcpart]
+        /srcpart/ds=2008-04-08/hr=12 [srcpart]
+
+  Stage: Stage-7
+    Conditional Operator
+
+  Stage: Stage-4
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 11
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+  Stage: Stage-3
+    Merge File Operator
+      Map Operator Tree:
+          RCFile Merge Operator
+      merge level: block
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_static_part
+              partition_columns.types string:string
+              serialization.ddl struct list_bucketing_static_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+            name: default.list_bucketing_static_part
+      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+      Truncated Path -> Alias:
+#### A masked pattern was here ####
+
+  Stage: Stage-5
+    Merge File Operator
+      Map Operator Tree:
+          RCFile Merge Operator
+      merge level: block
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_static_part
+              partition_columns.types string:string
+              serialization.ddl struct list_bucketing_static_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+            name: default.list_bucketing_static_part
+      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+      Truncated Path -> Alias:
+#### A masked pattern was here ####
+
+  Stage: Stage-6
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@list_bucketing_static_part
+ds=2008-04-08/hr=11
+PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_static_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 11]    	 
+Database:           	default             	 
+Table:              	list_bucketing_static_part	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	4                   
+	numRows             	1000                
+	rawDataSize         	9624                
+	totalSize           	10786               
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key]               	 
+Skewed Values:      	[[484], [103]]      	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103, [484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+1000
+PREHOOK: query: select count(*) from list_bucketing_static_part
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from list_bucketing_static_part
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+1000
+PREHOOK: query: explain extended
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Partition Description:
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_static_part
+              numFiles 4
+              numRows 1000
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 9624
+              serialization.ddl struct list_bucketing_static_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              totalSize 10786
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+            name: default.list_bucketing_static_part
+      Processor Tree:
+        TableScan
+          alias: list_bucketing_static_part
+          Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
+          GatherStats: false
+          Filter Operator
+            isSamplingPred: false
+            predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
+            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: '484' (type: string), 'val_484' (type: string), '2008-04-08' (type: string), '11' (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
+              ListSink
+
+PREHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+484	val_484	2008-04-08	11
+484	val_484	2008-04-08	11
+PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+484	val_484	2008-04-08	11
+484	val_484	2008-04-08	12
+PREHOOK: query: -- clean up
+drop table list_bucketing_static_part
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Output: default@list_bucketing_static_part
+POSTHOOK: query: -- clean up
+drop table list_bucketing_static_part
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Output: default@list_bucketing_static_part

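A note for readers skimming these golden-file updates: the substantive change in the .q.out hunks above is that the expected COLUMN_STATS_ACCURATE value flipped from {"COLUMN_STATS":{...},"BASIC_STATS":"true"} to {"BASIC_STATS":"true","COLUMN_STATS":{...}}. The parameter is a small JSON map of stats flags, and the old expected strings baked in whatever key order the JDK's HashMap happened to produce, which differs between JDK 7 and JDK 8. The sketch below is illustrative only (the class, map, and printing are made up, not Hive's actual serializer); it shows the failure mode and one way to make the key order deterministic, matching the lexicographic BASIC_STATS-before-COLUMN_STATS order seen in the updated files.

    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;

    // Illustrative sketch only -- not Hive code; all names are hypothetical.
    public class ColumnStatsAccurateOrder {
      public static void main(String[] args) {
        Map<String, String> params = new HashMap<>();
        params.put("COLUMN_STATS", "{\"key\":\"true\",\"value\":\"true\"}");
        params.put("BASIC_STATS", "true");

        // HashMap iteration order is unspecified and changed between
        // JDK 7 and JDK 8, so a JSON string emitted straight from this
        // map is not a stable test fixture.
        System.out.println(params.keySet());

        // A sorted view iterates keys lexicographically on every JDK:
        // [BASIC_STATS, COLUMN_STATS], the order in the new golden files.
        System.out.println(new TreeMap<>(params).keySet());
      }
    }

The first println may print its keys in either order depending on the JDK; the second always prints [BASIC_STATS, COLUMN_STATS].
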
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/llap/join0.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/join0.q.java1.7.out b/ql/src/test/results/clientpositive/llap/join0.q.java1.7.out
deleted file mode 100644
index 5651839..0000000
--- a/ql/src/test/results/clientpositive/llap/join0.q.java1.7.out
+++ /dev/null
@@ -1,242 +0,0 @@
-Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
--- SORT_QUERY_RESULTS
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
--- SORT_QUERY_RESULTS
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key < 10) (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: string), _col1 (type: string)
-            Execution mode: llap
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key < 10) (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: string), _col1 (type: string)
-            Execution mode: llap
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Merge Join Operator
-                condition map:
-                     Inner Join 0 to 1
-                keys:
-                  0 
-                  1 
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
-                  sort order: ++++
-                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-        Reducer 3 
-            Execution mode: uber
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
-PREHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-#### A masked pattern was here ####
-Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
-PREHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	2	val_2
-0	val_0	2	val_2
-0	val_0	2	val_2
-0	val_0	4	val_4
-0	val_0	4	val_4
-0	val_0	4	val_4
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	8	val_8
-0	val_0	8	val_8
-0	val_0	8	val_8
-0	val_0	9	val_9
-0	val_0	9	val_9
-0	val_0	9	val_9
-2	val_2	0	val_0
-2	val_2	0	val_0
-2	val_2	0	val_0
-2	val_2	2	val_2
-2	val_2	4	val_4
-2	val_2	5	val_5
-2	val_2	5	val_5
-2	val_2	5	val_5
-2	val_2	8	val_8
-2	val_2	9	val_9
-4	val_4	0	val_0
-4	val_4	0	val_0
-4	val_4	0	val_0
-4	val_4	2	val_2
-4	val_4	4	val_4
-4	val_4	5	val_5
-4	val_4	5	val_5
-4	val_4	5	val_5
-4	val_4	8	val_8
-4	val_4	9	val_9
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	2	val_2
-5	val_5	2	val_2
-5	val_5	2	val_2
-5	val_5	4	val_4
-5	val_5	4	val_4
-5	val_5	4	val_4
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	8	val_8
-5	val_5	8	val_8
-5	val_5	8	val_8
-5	val_5	9	val_9
-5	val_5	9	val_9
-5	val_5	9	val_9
-8	val_8	0	val_0
-8	val_8	0	val_0
-8	val_8	0	val_0
-8	val_8	2	val_2
-8	val_8	4	val_4
-8	val_8	5	val_5
-8	val_8	5	val_5
-8	val_8	5	val_5
-8	val_8	8	val_8
-8	val_8	9	val_9
-9	val_9	0	val_0
-9	val_9	0	val_0
-9	val_9	0	val_0
-9	val_9	2	val_2
-9	val_9	4	val_4
-9	val_9	5	val_5
-9	val_9	5	val_5
-9	val_9	5	val_5
-9	val_9	8	val_8
-9	val_9	9	val_9

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/llap/join0.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/join0.q.java1.8.out b/ql/src/test/results/clientpositive/llap/join0.q.java1.8.out
deleted file mode 100644
index 5651839..0000000
--- a/ql/src/test/results/clientpositive/llap/join0.q.java1.8.out
+++ /dev/null
@@ -1,242 +0,0 @@
-Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
--- SORT_QUERY_RESULTS
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
--- SORT_QUERY_RESULTS
-
-EXPLAIN
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key < 10) (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: string), _col1 (type: string)
-            Execution mode: llap
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (key < 10) (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string), value (type: string)
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: string), _col1 (type: string)
-            Execution mode: llap
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Merge Join Operator
-                condition map:
-                     Inner Join 0 to 1
-                keys:
-                  0 
-                  1 
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
-                  sort order: ++++
-                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-        Reducer 3 
-            Execution mode: uber
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
-PREHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN FORMATTED
-SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-#### A masked pattern was here ####
-Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
-PREHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT src1.key as k1, src1.value as v1, 
-       src2.key as k2, src2.value as v2 FROM 
-  (SELECT * FROM src WHERE src.key < 10) src1 
-    JOIN 
-  (SELECT * FROM src WHERE src.key < 10) src2
-  SORT BY k1, v1, k2, v2
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-#### A masked pattern was here ####
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	0	val_0
-0	val_0	2	val_2
-0	val_0	2	val_2
-0	val_0	2	val_2
-0	val_0	4	val_4
-0	val_0	4	val_4
-0	val_0	4	val_4
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	5	val_5
-0	val_0	8	val_8
-0	val_0	8	val_8
-0	val_0	8	val_8
-0	val_0	9	val_9
-0	val_0	9	val_9
-0	val_0	9	val_9
-2	val_2	0	val_0
-2	val_2	0	val_0
-2	val_2	0	val_0
-2	val_2	2	val_2
-2	val_2	4	val_4
-2	val_2	5	val_5
-2	val_2	5	val_5
-2	val_2	5	val_5
-2	val_2	8	val_8
-2	val_2	9	val_9
-4	val_4	0	val_0
-4	val_4	0	val_0
-4	val_4	0	val_0
-4	val_4	2	val_2
-4	val_4	4	val_4
-4	val_4	5	val_5
-4	val_4	5	val_5
-4	val_4	5	val_5
-4	val_4	8	val_8
-4	val_4	9	val_9
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	0	val_0
-5	val_5	2	val_2
-5	val_5	2	val_2
-5	val_5	2	val_2
-5	val_5	4	val_4
-5	val_5	4	val_4
-5	val_5	4	val_4
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	8	val_8
-5	val_5	8	val_8
-5	val_5	8	val_8
-5	val_5	9	val_9
-5	val_5	9	val_9
-5	val_5	9	val_9
-8	val_8	0	val_0
-8	val_8	0	val_0
-8	val_8	0	val_0
-8	val_8	2	val_2
-8	val_8	4	val_4
-8	val_8	5	val_5
-8	val_8	5	val_5
-8	val_8	5	val_5
-8	val_8	8	val_8
-8	val_8	9	val_9
-9	val_9	0	val_0
-9	val_9	0	val_0
-9	val_9	0	val_0
-9	val_9	2	val_2
-9	val_9	4	val_4
-9	val_9	5	val_5
-9	val_9	5	val_5
-9	val_9	5	val_5
-9	val_9	8	val_8
-9	val_9	9	val_9

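The two deletions above remove join0.q.java1.7.out and join0.q.java1.8.out; both diffs report the same pre-image blob (index 5651839..0000000), so the files were byte-identical, and with the JAVA_VERSION_SPECIFIC_OUTPUT marker gone a single join0.q.out (added next) replaces them. As a hedged aside, a throwaway check like the one below -- not part of the Hive test harness, and the paths are placeholders -- is enough to confirm two golden files really match before consolidating them:

    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.util.Arrays;

    // Hypothetical one-off check; run from the directory holding the
    // two version-specific golden files.
    public class GoldenFilesIdentical {
      public static void main(String[] args) throws Exception {
        byte[] a = Files.readAllBytes(Paths.get("join0.q.java1.7.out"));
        byte[] b = Files.readAllBytes(Paths.get("join0.q.java1.8.out"));
        System.out.println(Arrays.equals(a, b) ? "identical" : "differ");
      }
    }

Note the replacement join0.q.out is not a plain rename: it also picks up "LLAP IO: no inputs" lines, a SequenceFile sink, and llap execution mode for Reducer 3, so the consolidated file reflects the current plan output rather than either deleted variant verbatim.
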
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/llap/join0.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/join0.q.out b/ql/src/test/results/clientpositive/llap/join0.q.out
new file mode 100644
index 0000000..f177afc
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/join0.q.out
@@ -0,0 +1,243 @@
+Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: -- SORT_QUERY_RESULTS
+
+EXPLAIN
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+PREHOOK: type: QUERY
+POSTHOOK: query: -- SORT_QUERY_RESULTS
+
+EXPLAIN
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Map 4 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (key < 10) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: string), _col1 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 4 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (key < 10) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: string), _col1 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                keys:
+                  0 
+                  1 
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
+                  sort order: ++++
+                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: EXPLAIN FORMATTED
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN FORMATTED
+SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+POSTHOOK: type: QUERY
+#### A masked pattern was here ####
+Warning: Shuffle Join MERGEJOIN[15][tables = [src1, src2]] in Stage 'Reducer 2' is a cross product
+PREHOOK: query: SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT src1.key as k1, src1.value as v1, 
+       src2.key as k2, src2.value as v2 FROM 
+  (SELECT * FROM src WHERE src.key < 10) src1 
+    JOIN 
+  (SELECT * FROM src WHERE src.key < 10) src2
+  SORT BY k1, v1, k2, v2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	0	val_0
+0	val_0	2	val_2
+0	val_0	2	val_2
+0	val_0	2	val_2
+0	val_0	4	val_4
+0	val_0	4	val_4
+0	val_0	4	val_4
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	5	val_5
+0	val_0	8	val_8
+0	val_0	8	val_8
+0	val_0	8	val_8
+0	val_0	9	val_9
+0	val_0	9	val_9
+0	val_0	9	val_9
+2	val_2	0	val_0
+2	val_2	0	val_0
+2	val_2	0	val_0
+2	val_2	2	val_2
+2	val_2	4	val_4
+2	val_2	5	val_5
+2	val_2	5	val_5
+2	val_2	5	val_5
+2	val_2	8	val_8
+2	val_2	9	val_9
+4	val_4	0	val_0
+4	val_4	0	val_0
+4	val_4	0	val_0
+4	val_4	2	val_2
+4	val_4	4	val_4
+4	val_4	5	val_5
+4	val_4	5	val_5
+4	val_4	5	val_5
+4	val_4	8	val_8
+4	val_4	9	val_9
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	0	val_0
+5	val_5	2	val_2
+5	val_5	2	val_2
+5	val_5	2	val_2
+5	val_5	4	val_4
+5	val_5	4	val_4
+5	val_5	4	val_4
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	8	val_8
+5	val_5	8	val_8
+5	val_5	8	val_8
+5	val_5	9	val_9
+5	val_5	9	val_9
+5	val_5	9	val_9
+8	val_8	0	val_0
+8	val_8	0	val_0
+8	val_8	0	val_0
+8	val_8	2	val_2
+8	val_8	4	val_4
+8	val_8	5	val_5
+8	val_8	5	val_5
+8	val_8	5	val_5
+8	val_8	8	val_8
+8	val_8	9	val_9
+9	val_9	0	val_0
+9	val_9	0	val_0
+9	val_9	0	val_0
+9	val_9	2	val_2
+9	val_9	4	val_4
+9	val_9	5	val_5
+9	val_9	5	val_5
+9	val_9	5	val_5
+9	val_9	8	val_8
+9	val_9	9	val_9

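One more orientation point on the new golden file above: the "cross product" warning is expected, since the query joins two subqueries with no join key, so every qualifying row pairs with every qualifying row. A minimal sketch of the arithmetic (key multiplicities read off the result rows above, not from Hive internals):

    import java.util.Arrays;
    import java.util.List;

    // Illustration only: keys of src below 10, with multiplicity as seen
    // in the result rows above (three 0s, one 2, one 4, three 5s, 8, 9).
    public class CrossProductCount {
      public static void main(String[] args) {
        List<Integer> keys = Arrays.asList(0, 0, 0, 2, 4, 5, 5, 5, 8, 9);
        int pairs = 0;
        for (int k1 : keys) {
          for (int k2 : keys) {
            pairs++;  // an unconditioned join pairs every row with every row
          }
        }
        System.out.println(pairs);  // 100, matching the 100 output rows above
      }
    }
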
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.java1.7.out b/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.java1.7.out
deleted file mode 100644
index 22b5d93..0000000
--- a/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.java1.7.out
+++ /dev/null
@@ -1,217 +0,0 @@
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE over1korc
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE over1korc
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1k
-POSTHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1k
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@over1k
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@over1k
-PREHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1korc
-PREHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1korc
-POSTHOOK: Lineage: over1korc.b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1korc.bin SIMPLE [(over1k)over1k.FieldSchema(name:bin, type:binary, comment:null), ]
-POSTHOOK: Lineage: over1korc.bo SIMPLE [(over1k)over1k.FieldSchema(name:bo, type:boolean, comment:null), ]
-POSTHOOK: Lineage: over1korc.d SIMPLE [(over1k)over1k.FieldSchema(name:d, type:double, comment:null), ]
-POSTHOOK: Lineage: over1korc.dec SIMPLE [(over1k)over1k.FieldSchema(name:dec, type:decimal(4,2), comment:null), ]
-POSTHOOK: Lineage: over1korc.f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1korc.i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1korc.s SIMPLE [(over1k)over1k.FieldSchema(name:s, type:string, comment:null), ]
-POSTHOOK: Lineage: over1korc.si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1korc.t SIMPLE [(over1k)over1k.FieldSchema(name:t, type:tinyint, comment:null), ]
-POSTHOOK: Lineage: over1korc.ts SIMPLE [(over1k)over1k.FieldSchema(name:ts, type:timestamp, comment:null), ]
-PREHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: over1korc
-                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: i (type: int)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      aggregations: avg(50), avg(50.0), avg(50)
-                      keys: _col0 (type: int)
-                      mode: hash
-                      outputColumnNames: _col0, _col1, _col2, _col3
-                      Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: int)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col1 (type: struct<count:bigint,sum:double,input:int>), _col2 (type: struct<count:bigint,sum:double,input:double>), _col3 (type: struct<count:bigint,sum:decimal(12,0),input:decimal(10,0)>)
-            Execution mode: vectorized, llap
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: avg(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2)
-                keys: KEY._col0 (type: int)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: int)
-                  sort order: +
-                  Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: decimal(14,4))
-        Reducer 3 
-            Execution mode: vectorized, uber
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: decimal(14,4))
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                Limit
-                  Number of rows: 10
-                  Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: 10
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-65536	50.0	50.0	50
-65537	50.0	50.0	50
-65538	50.0	50.0	50
-65539	50.0	50.0	50
-65540	50.0	50.0	50
-65541	50.0	50.0	50
-65542	50.0	50.0	50
-65543	50.0	50.0	50
-65544	50.0	50.0	50
-65545	50.0	50.0	50

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.java1.8.out b/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.java1.8.out
deleted file mode 100644
index 22b5d93..0000000
--- a/ql/src/test/results/clientpositive/llap/vector_cast_constant.q.java1.8.out
+++ /dev/null
@@ -1,217 +0,0 @@
-PREHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: -- JAVA_VERSION_SPECIFIC_OUTPUT
-
-DROP TABLE over1k
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: DROP TABLE over1korc
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE over1korc
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1k
-POSTHOOK: query: -- data setup
-CREATE TABLE over1k(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
-STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1k
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@over1k
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE over1k
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@over1k
-PREHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: CREATE TABLE over1korc(t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-STORED AS ORC
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1korc
-PREHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1korc
-POSTHOOK: query: INSERT INTO TABLE over1korc SELECT * FROM over1k
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1korc
-POSTHOOK: Lineage: over1korc.b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1korc.bin SIMPLE [(over1k)over1k.FieldSchema(name:bin, type:binary, comment:null), ]
-POSTHOOK: Lineage: over1korc.bo SIMPLE [(over1k)over1k.FieldSchema(name:bo, type:boolean, comment:null), ]
-POSTHOOK: Lineage: over1korc.d SIMPLE [(over1k)over1k.FieldSchema(name:d, type:double, comment:null), ]
-POSTHOOK: Lineage: over1korc.dec SIMPLE [(over1k)over1k.FieldSchema(name:dec, type:decimal(4,2), comment:null), ]
-POSTHOOK: Lineage: over1korc.f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1korc.i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1korc.s SIMPLE [(over1k)over1k.FieldSchema(name:s, type:string, comment:null), ]
-POSTHOOK: Lineage: over1korc.si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1korc.t SIMPLE [(over1k)over1k.FieldSchema(name:t, type:tinyint, comment:null), ]
-POSTHOOK: Lineage: over1korc.ts SIMPLE [(over1k)over1k.FieldSchema(name:ts, type:timestamp, comment:null), ]
-PREHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: over1korc
-                  Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: i (type: int)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                    Group By Operator
-                      aggregations: avg(50), avg(50.0), avg(50)
-                      keys: _col0 (type: int)
-                      mode: hash
-                      outputColumnNames: _col0, _col1, _col2, _col3
-                      Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: int)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col1 (type: struct<count:bigint,sum:double,input:int>), _col2 (type: struct<count:bigint,sum:double,input:double>), _col3 (type: struct<count:bigint,sum:decimal(12,0),input:decimal(10,0)>)
-            Execution mode: vectorized, llap
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: avg(VALUE._col0), avg(VALUE._col1), avg(VALUE._col2)
-                keys: KEY._col0 (type: int)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: int)
-                  sort order: +
-                  Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: double), _col2 (type: double), _col3 (type: decimal(14,4))
-        Reducer 3 
-            Execution mode: vectorized, uber
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: double), VALUE._col1 (type: double), VALUE._col2 (type: decimal(14,4))
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 524 Data size: 155436 Basic stats: COMPLETE Column stats: NONE
-                Limit
-                  Number of rows: 10
-                  Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 10 Data size: 2960 Basic stats: COMPLETE Column stats: NONE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: 10
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT 
-  i,
-  AVG(CAST(50 AS INT)) AS `avg_int_ok`,
-  AVG(CAST(50 AS DOUBLE)) AS `avg_double_ok`,
-  AVG(CAST(50 AS DECIMAL)) AS `avg_decimal_ok`
-  FROM over1korc GROUP BY i ORDER BY i LIMIT 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1korc
-#### A masked pattern was here ####
-65536	50.0	50.0	50
-65537	50.0	50.0	50
-65538	50.0	50.0	50
-65539	50.0	50.0	50
-65540	50.0	50.0	50
-65541	50.0	50.0	50
-65542	50.0	50.0	50
-65543	50.0	50.0	50
-65544	50.0	50.0	50
-65545	50.0	50.0	50


[16/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_6.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_6.q.out
new file mode 100644
index 0000000..e53fee7
--- /dev/null
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_6.q.out
@@ -0,0 +1,1005 @@
+PREHOOK: query: -- list bucketing DML: dynamic partition. multiple skewed columns. merge.
+-- The following explains the merge example used in this test case
+-- DML will generate 2 partitions
+-- ds=2008-04-08/hr=a1
+-- ds=2008-04-08/hr=b1
+-- without merge, each partition has more files
+-- ds=2008-04-08/hr=a1 has 2 files
+-- ds=2008-04-08/hr=b1 has 6 files
+-- with merge, each partition has fewer files
+-- ds=2008-04-08/hr=a1 has 1 file
+-- ds=2008-04-08/hr=b1 has 4 files
+-- The following shows file size and name in each directory
+-- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+-- without merge
+-- 155 000000_0
+-- 155 000001_0
+-- with merge
+-- 254 000000_0
+-- hr=b1/key=103/value=val_103:
+-- without merge
+-- 99 000000_0
+-- 99 000001_0
+-- with merge
+-- 142 000001_0
+-- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+-- without merge
+-- 5181 000000_0
+-- 5181 000001_0
+-- with merge
+-- 5181 000000_0
+-- 5181 000001_0
+-- hr=b1/key=484/value=val_484
+-- without merge
+-- 87 000000_0
+-- 87 000001_0
+-- with merge
+-- 118 000002_0 
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- SORT_QUERY_RESULTS
+
+-- create a skewed table
+create table list_bucketing_dynamic_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
+    stored as DIRECTORIES
+    STORED AS RCFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@list_bucketing_dynamic_part
+POSTHOOK: query: -- list bucketing DML: dynamic partition. multiple skewed columns. merge.
+-- The following explains the merge example used in this test case
+-- DML will generate 2 partitions
+-- ds=2008-04-08/hr=a1
+-- ds=2008-04-08/hr=b1
+-- without merge, each partition has more files
+-- ds=2008-04-08/hr=a1 has 2 files
+-- ds=2008-04-08/hr=b1 has 6 files
+-- with merge, each partition has fewer files
+-- ds=2008-04-08/hr=a1 has 1 file
+-- ds=2008-04-08/hr=b1 has 4 files
+-- The following shows file size and name in each directory
+-- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+-- without merge
+-- 155 000000_0
+-- 155 000001_0
+-- with merge
+-- 254 000000_0
+-- hr=b1/key=103/value=val_103:
+-- without merge
+-- 99 000000_0
+-- 99 000001_0
+-- with merge
+-- 142 000001_0
+-- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+-- without merge
+-- 5181 000000_0
+-- 5181 000001_0
+-- with merge
+-- 5181 000000_0
+-- 5181 000001_0
+-- hr=b1/key=484/value=val_484
+-- without merge
+-- 87 000000_0
+-- 87 000001_0
+-- with merge
+-- 118 000002_0 
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+-- SORT_QUERY_RESULTS
+
+-- create a skewed table
+create table list_bucketing_dynamic_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
+    stored as DIRECTORIES
+    STORED AS RCFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@list_bucketing_dynamic_part
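A minimal sketch of the two DML runs the comments above describe, assuming the
standard hive.merge.* and dynamic-partition flags control the merge step (the
flags shown are an assumption; the committed .q file may set additional
options, and only the INSERT statements are taken from the output below):

    set hive.exec.dynamic.partition=true;
    set hive.exec.dynamic.partition.mode=nonstrict;

    -- first run: merge disabled, so each dynamic partition keeps its small files
    set hive.merge.mapfiles=false;
    set hive.merge.mapredfiles=false;
    insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08';

    -- second run: merge enabled, so small files are compacted per partition
    set hive.merge.mapfiles=true;
    set hive.merge.mapredfiles=true;
    insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08';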
+PREHOOK: query: -- list bucketing DML without merge. use bucketing to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: -- list bucketing DML without merge. use bucketing to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: srcpart
+            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string), if(((UDFToDouble(key) % 100.0) = 0.0), 'a1', 'b1') (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Static Partition Specification: ds=2008-04-08/
+                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns key,value
+                      columns.comments 
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.list_bucketing_dynamic_part
+                      partition_columns ds/hr
+                      partition_columns.types string:string
+                      serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.list_bucketing_dynamic_part
+                TotalFiles: 1
+                GatherStats: true
+                MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=11
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=12
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+      Truncated Path -> Alias:
+        /srcpart/ds=2008-04-08/hr=11 [srcpart]
+        /srcpart/ds=2008-04-08/hr=12 [srcpart]
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_dynamic_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_dynamic_part
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
+POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
+POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- check DML result
+show partitions list_bucketing_dynamic_part
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: query: -- check DML result
+show partitions list_bucketing_dynamic_part
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+ds=2008-04-08/hr=a1
+ds=2008-04-08/hr=b1
+PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, a1]    	 
+Database:           	default             	 
+Table:              	list_bucketing_dynamic_part	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	2                   
+	numRows             	16                  
+	rawDataSize         	136                 
+	totalSize           	310                 
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key, value]        	 
+Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, b1]    	 
+Database:           	default             	 
+Table:              	list_bucketing_dynamic_part	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	6                   
+	numRows             	984                 
+	rawDataSize         	9488                
+	totalSize           	10734               
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key, value]        	 
+Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484, [103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: -- list bucketing DML with merge. use bucketing to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: -- list bucketing DML with merge. use bucketing to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
+  Stage-4
+  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
+  Stage-2 depends on stages: Stage-0
+  Stage-3
+  Stage-5
+  Stage-6 depends on stages: Stage-5
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: srcpart
+            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string), if(((UDFToDouble(key) % 100.0) = 0.0), 'a1', 'b1') (type: string)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Static Partition Specification: ds=2008-04-08/
+                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns key,value
+                      columns.comments 
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.list_bucketing_dynamic_part
+                      partition_columns hr
+                      partition_columns.types string
+                      serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.list_bucketing_dynamic_part
+                TotalFiles: 1
+                GatherStats: true
+                MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=11
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+#### A masked pattern was here ####
+          Partition
+            base file name: hr=12
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 12
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.srcpart
+              numFiles 1
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 5312
+              serialization.ddl struct srcpart { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.srcpart
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct srcpart { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.srcpart
+            name: default.srcpart
+      Truncated Path -> Alias:
+        /srcpart/ds=2008-04-08/hr=11 [srcpart]
+        /srcpart/ds=2008-04-08/hr=12 [srcpart]
+
+  Stage: Stage-7
+    Conditional Operator
+
+  Stage: Stage-4
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_dynamic_part
+                partition_columns hr
+                partition_columns.types string
+                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_dynamic_part
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+  Stage: Stage-3
+    Merge File Operator
+      Map Operator Tree:
+          RCFile Merge Operator
+      merge level: block
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_dynamic_part
+              partition_columns hr
+              partition_columns.types string
+              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_dynamic_part
+                partition_columns hr
+                partition_columns.types string
+                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_dynamic_part
+            name: default.list_bucketing_dynamic_part
+      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+      Truncated Path -> Alias:
+#### A masked pattern was here ####
+
+  Stage: Stage-5
+    Merge File Operator
+      Map Operator Tree:
+          RCFile Merge Operator
+      merge level: block
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            properties:
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_dynamic_part
+              partition_columns hr
+              partition_columns.types string
+              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_dynamic_part
+                partition_columns hr
+                partition_columns.types string
+                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_dynamic_part
+            name: default.list_bucketing_dynamic_part
+      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+      Truncated Path -> Alias:
+#### A masked pattern was here ####
+
+  Stage: Stage-6
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
+POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
+select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
+POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- check DML result
+show partitions list_bucketing_dynamic_part
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: query: -- check DML result
+show partitions list_bucketing_dynamic_part
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+ds=2008-04-08/hr=a1
+ds=2008-04-08/hr=b1
+PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, a1]    	 
+Database:           	default             	 
+Table:              	list_bucketing_dynamic_part	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	1                   
+	numRows             	16                  
+	rawDataSize         	136                 
+	totalSize           	254                 
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key, value]        	 
+Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, b1]    	 
+Database:           	default             	 
+Table:              	list_bucketing_dynamic_part	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	4                   
+	numRows             	984                 
+	rawDataSize         	9488                
+	totalSize           	10622               
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key, value]        	 
+Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484, [103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
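A quick way to eyeball the layout that "Skewed Value to Truncated Path" refers
to is to list the partition directory from the Hive CLI. This is a sketch, not
part of the test; the warehouse-path substitution below is an assumption:

    -- hypothetical spot-check: expect one subdirectory per skewed (key, value)
    -- pair, plus the HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME fallback directory
    dfs -ls ${hiveconf:hive.metastore.warehouse.dir}/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1;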
+PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+1000
+PREHOOK: query: select count(*) from list_bucketing_dynamic_part
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_dynamic_part
+PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
+PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
+#### A masked pattern was here ####
+POSTHOOK: query: select count(*) from list_bucketing_dynamic_part
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
+POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
+#### A masked pattern was here ####
+1000
+PREHOOK: query: explain extended
+select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Partition Description:
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr a1
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_dynamic_part
+              numFiles 1
+              numRows 16
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 136
+              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              totalSize 254
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_dynamic_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_dynamic_part
+            name: default.list_bucketing_dynamic_part
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr b1
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_dynamic_part
+              numFiles 4
+              numRows 984
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 9488
+              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              totalSize 10622
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_dynamic_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_dynamic_part
+            name: default.list_bucketing_dynamic_part
+      Processor Tree:
+        TableScan
+          alias: list_bucketing_dynamic_part
+          Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
+          GatherStats: false
+          Filter Operator
+            isSamplingPred: false
+            predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
+            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
+              ListSink
+
+PREHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_dynamic_part
+PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
+PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
+POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
+#### A masked pattern was here ####
+484	val_484	2008-04-08	b1
+484	val_484	2008-04-08	b1
+PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+#### A masked pattern was here ####
+484	val_484	2008-04-08	11
+484	val_484	2008-04-08	12
+PREHOOK: query: -- clean up
+drop table list_bucketing_dynamic_part
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@list_bucketing_dynamic_part
+PREHOOK: Output: default@list_bucketing_dynamic_part
+POSTHOOK: query: -- clean up
+drop table list_bucketing_dynamic_part
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@list_bucketing_dynamic_part
+POSTHOOK: Output: default@list_bucketing_dynamic_part

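As a reader's aid, the list-bucketing behaviour exercised by the golden file above reduces to a few statements; the following is a minimal sketch lifted from the test itself (the table, partition spec, and skew values are the test's own), not a general recipe:

    -- Listed (key, value) pairs get dedicated subdirectories under each partition
    create table list_bucketing_dynamic_part (key String, value String)
        partitioned by (ds String, hr String)
        skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
        stored as DIRECTORIES
        STORED AS RCFILE;

    -- Dynamic-partition insert: if(key % 100 == 0, 'a1', 'b1') routes rows to hr=a1 or hr=b1
    insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08';

    -- A predicate on a listed pair scans only its subdirectory; the
    -- "Skewed Value to Truncated Path" mapping above records that pruning
    select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484';
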
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_8.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_8.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_8.q.java1.7.out
deleted file mode 100644
index de1305f..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_8.q.java1.7.out
+++ /dev/null
@@ -1,641 +0,0 @@
-PREHOOK: query: -- list bucketing alter table ... concatenate: 
--- Use list bucketing DML to generate multiple files in partitions by turning off merge
--- dynamic partition. multiple skewed columns. merge.
--- The following explains merge example used in this test case
--- DML will generate 2 partitions
--- ds=2008-04-08/hr=a1
--- ds=2008-04-08/hr=b1
--- without merge, each partition has more files
--- ds=2008-04-08/hr=a1 has 2 files
--- ds=2008-04-08/hr=b1 has 6 files
--- with merge each partition has fewer files
--- ds=2008-04-08/hr=a1 has 1 file
--- ds=2008-04-08/hr=b1 has 4 files
--- The following shows file size and name in each directory
--- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 155 000000_0
--- 155 000001_0
--- with merge
--- 254 000000_0
--- hr=b1/key=103/value=val_103:
--- without merge
--- 99 000000_0
--- 99 000001_0
--- with merge
--- 142 000001_0
--- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 5181 000000_0
--- 5181 000001_0
--- with merge
--- 5181 000000_0
--- 5181 000001_0
--- hr=b1/key=484/value=val_484
--- without merge
--- 87 000000_0
--- 87 000001_0
--- with merge
--- 118 000002_0 
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- create a skewed table
-create table list_bucketing_dynamic_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- list bucketing alter table ... concatenate: 
--- Use list bucketing DML to generate multiple files in partitions by turning off merge
--- dynamic partition. multiple skewed columns. merge.
--- The following explains merge example used in this test case
--- DML will generate 2 partitions
--- ds=2008-04-08/hr=a1
--- ds=2008-04-08/hr=b1
--- without merge, each partition has more files
--- ds=2008-04-08/hr=a1 has 2 files
--- ds=2008-04-08/hr=b1 has 6 files
--- with merge each partition has fewer files
--- ds=2008-04-08/hr=a1 has 1 file
--- ds=2008-04-08/hr=b1 has 4 files
--- The following shows file size and name in each directory
--- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 155 000000_0
--- 155 000001_0
--- with merge
--- 254 000000_0
--- hr=b1/key=103/value=val_103:
--- without merge
--- 99 000000_0
--- 99 000001_0
--- with merge
--- 142 000001_0
--- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 5181 000000_0
--- 5181 000001_0
--- with merge
--- 5181 000000_0
--- 5181 000001_0
--- hr=b1/key=484/value=val_484
--- without merge
--- 87 000000_0
--- 87 000001_0
--- with merge
--- 118 000002_0 
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- create a skewed table
-create table list_bucketing_dynamic_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_dynamic_part
-PREHOOK: query: -- list bucketing DML without merge. use bucketing to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML without merge. use bucketing to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: srcpart
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string), if(((UDFToDouble(key) % 100.0) = 0.0), 'a1', 'b1') (type: string)
-              outputColumnNames: _col0, _col1, _col2
-              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/
-                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_dynamic_part
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_dynamic_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [srcpart]
-        /srcpart/ds=2008-04-08/hr=12 [srcpart]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_dynamic_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
-POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_dynamic_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_dynamic_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-ds=2008-04-08/hr=a1
-ds=2008-04-08/hr=b1
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, a1]    	 
-Database:           	default             	 
-Table:              	list_bucketing_dynamic_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	2                   
-	numRows             	16                  
-	rawDataSize         	136                 
-	totalSize           	310                 
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, b1]    	 
-Database:           	default             	 
-Table:              	list_bucketing_dynamic_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	6                   
-	numRows             	984                 
-	rawDataSize         	9488                
-	totalSize           	10734               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103, [484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: -- concatenate the partition and it will merge files
-alter table list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1') concatenate
-PREHOOK: type: ALTER_PARTITION_MERGE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-POSTHOOK: query: -- concatenate the partition and it will merge files
-alter table list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1') concatenate
-POSTHOOK: type: ALTER_PARTITION_MERGE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, b1]    	 
-Database:           	default             	 
-Table:              	list_bucketing_dynamic_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	numFiles            	3                   
-	numRows             	984                 
-	rawDataSize         	9488                
-	totalSize           	10586               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103, [484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-1000
-PREHOOK: query: select count(*) from list_bucketing_dynamic_part
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from list_bucketing_dynamic_part
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-1000
-PREHOOK: query: explain extended
-select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Partition Description:
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr a1
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_dynamic_part
-              numFiles 2
-              numRows 16
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 136
-              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 310
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_dynamic_part
-            name: default.list_bucketing_dynamic_part
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr b1
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_dynamic_part
-              numFiles 3
-              numRows 984
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 9488
-              serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 10586
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_dynamic_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_dynamic_part
-            name: default.list_bucketing_dynamic_part
-      Processor Tree:
-        TableScan
-          alias: list_bucketing_dynamic_part
-          Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
-          GatherStats: false
-          Filter Operator
-            isSamplingPred: false
-            predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
-            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-              ListSink
-
-PREHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-484	val_484	2008-04-08	b1
-484	val_484	2008-04-08	b1
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484' order by hr
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484' order by hr
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	12
-PREHOOK: query: -- clean up
-drop table list_bucketing_dynamic_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Output: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- clean up
-drop table list_bucketing_dynamic_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Output: default@list_bucketing_dynamic_part

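The java1.7 golden file deleted above is the one that exercised ALTER TABLE ... CONCATENATE on a list-bucketed partition. Distilled to its two essential statements (again a sketch drawn from the test, not new behaviour):

    -- Merge the small RCFiles that list-bucketing DML produced in one partition
    alter table list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1') concatenate;

    -- numFiles in the partition parameters drops (6 to 3 in the output above)
    desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1');

Note that after the concatenate the partition parameters above no longer carry COLUMN_STATS_ACCURATE, presumably because merging files invalidates the previously gathered basic stats.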

[23/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_11.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_11.q.java1.8.out b/ql/src/test/results/clientpositive/list_bucket_dml_11.q.java1.8.out
deleted file mode 100644
index 00a6235..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_11.q.java1.8.out
+++ /dev/null
@@ -1,424 +0,0 @@
-PREHOOK: query: -- Ensure it works if skewed column is not the first column in the table columns
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns.
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (value) on ('val_466','val_287','val_82')
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- Ensure it works if skewed column is not the first column in the table columns
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns.
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (value) on ('val_466','val_287','val_82')
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_static_part
-PREHOOK: query: -- list bucketing DML without merge. use bucketing to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from src
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML without merge. use bucketing to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from src
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            src
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_TAB
-            TOK_TABNAME
-               list_bucketing_static_part
-            TOK_PARTSPEC
-               TOK_PARTVAL
-                  ds
-                  '2008-04-08'
-               TOK_PARTVAL
-                  hr
-                  '11'
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               key
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               value
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/hr=11/
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_static_part
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_static_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_static_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numRows 500
-              rawDataSize 5312
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE true
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numRows 500
-                rawDataSize 5312
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-      Truncated Path -> Alias:
-        /src [src]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_static_part
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_static_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	4                   
-	numRows             	500                 
-	rawDataSize         	4812                
-	totalSize           	5522                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[value]             	 
-Skewed Values:      	[[val_466], [val_287], [val_82]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[val_287]=/list_bucketing_static_part/ds=2008-04-08/hr=11/value=val_287, [val_82]=/list_bucketing_static_part/ds=2008-04-08/hr=11/value=val_82, [val_466]=/list_bucketing_static_part/ds=2008-04-08/hr=11/value=val_466}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: explain extended
-select key, value from list_bucketing_static_part where ds='2008-04-08' and hr='11' and value = "val_466"
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select key, value from list_bucketing_static_part where ds='2008-04-08' and hr='11' and value = "val_466"
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            list_bucketing_static_part
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_DIR
-            TOK_TMP_FILE
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               key
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               value
-      TOK_WHERE
-         and
-            and
-               =
-                  TOK_TABLE_OR_COL
-                     ds
-                  '2008-04-08'
-               =
-                  TOK_TABLE_OR_COL
-                     hr
-                  '11'
-            =
-               TOK_TABLE_OR_COL
-                  value
-               "val_466"
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: list_bucketing_static_part
-            Statistics: Num rows: 500 Data size: 4812 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: (value = 'val_466') (type: boolean)
-              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), 'val_466' (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-                  Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        columns _col0,_col1
-                        columns.types string:string
-                        escape.delim \
-                        hive.serialization.extend.nesting.levels true
-                        serialization.format 1
-                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  TotalFiles: 1
-                  GatherStats: false
-                  MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: value=val_466
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              numFiles 4
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 4812
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 5522
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      Truncated Path -> Alias:
-        /list_bucketing_static_part/ds=2008-04-08/hr=11/value=val_466 [$hdt$_0:list_bucketing_static_part]
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: select key, value from list_bucketing_static_part where ds='2008-04-08' and hr='11' and value = "val_466"
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select key, value from list_bucketing_static_part where ds='2008-04-08' and hr='11' and value = "val_466"
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-466	val_466
-466	val_466
-466	val_466
-PREHOOK: query: drop table list_bucketing_static_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: drop table list_bucketing_static_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Output: default@list_bucketing_static_part

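list_bucket_dml_11 covers the case where the skewed column is not the first table column; the deleted java1.8 output above and the new jdk-neutral output below both reduce to the same sketch (statements taken from the test):

    -- Skew on value, the second column; listed values still get their own directories
    create table list_bucketing_static_part (key String, value String)
        partitioned by (ds String, hr String)
        skewed by (value) on ('val_466','val_287','val_82')
        stored as DIRECTORIES
        STORED AS RCFILE;

    insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
    select key, value from src;

    -- Truncated Path -> Alias in the plan maps this scan to .../hr=11/value=val_466 only
    select key, value from list_bucketing_static_part
    where ds='2008-04-08' and hr='11' and value = "val_466";
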
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_11.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_11.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_11.q.out
new file mode 100644
index 0000000..ecf54a8
--- /dev/null
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_11.q.out
@@ -0,0 +1,327 @@
+PREHOOK: query: -- Ensure it works if skewed column is not the first column in the table columns
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+
+-- list bucketing DML: static partition. multiple skewed columns.
+
+-- create a skewed table
+create table list_bucketing_static_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (value) on ('val_466','val_287','val_82')
+    stored as DIRECTORIES
+    STORED AS RCFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@list_bucketing_static_part
+POSTHOOK: query: -- Ensure it works if skewed column is not the first column in the table columns
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
+
+-- list bucketing DML: static partition. multiple skewed columns.
+
+-- create a skewed table
+create table list_bucketing_static_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (value) on ('val_466','val_287','val_82')
+    stored as DIRECTORIES
+    STORED AS RCFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@list_bucketing_static_part
+PREHOOK: query: -- list bucketing DML without merge. use bucketing to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from src
+PREHOOK: type: QUERY
+POSTHOOK: query: -- list bucketing DML without merge. use bucketing to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from src
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Select Operator
+              expressions: key (type: string), value (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 1
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Static Partition Specification: ds=2008-04-08/hr=11/
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                    properties:
+                      bucket_count -1
+                      columns key,value
+                      columns.comments 
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.list_bucketing_static_part
+                      partition_columns ds/hr
+                      partition_columns.types string:string
+                      serialization.ddl struct list_bucketing_static_part { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                    name: default.list_bucketing_static_part
+                TotalFiles: 1
+                GatherStats: true
+                MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: src
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+              bucket_count -1
+              columns key,value
+              columns.comments 'default','default'
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.src
+              numFiles 1
+              numRows 500
+              rawDataSize 5312
+              serialization.ddl struct src { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 5812
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+                bucket_count -1
+                columns key,value
+                columns.comments 'default','default'
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.src
+                numFiles 1
+                numRows 500
+                rawDataSize 5312
+                serialization.ddl struct src { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 5812
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.src
+            name: default.src
+      Truncated Path -> Alias:
+        /src [src]
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 11
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
+select key, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
+select key, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@list_bucketing_static_part
+ds=2008-04-08/hr=11
+PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_static_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 11]    	 
+Database:           	default             	 
+Table:              	list_bucketing_static_part	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	4                   
+	numRows             	500                 
+	rawDataSize         	4812                
+	totalSize           	5522                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[value]             	 
+Skewed Values:      	[[val_466], [val_287], [val_82]]	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[val_287]=/list_bucketing_static_part/ds=2008-04-08/hr=11/value=val_287, [val_82]=/list_bucketing_static_part/ds=2008-04-08/hr=11/value=val_82, [val_466]=/list_bucketing_static_part/ds=2008-04-08/hr=11/value=val_466}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: explain extended
+select key, value from list_bucketing_static_part where ds='2008-04-08' and hr='11' and value = "val_466"
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+select key, value from list_bucketing_static_part where ds='2008-04-08' and hr='11' and value = "val_466"
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Partition Description:
+          Partition
+            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+            partition values:
+              ds 2008-04-08
+              hr 11
+            properties:
+              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types string:string
+#### A masked pattern was here ####
+              name default.list_bucketing_static_part
+              numFiles 4
+              numRows 500
+              partition_columns ds/hr
+              partition_columns.types string:string
+              rawDataSize 4812
+              serialization.ddl struct list_bucketing_static_part { string key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              totalSize 5522
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+          
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+            name: default.list_bucketing_static_part
+      Processor Tree:
+        TableScan
+          alias: list_bucketing_static_part
+          Statistics: Num rows: 500 Data size: 4812 Basic stats: COMPLETE Column stats: NONE
+          GatherStats: false
+          Filter Operator
+            isSamplingPred: false
+            predicate: (value = 'val_466') (type: boolean)
+            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: key (type: string), 'val_466' (type: string)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
+              ListSink
+
+PREHOOK: query: select key, value from list_bucketing_static_part where ds='2008-04-08' and hr='11' and value = "val_466"
+PREHOOK: type: QUERY
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+POSTHOOK: query: select key, value from list_bucketing_static_part where ds='2008-04-08' and hr='11' and value = "val_466"
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+#### A masked pattern was here ####
+466	val_466
+466	val_466
+466	val_466
+PREHOOK: query: drop table list_bucketing_static_part
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@list_bucketing_static_part
+PREHOOK: Output: default@list_bucketing_static_part
+POSTHOOK: query: drop table list_bucketing_static_part
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: Output: default@list_bucketing_static_part
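
What list_bucket_dml_11.q.out is checking: the table is skewed by (value) on ('val_466','val_287','val_82') and stored as DIRECTORIES, so each skewed value gets its own value=<v> subdirectory (see the "Skewed Value to Truncated Path" line above), and a point predicate such as value = "val_466" only has to read that one subtree. A rough Java sketch of the directory routing follows; the class and method names are hypothetical, not Hive's internals.

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    public class SkewedDirRouter {
      // The skewed values declared in the CREATE TABLE above.
      private static final Set<String> SKEWED =
          new HashSet<>(Arrays.asList("val_466", "val_287", "val_82"));

      // Mirrors the "Skewed Value to Truncated Path" mapping: skewed rows get a
      // dedicated value=<v> subdirectory, everything else shares the default
      // directory, so a filter on one skewed value scans a single subtree.
      static String subdirFor(String value) {
        return SKEWED.contains(value)
            ? "value=" + value
            : "HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME";
      }

      public static void main(String[] args) {
        System.out.println(subdirFor("val_466")); // value=val_466
        System.out.println(subdirFor("val_0"));   // HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME
      }
    }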

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_12.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_12.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_12.q.java1.7.out
deleted file mode 100644
index 0be7f4e..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_12.q.java1.7.out
+++ /dev/null
@@ -1,426 +0,0 @@
-PREHOOK: query: -- Ensure it works if skewed column is not the first column in the table columns
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- test where the skewed values are more than 1 say columns no. 2 and 4 in a table with 5 columns
-create table list_bucketing_mul_col (col1 String, col2 String, col3 String, col4 String, col5 string) 
-    partitioned by (ds String, hr String) 
-    skewed by (col2, col4) on (('466','val_466'),('287','val_287'),('82','val_82'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_mul_col
-POSTHOOK: query: -- Ensure it works if skewed column is not the first column in the table columns
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- test where the skewed values are more than 1 say columns no. 2 and 4 in a table with 5 columns
-create table list_bucketing_mul_col (col1 String, col2 String, col3 String, col4 String, col5 string) 
-    partitioned by (ds String, hr String) 
-    skewed by (col2, col4) on (('466','val_466'),('287','val_287'),('82','val_82'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_mul_col
-PREHOOK: query: -- list bucketing DML 
-explain extended
-insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08',  hr = '11')
-select 1, key, 1, value, 1 from src
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML 
-explain extended
-insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08',  hr = '11')
-select 1, key, 1, value, 1 from src
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: '1' (type: string), key (type: string), '1' (type: string), value (type: string), '1' (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/hr=11/
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns col1,col2,col3,col4,col5
-                      columns.comments 
-                      columns.types string:string:string:string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_mul_col
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_mul_col
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numRows 500
-              rawDataSize 5312
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numRows 500
-                rawDataSize 5312
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-      Truncated Path -> Alias:
-        /src [src]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns col1,col2,col3,col4,col5
-                columns.comments 
-                columns.types string:string:string:string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_mul_col
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_mul_col
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08', hr = '11')
-select 1, key, 1, value, 1 from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08', hr = '11')
-select 1, key, 1, value, 1 from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col1 EXPRESSION []
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col2 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col3 EXPRESSION []
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col4 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col5 EXPRESSION []
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_mul_col
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_mul_col
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_mul_col
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_mul_col
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_mul_col partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_mul_col
-POSTHOOK: query: desc formatted list_bucketing_mul_col partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_mul_col
-# col_name            	data_type           	comment             
-	 	 
-col1                	string              	                    
-col2                	string              	                    
-col3                	string              	                    
-col4                	string              	                    
-col5                	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_mul_col	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	4                   
-	numRows             	500                 
-	rawDataSize         	6312                
-	totalSize           	7094                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[col2, col4]        	 
-Skewed Values:      	[[466, val_466], [287, val_287], [82, val_82]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[82, val_82]=/list_bucketing_mul_col/ds=2008-04-08/hr=11/col2=82/col4=val_82, [466, val_466]=/list_bucketing_mul_col/ds=2008-04-08/hr=11/col2=466/col4=val_466, [287, val_287]=/list_bucketing_mul_col/ds=2008-04-08/hr=11/col2=287/col4=val_287}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: explain extended
-select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466"
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466"
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Partition Description:
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-              bucket_count -1
-              columns col1,col2,col3,col4,col5
-              columns.comments 
-              columns.types string:string:string:string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_mul_col
-              numFiles 4
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 6312
-              serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 7094
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns col1,col2,col3,col4,col5
-                columns.comments 
-                columns.types string:string:string:string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_mul_col
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_mul_col
-            name: default.list_bucketing_mul_col
-      Processor Tree:
-        TableScan
-          alias: list_bucketing_mul_col
-          Statistics: Num rows: 500 Data size: 6312 Basic stats: COMPLETE Column stats: NONE
-          GatherStats: false
-          Filter Operator
-            isSamplingPred: false
-            predicate: ((col2 = '466') and (col4 = 'val_466')) (type: boolean)
-            Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: col1 (type: string), '466' (type: string), col3 (type: string), 'val_466' (type: string), col5 (type: string), '2008-04-08' (type: string), '11' (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
-              ListSink
-
-PREHOOK: query: select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466"
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_mul_col
-PREHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466"
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_mul_col
-POSTHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-1	466	1	val_466	1	2008-04-08	11
-1	466	1	val_466	1	2008-04-08	11
-1	466	1	val_466	1	2008-04-08	11
-PREHOOK: query: explain extended
-select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382"
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382"
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Partition Description:
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-              bucket_count -1
-              columns col1,col2,col3,col4,col5
-              columns.comments 
-              columns.types string:string:string:string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_mul_col
-              numFiles 4
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 6312
-              serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 7094
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns col1,col2,col3,col4,col5
-                columns.comments 
-                columns.types string:string:string:string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_mul_col
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_mul_col
-            name: default.list_bucketing_mul_col
-      Processor Tree:
-        TableScan
-          alias: list_bucketing_mul_col
-          Statistics: Num rows: 500 Data size: 6312 Basic stats: COMPLETE Column stats: NONE
-          GatherStats: false
-          Filter Operator
-            isSamplingPred: false
-            predicate: ((col2 = '382') and (col4 = 'val_382')) (type: boolean)
-            Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: col1 (type: string), '382' (type: string), col3 (type: string), 'val_382' (type: string), col5 (type: string), '2008-04-08' (type: string), '11' (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
-              ListSink
-
-PREHOOK: query: select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382"
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_mul_col
-PREHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382"
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_mul_col
-POSTHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-1	382	1	val_382	1	2008-04-08	11
-1	382	1	val_382	1	2008-04-08	11
-PREHOOK: query: drop table list_bucketing_mul_col
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_mul_col
-PREHOOK: Output: default@list_bucketing_mul_col
-POSTHOOK: query: drop table list_bucketing_mul_col
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_mul_col
-POSTHOOK: Output: default@list_bucketing_mul_col
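
Beyond plan-shape differences, the two deleted variants also disagree on the stats flag format: the java1.7 file above records COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} while the java1.8 file deleted next records the legacy boolean form, COLUMN_STATS_ACCURATE true. Any reader of the property therefore has to tolerate both. The sketch below is crude string matching for illustration only, not Hive's StatsSetupConst logic.

    public class StatsFlagReader {
      // Accepts both formats seen in these golden files: the legacy boolean
      // ("true") and the JSON map ({"BASIC_STATS":"true",...}).
      static boolean basicStatsAccurate(String prop) {
        if (prop == null) return false;
        String p = prop.trim();
        if (!p.startsWith("{")) return Boolean.parseBoolean(p);
        return p.contains("\"BASIC_STATS\":\"true\""); // crude, illustrative only
      }

      public static void main(String[] args) {
        System.out.println(basicStatsAccurate("true"));                       // true
        System.out.println(basicStatsAccurate("{\"BASIC_STATS\":\"true\"}")); // true
      }
    }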

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_12.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_12.q.java1.8.out b/ql/src/test/results/clientpositive/list_bucket_dml_12.q.java1.8.out
deleted file mode 100644
index 6d2298b..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_12.q.java1.8.out
+++ /dev/null
@@ -1,596 +0,0 @@
-PREHOOK: query: -- Ensure it works if skewed column is not the first column in the table columns
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- test where the skewed values are more than 1 say columns no. 2 and 4 in a table with 5 columns
-create table list_bucketing_mul_col (col1 String, col2 String, col3 String, col4 String, col5 string) 
-    partitioned by (ds String, hr String) 
-    skewed by (col2, col4) on (('466','val_466'),('287','val_287'),('82','val_82'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_mul_col
-POSTHOOK: query: -- Ensure it works if skewed column is not the first column in the table columns
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- test where the skewed values are more than 1 say columns no. 2 and 4 in a table with 5 columns
-create table list_bucketing_mul_col (col1 String, col2 String, col3 String, col4 String, col5 string) 
-    partitioned by (ds String, hr String) 
-    skewed by (col2, col4) on (('466','val_466'),('287','val_287'),('82','val_82'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_mul_col
-PREHOOK: query: -- list bucketing DML 
-explain extended
-insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08',  hr = '11')
-select 1, key, 1, value, 1 from src
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML 
-explain extended
-insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08',  hr = '11')
-select 1, key, 1, value, 1 from src
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            src
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_TAB
-            TOK_TABNAME
-               list_bucketing_mul_col
-            TOK_PARTSPEC
-               TOK_PARTVAL
-                  ds
-                  '2008-04-08'
-               TOK_PARTVAL
-                  hr
-                  '11'
-      TOK_SELECT
-         TOK_SELEXPR
-            1
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               key
-         TOK_SELEXPR
-            1
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               value
-         TOK_SELEXPR
-            1
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: src
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: UDFToString(1) (type: string), key (type: string), UDFToString(1) (type: string), value (type: string), UDFToString(1) (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/hr=11/
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns col1,col2,col3,col4,col5
-                      columns.comments 
-                      columns.types string:string:string:string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_mul_col
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_mul_col
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numRows 500
-              rawDataSize 5312
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE true
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numRows 500
-                rawDataSize 5312
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-      Truncated Path -> Alias:
-        /src [$hdt$_0:src]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns col1,col2,col3,col4,col5
-                columns.comments 
-                columns.types string:string:string:string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_mul_col
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_mul_col
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08', hr = '11')
-select 1, key, 1, value, 1 from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_mul_col partition (ds = '2008-04-08', hr = '11')
-select 1, key, 1, value, 1 from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col1 EXPRESSION []
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col2 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col3 EXPRESSION []
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col4 SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_mul_col PARTITION(ds=2008-04-08,hr=11).col5 EXPRESSION []
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_mul_col
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_mul_col
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_mul_col
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_mul_col
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_mul_col partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_mul_col
-POSTHOOK: query: desc formatted list_bucketing_mul_col partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_mul_col
-# col_name            	data_type           	comment             
-	 	 
-col1                	string              	                    
-col2                	string              	                    
-col3                	string              	                    
-col4                	string              	                    
-col5                	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_mul_col	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	4                   
-	numRows             	500                 
-	rawDataSize         	6312                
-	totalSize           	7094                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[col2, col4]        	 
-Skewed Values:      	[[466, val_466], [287, val_287], [82, val_82]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[466, val_466]=/list_bucketing_mul_col/ds=2008-04-08/hr=11/col2=466/col4=val_466, [287, val_287]=/list_bucketing_mul_col/ds=2008-04-08/hr=11/col2=287/col4=val_287, [82, val_82]=/list_bucketing_mul_col/ds=2008-04-08/hr=11/col2=82/col4=val_82}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: explain extended
-select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466"
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466"
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            list_bucketing_mul_col
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_DIR
-            TOK_TMP_FILE
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_ALLCOLREF
-      TOK_WHERE
-         and
-            and
-               and
-                  =
-                     TOK_TABLE_OR_COL
-                        ds
-                     '2008-04-08'
-                  =
-                     TOK_TABLE_OR_COL
-                        hr
-                     '11'
-               =
-                  TOK_TABLE_OR_COL
-                     col2
-                  "466"
-            =
-               TOK_TABLE_OR_COL
-                  col4
-               "val_466"
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: list_bucketing_mul_col
-            Statistics: Num rows: 500 Data size: 6312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: ((col2 = '466') and (col4 = 'val_466')) (type: boolean)
-              Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: col1 (type: string), '466' (type: string), col3 (type: string), 'val_466' (type: string), col5 (type: string), '2008-04-08' (type: string), '11' (type: string)
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-                Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-                  Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        columns _col0,_col1,_col2,_col3,_col4,_col5,_col6
-                        columns.types string:string:string:string:string:string:string
-                        escape.delim \
-                        hive.serialization.extend.nesting.levels true
-                        serialization.format 1
-                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  TotalFiles: 1
-                  GatherStats: false
-                  MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: col4=val_466
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns col1,col2,col3,col4,col5
-              columns.comments 
-              columns.types string:string:string:string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_mul_col
-              numFiles 4
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 6312
-              serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 7094
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns col1,col2,col3,col4,col5
-                columns.comments 
-                columns.types string:string:string:string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_mul_col
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_mul_col
-            name: default.list_bucketing_mul_col
-      Truncated Path -> Alias:
-        /list_bucketing_mul_col/ds=2008-04-08/hr=11/col2=466/col4=val_466 [list_bucketing_mul_col]
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466"
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_mul_col
-PREHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "466" and col4 = "val_466"
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_mul_col
-POSTHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-1	466	1	val_466	1	2008-04-08	11
-1	466	1	val_466	1	2008-04-08	11
-1	466	1	val_466	1	2008-04-08	11
-PREHOOK: query: explain extended
-select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382"
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382"
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            list_bucketing_mul_col
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_DIR
-            TOK_TMP_FILE
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_ALLCOLREF
-      TOK_WHERE
-         and
-            and
-               and
-                  =
-                     TOK_TABLE_OR_COL
-                        ds
-                     '2008-04-08'
-                  =
-                     TOK_TABLE_OR_COL
-                        hr
-                     '11'
-               =
-                  TOK_TABLE_OR_COL
-                     col2
-                  "382"
-            =
-               TOK_TABLE_OR_COL
-                  col4
-               "val_382"
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: list_bucketing_mul_col
-            Statistics: Num rows: 500 Data size: 6312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: ((col2 = '382') and (col4 = 'val_382')) (type: boolean)
-              Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: col1 (type: string), '382' (type: string), col3 (type: string), 'val_382' (type: string), col5 (type: string), '2008-04-08' (type: string), '11' (type: string)
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-                Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-                  Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        columns _col0,_col1,_col2,_col3,_col4,_col5,_col6
-                        columns.types string:string:string:string:string:string:string
-                        escape.delim \
-                        hive.serialization.extend.nesting.levels true
-                        serialization.format 1
-                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  TotalFiles: 1
-                  GatherStats: false
-                  MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns col1,col2,col3,col4,col5
-              columns.comments 
-              columns.types string:string:string:string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_mul_col
-              numFiles 4
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 6312
-              serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 7094
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns col1,col2,col3,col4,col5
-                columns.comments 
-                columns.types string:string:string:string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_mul_col
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_mul_col
-            name: default.list_bucketing_mul_col
-      Truncated Path -> Alias:
-        /list_bucketing_mul_col/ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME [list_bucketing_mul_col]
-
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Processor Tree:
-        ListSink
-
-PREHOOK: query: select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382"
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_mul_col
-PREHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_mul_col 
-where ds='2008-04-08' and hr='11' and col2 = "382" and col4 = "val_382"
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_mul_col
-POSTHOOK: Input: default@list_bucketing_mul_col@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-1	382	1	val_382	1	2008-04-08	11
-1	382	1	val_382	1	2008-04-08	11
-PREHOOK: query: drop table list_bucketing_mul_col
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_mul_col
-PREHOOK: Output: default@list_bucketing_mul_col
-POSTHOOK: query: drop table list_bucketing_mul_col
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_mul_col
-POSTHOOK: Output: default@list_bucketing_mul_col
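
A note on the churn above: partition properties in these plans are rendered from a Java Map, so their printed order tracks map iteration order. The older golden files carry the scalar form (COLUMN_STATS_ACCURATE true); once the value became a JSON map of per-stat flags, an unsorted map meant different JDKs could emit the keys in different orders, and every EXPLAIN EXTENDED golden file that prints the property churned with them. A minimal Java sketch of the effect, using nothing but the JDK collections (this is not Hive code):

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class MapOrderDemo {
    public static void main(String[] args) {
        // The two flags that appear in COLUMN_STATS_ACCURATE above.
        Map<String, String> stats = new HashMap<>();
        stats.put("COLUMN_STATS", "{\"key\":\"true\",\"value\":\"true\"}");
        stats.put("BASIC_STATS", "true");
        // HashMap makes no ordering promise, and its internals changed
        // between JDK7 and JDK8, so this line can differ per JDK.
        System.out.println("hash order:   " + stats);
        // A sorted view yields one stable rendering on every JDK.
        System.out.println("sorted order: " + new TreeMap<>(stats));
    }
}

Serializing through a deterministic (for example, sorted) map is the usual fix, since the golden files then stop depending on which JDK generated them.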


[20/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_4.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_4.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_4.q.java1.7.out
deleted file mode 100644
index c15c6a2..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_4.q.java1.7.out
+++ /dev/null
@@ -1,813 +0,0 @@
-PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns. merge.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- after merge
--- 142 000000_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
--- after merge
--- 118 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns. merge.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- after merge
--- 142 000000_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
--- after merge
--- 118 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_static_part
-PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: srcpart
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/hr=11/
-                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_static_part
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_static_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_static_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [srcpart]
-        /srcpart/ds=2008-04-08/hr=12 [srcpart]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_static_part
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_static_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	6                   
-	numRows             	1000                
-	rawDataSize         	9624                
-	totalSize           	10898               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[103, val_103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103/value=val_103, [484, val_484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484/value=val_484}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
-  Stage-4
-  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
-  Stage-2 depends on stages: Stage-0
-  Stage-3
-  Stage-5
-  Stage-6 depends on stages: Stage-5
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: srcpart
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/hr=11/
-                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_static_part
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_static_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_static_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [srcpart]
-        /srcpart/ds=2008-04-08/hr=12 [srcpart]
-
-  Stage: Stage-7
-    Conditional Operator
-
-  Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-  Stage: Stage-3
-    Merge File Operator
-      Map Operator Tree:
-          RCFile Merge Operator
-      merge level: block
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              partition_columns.types string:string
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-5
-    Merge File Operator
-      Map Operator Tree:
-          RCFile Merge Operator
-      merge level: block
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              partition_columns.types string:string
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_static_part
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_static_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	4                   
-	numRows             	1000                
-	rawDataSize         	9624                
-	totalSize           	10786               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[103, val_103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103/value=val_103, [484, val_484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484/value=val_484}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-1000
-PREHOOK: query: select count(*) from list_bucketing_static_part
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from list_bucketing_static_part
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-1000
-PREHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Partition Description:
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              numFiles 4
-              numRows 1000
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 9624
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 10786
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      Processor Tree:
-        TableScan
-          alias: list_bucketing_static_part
-          Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
-          GatherStats: false
-          Filter Operator
-            isSamplingPred: false
-            predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
-            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: '484' (type: string), 'val_484' (type: string), '2008-04-08' (type: string), '11' (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-              ListSink
-
-PREHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	11
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	12
-PREHOOK: query: -- clean up
-drop table list_bucketing_static_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- clean up
-drop table list_bucketing_static_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Output: default@list_bucketing_static_part
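
HIVE-13549 removes these files in pairs: a JDK8-specific copy of the same golden file follows. The per-JDK copies exist because map-backed lines come out in iteration order, which differs between JDK7 and JDK8; compare the Skewed Value to Truncated Path line above ([103, val_103] first) with the reversed ordering in the file below ([484, val_484] first). The JAVA_VERSION_SPECIFIC_OUTPUT marker in the test implies the harness picks the expected file by the running JDK. A hypothetical sketch of that lookup, assuming a QTestUtil-like results layout (the class name and fallback rule here are illustrative, not Hive's actual code):

import java.io.File;

public class GoldenFilePicker {
    // Prefer a per-JDK golden file if one exists, else the shared one.
    static File expectedOutput(File resultsDir, String testName) {
        // Reports "1.7" or "1.8" on the JDKs these files were split for.
        String jdk = System.getProperty("java.specification.version");
        File versioned = new File(resultsDir, testName + ".q.java" + jdk + ".out");
        return versioned.exists() ? versioned : new File(resultsDir, testName + ".q.out");
    }

    public static void main(String[] args) {
        System.out.println(expectedOutput(
            new File("ql/src/test/results/clientpositive"), "list_bucket_dml_4"));
    }
}

Once the suite targets a single JDK, only the shared fallback is ever hit, which is presumably why the per-version copies can simply be deleted.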

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_4.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_4.q.java1.8.out b/ql/src/test/results/clientpositive/list_bucket_dml_4.q.java1.8.out
deleted file mode 100644
index d484626..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_4.q.java1.8.out
+++ /dev/null
@@ -1,915 +0,0 @@
-PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns. merge.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- after merge
--- 142 000000_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
--- after merge
--- 118 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns. merge.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- after merge
--- 142 000000_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
--- after merge
--- 118 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_static_part
-PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            srcpart
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_TAB
-            TOK_TABNAME
-               list_bucketing_static_part
-            TOK_PARTSPEC
-               TOK_PARTVAL
-                  ds
-                  '2008-04-08'
-               TOK_PARTVAL
-                  hr
-                  '11'
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               key
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               value
-      TOK_WHERE
-         =
-            TOK_TABLE_OR_COL
-               ds
-            '2008-04-08'
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: srcpart
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/hr=11/
-                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_static_part
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_static_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_static_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [srcpart]
-        /srcpart/ds=2008-04-08/hr=12 [srcpart]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_static_part
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_static_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	6                   
-	numRows             	1000                
-	rawDataSize         	9624                
-	totalSize           	10898               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484/value=val_484, [103, val_103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103/value=val_103}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            srcpart
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_TAB
-            TOK_TABNAME
-               list_bucketing_static_part
-            TOK_PARTSPEC
-               TOK_PARTVAL
-                  ds
-                  '2008-04-08'
-               TOK_PARTVAL
-                  hr
-                  '11'
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               key
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               value
-      TOK_WHERE
-         =
-            TOK_TABLE_OR_COL
-               ds
-            '2008-04-08'
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
-  Stage-4
-  Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
-  Stage-2 depends on stages: Stage-0
-  Stage-3
-  Stage-5
-  Stage-6 depends on stages: Stage-5
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: srcpart
-            Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string), value (type: string)
-              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 1
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                Static Partition Specification: ds=2008-04-08/hr=11/
-                Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                table:
-                    input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.list_bucketing_static_part
-                      partition_columns.types string:string
-                      serialization.ddl struct list_bucketing_static_part { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                    name: default.list_bucketing_static_part
-                TotalFiles: 1
-                GatherStats: true
-                MultiFileSpray: false
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=12
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 12
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [srcpart]
-        /srcpart/ds=2008-04-08/hr=12 [srcpart]
-
-  Stage: Stage-7
-    Conditional Operator
-
-  Stage: Stage-4
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-  Stage: Stage-3
-    Merge File Operator
-      Map Operator Tree:
-          RCFile Merge Operator
-      merge level: block
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              partition_columns.types string:string
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-5
-    Merge File Operator
-      Map Operator Tree:
-          RCFile Merge Operator
-      merge level: block
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            properties:
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              partition_columns.types string:string
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-6
-    Move Operator
-      files:
-          hdfs directory: true
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_static_part
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_static_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	4                   
-	numRows             	1000                
-	rawDataSize         	9624                
-	totalSize           	10786               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484/value=val_484, [103, val_103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103/value=val_103}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-1000
-PREHOOK: query: select count(*) from list_bucketing_static_part
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from list_bucketing_static_part
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-1000
-PREHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            list_bucketing_static_part
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_DIR
-            TOK_TMP_FILE
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_ALLCOLREF
-      TOK_WHERE
-         and
-            and
-               and
-                  =
-                     TOK_TABLE_OR_COL
-                        ds
-                     '2008-04-08'
-                  =
-                     TOK_TABLE_OR_COL
-                        hr
-                     '11'
-               =
-                  TOK_TABLE_OR_COL
-                     key
-                  '484'
-            =
-               TOK_TABLE_OR_COL
-                  value
-               'val_484'
-
-
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Partition Description:
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              numFiles 4
-              numRows 1000
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 9624
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 10786
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      Processor Tree:
-        TableScan
-          alias: list_bucketing_static_part
-          Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
-          GatherStats: false
-          Filter Operator
-            isSamplingPred: false
-            predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
-            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: '484' (type: string), 'val_484' (type: string), '2008-04-08' (type: string), '11' (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-              ListSink
-
-PREHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	11
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	12
-PREHOOK: query: -- clean up
-drop table list_bucketing_static_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- clean up
-drop table list_bucketing_static_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Output: default@list_bucketing_static_part

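A note on the stats property that dominates these hunks: the deleted golden
files record the partition parameter as the bare literal

    COLUMN_STATS_ACCURATE true

while the regenerated files later in this message record the JSON form the
metastore now writes, for example

    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}

That JSON is rendered from a map, so its key order is only as stable as the
map's iteration order, and JDK7 and JDK8 iterate differently; any .q.out
comparison that embeds the property therefore diverged by JDK until the
outputs were regenerated with a deterministic rendering. To inspect the value
on a live partition, a minimal sketch against the srcpart fixture queried
above (illustrative only, not quoted from the patch):

    desc formatted srcpart partition (ds='2008-04-08', hr='11');
    -- Partition Parameters then include a line such as:
    --   COLUMN_STATS_ACCURATE  {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}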

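The first plan above ("list bucketing DML with merge") also shows the shape of
Hive's small-file merge: Stage-7 is a Conditional Operator that, after the
insert, either just moves the output into place (Stage-4) or first
block-merges the small RCFiles (Stage-3 or Stage-5 plus the follow-up moves),
depending on the average size of the files produced. A hedged sketch of the
session settings that introduce those stages (values illustrative, not quoted
from this test):

    -- merge small files produced by map-only and map-reduce jobs
    set hive.merge.mapfiles=true;
    set hive.merge.mapredfiles=true;
    -- if the job's average output file is below this threshold, the
    -- conditional branch runs the merge task instead of the plain move
    set hive.merge.smallfiles.avgsize=16000000;
    -- target size for each merged file
    set hive.merge.size.per.task=256000000;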
[10/34] hive git commit: HIVE-13549: Remove jdk version specific out files from Hive2 (Mohit Sabharwal, reviewed by Sergio Pena)

Posted by sp...@apache.org.
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/spark/list_bucket_dml_10.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/list_bucket_dml_10.q.java1.8.out b/ql/src/test/results/clientpositive/spark/list_bucket_dml_10.q.java1.8.out
deleted file mode 100644
index 12f41eb..0000000
--- a/ql/src/test/results/clientpositive/spark/list_bucket_dml_10.q.java1.8.out
+++ /dev/null
@@ -1,280 +0,0 @@
-PREHOOK: query: -- run this test case in minimr to ensure it works in cluster
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key) on ('484','51','103')
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- run this test case in minimr to ensure it works in cluster
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key) on ('484','51','103')
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_static_part
-PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from src
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from src
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            src
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_TAB
-            TOK_TABNAME
-               list_bucketing_static_part
-            TOK_PARTSPEC
-               TOK_PARTVAL
-                  ds
-                  '2008-04-08'
-               TOK_PARTVAL
-                  hr
-                  '11'
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               key
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               value
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Spark
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  GatherStats: false
-                  Select Operator
-                    expressions: key (type: string), value (type: string)
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 1
-#### A masked pattern was here ####
-                      NumFilesPerFileSink: 1
-                      Static Partition Specification: ds=2008-04-08/hr=11/
-                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                      table:
-                          input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                          properties:
-                            bucket_count -1
-                            columns key,value
-                            columns.comments 
-                            columns.types string:string
-#### A masked pattern was here ####
-                            name default.list_bucketing_static_part
-                            partition_columns ds/hr
-                            partition_columns.types string:string
-                            serialization.ddl struct list_bucketing_static_part { string key, string value}
-                            serialization.format 1
-                            serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                          serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                          name: default.list_bucketing_static_part
-                      TotalFiles: 1
-                      GatherStats: true
-                      MultiFileSpray: false
-            Path -> Alias:
-#### A masked pattern was here ####
-            Path -> Partition:
-#### A masked pattern was here ####
-                Partition
-                  base file name: src
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.src
-                    numFiles 1
-                    numRows 500
-                    rawDataSize 5312
-                    serialization.ddl struct src { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      COLUMN_STATS_ACCURATE true
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.src
-                      numFiles 1
-                      numRows 500
-                      rawDataSize 5312
-                      serialization.ddl struct src { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      totalSize 5812
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.src
-                  name: default.src
-            Truncated Path -> Alias:
-              /src [src]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from src
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from src
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_static_part
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_static_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	4                   
-	numRows             	500                 
-	rawDataSize         	4812                
-	totalSize           	5520                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key]               	 
-Skewed Values:      	[[484], [51], [103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103, [51]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=51, [484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   

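The java1.8 variant deleted above differs from its java1.7 twin only in the
rendering of COLUMN_STATS_ACCURATE; once that rendering is deterministic, one
golden file suffices, so the pair collapses into the single
list_bucket_dml_10.q.out added below. The per-JDK outputs were opted into by a
marker comment in the .q file itself, visible in the deleted output's query
text and gone from the regenerated one. Roughly (comment lines as they appear
in these tests; the selection mechanics in the test driver are assumed, not
shown in this patch):

    -- before: two golden files, picked by the running JDK
    -- run this test case in minimr to ensure it works in cluster
    -- JAVA_VERSION_SPECIFIC_OUTPUT

    -- after: a single list_bucket_dml_10.q.out, no marker
    -- run this test case in minimr to ensure it works in cluster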
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/spark/list_bucket_dml_10.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/list_bucket_dml_10.q.out b/ql/src/test/results/clientpositive/spark/list_bucket_dml_10.q.out
new file mode 100644
index 0000000..9eca85a
--- /dev/null
+++ b/ql/src/test/results/clientpositive/spark/list_bucket_dml_10.q.out
@@ -0,0 +1,250 @@
+PREHOOK: query: -- run this test case in minimr to ensure it works in cluster
+
+-- list bucketing DML: static partition. multiple skewed columns.
+-- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+--  5263 000000_0
+--  5263 000001_0
+-- ds=2008-04-08/hr=11/key=103/value=val_103:
+-- 99 000000_0
+-- 99 000001_0
+-- ds=2008-04-08/hr=11/key=484/value=val_484:
+-- 87 000000_0
+-- 87 000001_0
+
+-- create a skewed table
+create table list_bucketing_static_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (key) on ('484','51','103')
+    stored as DIRECTORIES
+    STORED AS RCFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@list_bucketing_static_part
+POSTHOOK: query: -- run this test case in minimr to ensure it works in cluster
+
+-- list bucketing DML: static partition. multiple skewed columns.
+-- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
+--  5263 000000_0
+--  5263 000001_0
+-- ds=2008-04-08/hr=11/key=103/value=val_103:
+-- 99 000000_0
+-- 99 000001_0
+-- ds=2008-04-08/hr=11/key=484/value=val_484:
+-- 87 000000_0
+-- 87 000001_0
+
+-- create a skewed table
+create table list_bucketing_static_part (key String, value String) 
+    partitioned by (ds String, hr String) 
+    skewed by (key) on ('484','51','103')
+    stored as DIRECTORIES
+    STORED AS RCFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@list_bucketing_static_part
+PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from src
+PREHOOK: type: QUERY
+POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
+explain extended
+insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
+select key, value from src
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  GatherStats: false
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      GlobalTableId: 1
+#### A masked pattern was here ####
+                      NumFilesPerFileSink: 1
+                      Static Partition Specification: ds=2008-04-08/hr=11/
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                      table:
+                          input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+                          properties:
+                            bucket_count -1
+                            columns key,value
+                            columns.comments 
+                            columns.types string:string
+#### A masked pattern was here ####
+                            name default.list_bucketing_static_part
+                            partition_columns ds/hr
+                            partition_columns.types string:string
+                            serialization.ddl struct list_bucketing_static_part { string key, string value}
+                            serialization.format 1
+                            serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+                          serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                          name: default.list_bucketing_static_part
+                      TotalFiles: 1
+                      GatherStats: true
+                      MultiFileSpray: false
+            Path -> Alias:
+#### A masked pattern was here ####
+            Path -> Partition:
+#### A masked pattern was here ####
+                Partition
+                  base file name: src
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+                    bucket_count -1
+                    columns key,value
+                    columns.comments 'default','default'
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.src
+                    numFiles 1
+                    numRows 500
+                    rawDataSize 5312
+                    serialization.ddl struct src { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 5812
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+                      bucket_count -1
+                      columns key,value
+                      columns.comments 'default','default'
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.src
+                      numFiles 1
+                      numRows 500
+                      rawDataSize 5312
+                      serialization.ddl struct src { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 5812
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.src
+                  name: default.src
+            Truncated Path -> Alias:
+              /src [src]
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          partition:
+            ds 2008-04-08
+            hr 11
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types string:string
+#### A masked pattern was here ####
+                name default.list_bucketing_static_part
+                partition_columns ds/hr
+                partition_columns.types string:string
+                serialization.ddl struct list_bucketing_static_part { string key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+              name: default.list_bucketing_static_part
+
+  Stage: Stage-2
+    Stats-Aggr Operator
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
+select key, value from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
+select key, value from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: -- check DML result
+show partitions list_bucketing_static_part
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Input: default@list_bucketing_static_part
+ds=2008-04-08/hr=11
+PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@list_bucketing_static_part
+POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@list_bucketing_static_part
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	                    
+value               	string              	                    
+	 	 
+# Partition Information	 	 
+# col_name            	data_type           	comment             
+	 	 
+ds                  	string              	                    
+hr                  	string              	                    
+	 	 
+# Detailed Partition Information	 	 
+Partition Value:    	[2008-04-08, 11]    	 
+Database:           	default             	 
+Table:              	list_bucketing_static_part	 
+#### A masked pattern was here ####
+Partition Parameters:	 	 
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles            	4                   
+	numRows             	500                 
+	rawDataSize         	4812                
+	totalSize           	5520                
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
+InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Stored As SubDirectories:	Yes                 	 
+Skewed Columns:     	[key]               	 
+Skewed Values:      	[[484], [51], [103]]	 
+#### A masked pattern was here ####
+Skewed Value to Truncated Path:	{[103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103, [51]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=51, [484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484}	 
+Storage Desc Params:	 	 
+	serialization.format	1                   

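One detail worth noting in the regenerated output above: after the plain
INSERT OVERWRITE, the partition's parameter reads

    COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}

(the quotes appear backslash-escaped in the golden desc formatted output).
Only basic stats (numFiles, numRows, rawDataSize, totalSize) are gathered on
write; per-column stats stay unmarked until the partition is analyzed. A
minimal sketch of the step that would add the COLUMN_STATS map (partition
spec taken from the test, the statement itself is not part of it):

    analyze table list_bucketing_static_part
        partition (ds='2008-04-08', hr='11')
        compute statistics for columns key, value;
    -- COLUMN_STATS_ACCURATE then becomes, e.g.:
    --   {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}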
http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/spark/list_bucket_dml_2.q.java1.7.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/list_bucket_dml_2.q.java1.7.out b/ql/src/test/results/clientpositive/spark/list_bucket_dml_2.q.java1.7.out
deleted file mode 100644
index d8da70c..0000000
--- a/ql/src/test/results/clientpositive/spark/list_bucket_dml_2.q.java1.7.out
+++ /dev/null
@@ -1,591 +0,0 @@
-PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_static_part
-PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Spark
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: srcpart
-                  Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-                  GatherStats: false
-                  Select Operator
-                    expressions: key (type: string), value (type: string)
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 1
-#### A masked pattern was here ####
-                      NumFilesPerFileSink: 1
-                      Static Partition Specification: ds=2008-04-08/hr=11/
-                      Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                      table:
-                          input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                          properties:
-                            bucket_count -1
-                            columns key,value
-                            columns.comments 
-                            columns.types string:string
-#### A masked pattern was here ####
-                            name default.list_bucketing_static_part
-                            partition_columns ds/hr
-                            partition_columns.types string:string
-                            serialization.ddl struct list_bucketing_static_part { string key, string value}
-                            serialization.format 1
-                            serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                          serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                          name: default.list_bucketing_static_part
-                      TotalFiles: 1
-                      GatherStats: true
-                      MultiFileSpray: false
-            Path -> Alias:
-#### A masked pattern was here ####
-            Path -> Partition:
-#### A masked pattern was here ####
-                Partition
-                  base file name: hr=11
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                    hr 11
-                  properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-#### A masked pattern was here ####
-                Partition
-                  base file name: hr=12
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                    hr 12
-                  properties:
-                    COLUMN_STATS_ACCURATE {"COLUMN_STATS":{"key":"true","value":"true"},"BASIC_STATS":"true"}
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-            Truncated Path -> Alias:
-              /srcpart/ds=2008-04-08/hr=11 [srcpart]
-              /srcpart/ds=2008-04-08/hr=12 [srcpart]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_static_part
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_static_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
-	numFiles            	6                   
-	numRows             	1000                
-	rawDataSize         	9624                
-	totalSize           	10898               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[103, val_103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103/value=val_103, [484, val_484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484/value=val_484}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-1000
-PREHOOK: query: select count(*) from list_bucketing_static_part
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from list_bucketing_static_part
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-1000
-PREHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Partition Description:
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              numFiles 6
-              numRows 1000
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 9624
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 10898
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      Processor Tree:
-        TableScan
-          alias: list_bucketing_static_part
-          GatherStats: false
-          Filter Operator
-            isSamplingPred: false
-            predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
-            Select Operator
-              expressions: '484' (type: string), 'val_484' (type: string), '2008-04-08' (type: string), '11' (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              ListSink
-
-PREHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	11
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	12
-PREHOOK: query: -- 51 and val_51 in the table so skewed data for 51 and val_14 should be none
--- but query should succeed for 51 or 51 and val_14
-select * from srcpart where ds = '2008-04-08' and key = '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: -- 51 and val_51 in the table so skewed data for 51 and val_14 should be none
--- but query should succeed for 51 or 51 and val_14
-select * from srcpart where ds = '2008-04-08' and key = '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	12
-51	val_51	2008-04-08	12
-PREHOOK: query: select * from list_bucketing_static_part where key = '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where key = '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	11
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '51' and value = 'val_14'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '51' and value = 'val_14'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-PREHOOK: query: select * from list_bucketing_static_part where key = '51' and value = 'val_14'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where key = '51' and value = 'val_14'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-PREHOOK: query: -- queries with < <= > >= should work for skewed test although we don't benefit from pruning
-select count(1) from srcpart where ds = '2008-04-08' and key < '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: -- queries with < <= > >= should work for skewed test although we don't benefit from pruning
-select count(1) from srcpart where ds = '2008-04-08' and key < '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-910
-PREHOOK: query: select count(1) from list_bucketing_static_part where key < '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from list_bucketing_static_part where key < '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-910
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key <= '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key <= '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-914
-PREHOOK: query: select count(1) from list_bucketing_static_part where key <= '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from list_bucketing_static_part where key <= '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-914
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key > '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key > '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-86
-PREHOOK: query: select count(1) from list_bucketing_static_part where key > '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from list_bucketing_static_part where key > '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-86
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key >= '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key >= '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-90
-PREHOOK: query: select count(1) from list_bucketing_static_part where key >= '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from list_bucketing_static_part where key >= '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-90
-PREHOOK: query: -- clean up
-drop table list_bucketing_static_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- clean up
-drop table list_bucketing_static_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Output: default@list_bucketing_static_part

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/spark/list_bucket_dml_2.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/list_bucket_dml_2.q.java1.8.out b/ql/src/test/results/clientpositive/spark/list_bucket_dml_2.q.java1.8.out
deleted file mode 100644
index 23dc6a3..0000000
--- a/ql/src/test/results/clientpositive/spark/list_bucket_dml_2.q.java1.8.out
+++ /dev/null
@@ -1,663 +0,0 @@
-PREHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- list bucketing DML: static partition. multiple skewed columns.
--- ds=2008-04-08/hr=11/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
---  5263 000000_0
---  5263 000001_0
--- ds=2008-04-08/hr=11/key=103/value=val_103:
--- 99 000000_0
--- 99 000001_0
--- ds=2008-04-08/hr=11/key=484/value=val_484:
--- 87 000000_0
--- 87 000001_0
-
--- create a skewed table
-create table list_bucketing_static_part (key String, value String) 
-    partitioned by (ds String, hr String) 
-    skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
-    stored as DIRECTORIES
-    STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_static_part
-PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08',  hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            srcpart
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_TAB
-            TOK_TABNAME
-               list_bucketing_static_part
-            TOK_PARTSPEC
-               TOK_PARTVAL
-                  ds
-                  '2008-04-08'
-               TOK_PARTVAL
-                  hr
-                  '11'
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               key
-         TOK_SELEXPR
-            TOK_TABLE_OR_COL
-               value
-      TOK_WHERE
-         =
-            TOK_TABLE_OR_COL
-               ds
-            '2008-04-08'
-
-
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Spark
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: srcpart
-                  Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-                  GatherStats: false
-                  Select Operator
-                    expressions: key (type: string), value (type: string)
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 1
-#### A masked pattern was here ####
-                      NumFilesPerFileSink: 1
-                      Static Partition Specification: ds=2008-04-08/hr=11/
-                      Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                      table:
-                          input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-                          properties:
-                            bucket_count -1
-                            columns key,value
-                            columns.comments 
-                            columns.types string:string
-#### A masked pattern was here ####
-                            name default.list_bucketing_static_part
-                            partition_columns ds/hr
-                            partition_columns.types string:string
-                            serialization.ddl struct list_bucketing_static_part { string key, string value}
-                            serialization.format 1
-                            serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-                          serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-                          name: default.list_bucketing_static_part
-                      TotalFiles: 1
-                      GatherStats: true
-                      MultiFileSpray: false
-            Path -> Alias:
-#### A masked pattern was here ####
-            Path -> Partition:
-#### A masked pattern was here ####
-                Partition
-                  base file name: hr=11
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                    hr 11
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-#### A masked pattern was here ####
-                Partition
-                  base file name: hr=12
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                    hr 12
-                  properties:
-                    COLUMN_STATS_ACCURATE true
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-            Truncated Path -> Alias:
-              /srcpart/ds=2008-04-08/hr=11 [srcpart]
-              /srcpart/ds=2008-04-08/hr=12 [srcpart]
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds 2008-04-08
-            hr 11
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: query: insert overwrite table list_bucketing_static_part partition (ds = '2008-04-08', hr = '11')
-select key, value from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_static_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_static_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_static_part
-ds=2008-04-08/hr=11
-PREHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: query: desc formatted list_bucketing_static_part partition (ds='2008-04-08', hr='11')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-# col_name            	data_type           	comment             
-	 	 
-key                 	string              	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-hr                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2008-04-08, 11]    	 
-Database:           	default             	 
-Table:              	list_bucketing_static_part	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	6                   
-	numRows             	1000                
-	rawDataSize         	9624                
-	totalSize           	10898               
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.RCFileInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.RCFileOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Stored As SubDirectories:	Yes                 	 
-Skewed Columns:     	[key, value]        	 
-Skewed Values:      	[[484, val_484], [51, val_14], [103, val_103]]	 
-#### A masked pattern was here ####
-Skewed Value to Truncated Path:	{[484, val_484]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=484/value=val_484, [103, val_103]=/list_bucketing_static_part/ds=2008-04-08/hr=11/key=103/value=val_103}	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-1000
-PREHOOK: query: select count(*) from list_bucketing_static_part
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from list_bucketing_static_part
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-1000
-PREHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  
-TOK_QUERY
-   TOK_FROM
-      TOK_TABREF
-         TOK_TABNAME
-            list_bucketing_static_part
-   TOK_INSERT
-      TOK_DESTINATION
-         TOK_DIR
-            TOK_TMP_FILE
-      TOK_SELECT
-         TOK_SELEXPR
-            TOK_ALLCOLREF
-      TOK_WHERE
-         and
-            and
-               and
-                  =
-                     TOK_TABLE_OR_COL
-                        ds
-                     '2008-04-08'
-                  =
-                     TOK_TABLE_OR_COL
-                        hr
-                     '11'
-               =
-                  TOK_TABLE_OR_COL
-                     key
-                  '484'
-            =
-               TOK_TABLE_OR_COL
-                  value
-               'val_484'
-
-
-STAGE DEPENDENCIES:
-  Stage-0 is a root stage
-
-STAGE PLANS:
-  Stage: Stage-0
-    Fetch Operator
-      limit: -1
-      Partition Description:
-          Partition
-            input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE true
-              bucket_count -1
-              columns key,value
-              columns.comments 
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.list_bucketing_static_part
-              numFiles 6
-              numRows 1000
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 9624
-              serialization.ddl struct list_bucketing_static_part { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              totalSize 10898
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-          
-              input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.list_bucketing_static_part
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct list_bucketing_static_part { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-              name: default.list_bucketing_static_part
-            name: default.list_bucketing_static_part
-      Processor Tree:
-        TableScan
-          alias: list_bucketing_static_part
-          Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
-          GatherStats: false
-          Filter Operator
-            isSamplingPred: false
-            predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
-            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-            Select Operator
-              expressions: '484' (type: string), 'val_484' (type: string), '2008-04-08' (type: string), '11' (type: string)
-              outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
-              ListSink
-
-PREHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where ds = '2008-04-08' and  hr = '11' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	11
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-484	val_484	2008-04-08	11
-484	val_484	2008-04-08	12
-PREHOOK: query: -- 51 and val_51 in the table so skewed data for 51 and val_14 should be none
--- but query should succeed for 51 or 51 and val_14
-select * from srcpart where ds = '2008-04-08' and key = '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: -- 51 and val_51 in the table so skewed data for 51 and val_14 should be none
--- but query should succeed for 51 or 51 and val_14
-select * from srcpart where ds = '2008-04-08' and key = '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	12
-51	val_51	2008-04-08	12
-PREHOOK: query: select * from list_bucketing_static_part where key = '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where key = '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	11
-51	val_51	2008-04-08	11
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '51' and value = 'val_14'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '51' and value = 'val_14'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-PREHOOK: query: select * from list_bucketing_static_part where key = '51' and value = 'val_14'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_static_part where key = '51' and value = 'val_14'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-PREHOOK: query: -- queries with < <= > >= should work for skewed test although we don't benefit from pruning
-select count(1) from srcpart where ds = '2008-04-08' and key < '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: -- queries with < <= > >= should work for skewed test although we don't benefit from pruning
-select count(1) from srcpart where ds = '2008-04-08' and key < '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-910
-PREHOOK: query: select count(1) from list_bucketing_static_part where key < '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from list_bucketing_static_part where key < '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-910
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key <= '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key <= '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-914
-PREHOOK: query: select count(1) from list_bucketing_static_part where key <= '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from list_bucketing_static_part where key <= '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-914
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key > '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key > '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-86
-PREHOOK: query: select count(1) from list_bucketing_static_part where key > '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from list_bucketing_static_part where key > '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-86
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key >= '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08' and key >= '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-90
-PREHOOK: query: select count(1) from list_bucketing_static_part where key >= '51'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from list_bucketing_static_part where key >= '51'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Input: default@list_bucketing_static_part@ds=2008-04-08/hr=11
-#### A masked pattern was here ####
-90
-PREHOOK: query: -- clean up
-drop table list_bucketing_static_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_static_part
-PREHOOK: Output: default@list_bucketing_static_part
-POSTHOOK: query: -- clean up
-drop table list_bucketing_static_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_static_part
-POSTHOOK: Output: default@list_bucketing_static_part

http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/spark/list_bucket_dml_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/list_bucket_dml_2.q.out b/ql/src/test/results/clientpositive/spark/list_bucket_dml_2.q.out
index 3ee9b5a..c83c02e 100644
Binary files a/ql/src/test/results/clientpositive/spark/list_bucket_dml_2.q.out and b/ql/src/test/results/clientpositive/spark/list_bucket_dml_2.q.out differ
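For context on the hunks above: every textual change in these golden files reduces to the serialized form of COLUMN_STATS_ACCURATE, which went from a bare "true" to a JSON object whose key order must be stable, since a HashMap's iteration order can differ between JDK7 and JDK8 and a golden file can only match one ordering. The following is a minimal, hypothetical sketch of the deterministic-ordering idea only; the class and method names are invented for illustration and are not Hive's actual StatsSetupConst code (see HIVE-13409 for the real fix).

    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;

    // Hypothetical demo class, not part of Hive. Shows why sorting keys
    // makes the rendered COLUMN_STATS_ACCURATE property deterministic.
    public class ColumnStatsOrderDemo {
        // Renders a flat map as a JSON object with keys in sorted order.
        static String render(Map<String, String> props) {
            StringBuilder sb = new StringBuilder("{");
            // TreeMap iterates keys in natural (sorted) order on any JVM,
            // unlike HashMap, whose order is unspecified and JDK-dependent.
            for (Map.Entry<String, String> e : new TreeMap<>(props).entrySet()) {
                if (sb.length() > 1) {
                    sb.append(',');
                }
                sb.append('"').append(e.getKey()).append("\":\"")
                  .append(e.getValue()).append('"');
            }
            return sb.append('}').toString();
        }

        public static void main(String[] args) {
            Map<String, String> stats = new HashMap<>();
            stats.put("COLUMN_STATS", "...");   // nested object elided for brevity
            stats.put("BASIC_STATS", "true");
            // Prints {"BASIC_STATS":"true","COLUMN_STATS":"..."} on every JDK.
            System.out.println(render(stats));
        }
    }

Sorted output is why the updated expected files consistently show "BASIC_STATS" before "COLUMN_STATS", where the deleted java1.7/java1.8 variants had to carry one golden file per key ordering.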