Posted to commits@hive.apache.org by br...@apache.org on 2014/11/07 21:41:45 UTC

svn commit: r1637444 [13/20] - in /hive/branches/spark: ./ cli/src/test/org/apache/hadoop/hive/cli/ common/ common/src/java/org/apache/hadoop/hive/common/type/ common/src/java/org/apache/hadoop/hive/conf/ common/src/test/org/apache/hadoop/hive/conf/ co...

Modified: hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_unqual4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_unqual4.q.out?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_unqual4.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/join_cond_pushdown_unqual4.q.out Fri Nov  7 20:41:34 2014
@@ -1,45 +1,3 @@
-PREHOOK: query: DROP TABLE part
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE part
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: -- data setup
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@part
-POSTHOOK: query: -- data setup
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@part
 PREHOOK: query: create table part2( 
     p2_partkey INT,
     p2_name STRING,
@@ -114,27 +72,27 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: p4
-            Statistics: Num rows: 5 Data size: 3173 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: p_name is not null (type: boolean)
-              Statistics: Num rows: 3 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: p_name (type: string)
                 sort order: +
                 Map-reduce partition columns: p_name (type: string)
-                Statistics: Num rows: 3 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
                 value expressions: p_partkey (type: int), p_mfgr (type: string), p_brand (type: string), p_type (type: string), p_size (type: int), p_container (type: string), p_retailprice (type: double), p_comment (type: string)
           TableScan
             alias: p1
-            Statistics: Num rows: 5 Data size: 3173 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: p_name is not null (type: boolean)
-              Statistics: Num rows: 3 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: p_name (type: string)
                 sort order: +
                 Map-reduce partition columns: p_name (type: string)
-                Statistics: Num rows: 3 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
                 value expressions: p_partkey (type: int), p_mfgr (type: string), p_brand (type: string), p_type (type: string), p_size (type: int), p_container (type: string), p_retailprice (type: double), p_comment (type: string)
           TableScan
             alias: p3
@@ -172,17 +130,17 @@ STAGE PLANS:
             2 {VALUE._col0} {KEY.reducesinkkey0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7}
             3 {VALUE._col0} {KEY.reducesinkkey0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7}
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col36, _col37, _col38, _col39, _col40, _col41, _col42, _col43, _col44
-          Statistics: Num rows: 9 Data size: 6279 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 42 Data size: 5190 Basic stats: COMPLETE Column stats: NONE
           Filter Operator
             predicate: ((_col13 = _col25) and (_col1 = _col37)) (type: boolean)
-            Statistics: Num rows: 2 Data size: 1395 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 10 Data size: 1235 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col12 (type: int), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: string), _col17 (type: int), _col18 (type: string), _col19 (type: double), _col20 (type: string), _col24 (type: int), _col25 (type: string), _col26 (type: string), _col27 (type: string), _col28 (type: string), _col29 (type: int), _col30 (type: string), _col31 (type: double), _col32 (type: string), _col36 (type: int), _col37 (type: string), _col38 (type: string), _col39 (type: string), _col40 (type: string), _col41 (type: int), _col42 (type: string), _col43 (type: double), _col44 (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33, _col34, _col35
-              Statistics: Num rows: 2 Data size: 1395 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 10 Data size: 1235 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
                 compressed: false
-                Statistics: Num rows: 2 Data size: 1395 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 10 Data size: 1235 Basic stats: COMPLETE Column stats: NONE
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -228,15 +186,15 @@ STAGE PLANS:
                 value expressions: p2_mfgr (type: string), p2_brand (type: string), p2_type (type: string), p2_size (type: int), p2_container (type: string), p2_retailprice (type: double), p2_comment (type: string)
           TableScan
             alias: p1
-            Statistics: Num rows: 5 Data size: 3173 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: (p_name is not null and p_partkey is not null) (type: boolean)
-              Statistics: Num rows: 2 Data size: 1269 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 7 Data size: 847 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: p_name (type: string), p_partkey (type: int)
                 sort order: ++
                 Map-reduce partition columns: p_name (type: string), p_partkey (type: int)
-                Statistics: Num rows: 2 Data size: 1269 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 7 Data size: 847 Basic stats: COMPLETE Column stats: NONE
                 value expressions: p_mfgr (type: string), p_brand (type: string), p_type (type: string), p_size (type: int), p_container (type: string), p_retailprice (type: double), p_comment (type: string)
       Reduce Operator Tree:
         Join Operator
@@ -246,7 +204,7 @@ STAGE PLANS:
             0 {KEY.reducesinkkey1} {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6}
             1 {KEY.reducesinkkey1} {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6}
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20
-          Statistics: Num rows: 2 Data size: 1395 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 7 Data size: 931 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
             table:
@@ -274,7 +232,7 @@ STAGE PLANS:
               key expressions: _col13 (type: string)
               sort order: +
               Map-reduce partition columns: _col13 (type: string)
-              Statistics: Num rows: 2 Data size: 1395 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 7 Data size: 931 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col12 (type: int), _col14 (type: string), _col15 (type: string), _col16 (type: string), _col17 (type: int), _col18 (type: string), _col19 (type: double), _col20 (type: string)
       Reduce Operator Tree:
         Join Operator
@@ -284,7 +242,7 @@ STAGE PLANS:
             0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {VALUE._col12} {KEY.reducesinkkey0} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18} {VALUE._col19}
             1 {VALUE._col0} {KEY.reducesinkkey0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7}
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32
-          Statistics: Num rows: 2 Data size: 1534 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 7 Data size: 1024 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
             table:
@@ -297,22 +255,22 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: p4
-            Statistics: Num rows: 5 Data size: 3173 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: p_partkey is not null (type: boolean)
-              Statistics: Num rows: 3 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: p_partkey (type: int)
                 sort order: +
                 Map-reduce partition columns: p_partkey (type: int)
-                Statistics: Num rows: 3 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
                 value expressions: p_name (type: string), p_mfgr (type: string), p_brand (type: string), p_type (type: string), p_size (type: int), p_container (type: string), p_retailprice (type: double), p_comment (type: string)
           TableScan
             Reduce Output Operator
               key expressions: _col0 (type: int)
               sort order: +
               Map-reduce partition columns: _col0 (type: int)
-              Statistics: Num rows: 2 Data size: 1534 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 7 Data size: 1024 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col12 (type: int), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: string), _col17 (type: int), _col18 (type: string), _col19 (type: double), _col20 (type: string), _col24 (type: int), _col25 (type: string), _col26 (type: string), _col27 (type: string), _col28 (type: string), _col29 (type: int), _col30 (type: string), _col31 (type: double), _col32 (type: string)
       Reduce Operator Tree:
         Join Operator
@@ -322,17 +280,17 @@ STAGE PLANS:
             0 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col11} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18} {VALUE._col19} {VALUE._col23} {VALUE._col24} {VALUE._col25} {VALUE._col26} {VALUE._col27} {VALUE._col28} {VALUE._col29} {VALUE._col30} {VALUE._col31}
             1 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7}
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col36, _col37, _col38, _col39, _col40, _col41, _col42, _col43, _col44
-          Statistics: Num rows: 3 Data size: 2093 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
           Filter Operator
             predicate: (((_col13 = _col25) and (_col0 = _col36)) and (_col0 = _col12)) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 123 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col12 (type: int), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: string), _col17 (type: int), _col18 (type: string), _col19 (type: double), _col20 (type: string), _col24 (type: int), _col25 (type: string), _col26 (type: string), _col27 (type: string), _col28 (type: string), _col29 (type: int), _col30 (type: string), _col31 (type: double), _col32 (type: string), _col36 (type: int), _col37 (type: string), _col38 (type: string), _col39 (type: string), _col40 (type: string), _col41 (type: int), _col42 (type: string), _col43 (type: double), _col44 (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33, _col34, _col35
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 123 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
                 compressed: false
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 123 Basic stats: COMPLETE Column stats: NONE
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

Modified: hive/branches/spark/ql/src/test/results/clientpositive/join_merging.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/join_merging.q.out?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/join_merging.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/join_merging.q.out Fri Nov  7 20:41:34 2014
@@ -1,31 +1,3 @@
-PREHOOK: query: CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@part
-POSTHOOK: query: CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@part
 PREHOOK: query: explain select p1.p_size, p2.p_size 
 from part p1 left outer join part p2 on p1.p_partkey = p2.p_partkey 
   right outer join part p3 on p2.p_partkey = p3.p_partkey and 
@@ -46,32 +18,32 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: p3
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
             Reduce Output Operator
               key expressions: p_partkey (type: int)
               sort order: +
               Map-reduce partition columns: p_partkey (type: int)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
           TableScan
             alias: p2
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
             Reduce Output Operator
               key expressions: p_partkey (type: int)
               sort order: +
               Map-reduce partition columns: p_partkey (type: int)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
               value expressions: p_size (type: int)
           TableScan
             alias: p1
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: (p_size > 10) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 8 Data size: 968 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: p_partkey (type: int)
                 sort order: +
                 Map-reduce partition columns: p_partkey (type: int)
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 8 Data size: 968 Basic stats: COMPLETE Column stats: NONE
                 value expressions: p_size (type: int)
       Reduce Operator Tree:
         Join Operator
@@ -83,14 +55,14 @@ STAGE PLANS:
             1 {VALUE._col4}
             2 
           outputColumnNames: _col5, _col17
-          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Statistics: Num rows: 57 Data size: 6923 Basic stats: COMPLETE Column stats: NONE
           Select Operator
             expressions: _col5 (type: int), _col17 (type: int)
             outputColumnNames: _col0, _col1
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 57 Data size: 6923 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 57 Data size: 6923 Basic stats: COMPLETE Column stats: NONE
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -122,32 +94,32 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: p3
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
             Reduce Output Operator
               key expressions: p_partkey (type: int)
               sort order: +
               Map-reduce partition columns: p_partkey (type: int)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
           TableScan
             alias: p2
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
             Reduce Output Operator
               key expressions: p_partkey (type: int)
               sort order: +
               Map-reduce partition columns: p_partkey (type: int)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
               value expressions: p_size (type: int)
           TableScan
             alias: p1
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: (p_size > 10) (type: boolean)
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 8 Data size: 968 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: p_partkey (type: int)
                 sort order: +
                 Map-reduce partition columns: p_partkey (type: int)
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 8 Data size: 968 Basic stats: COMPLETE Column stats: NONE
                 value expressions: p_size (type: int)
       Reduce Operator Tree:
         Join Operator
@@ -159,17 +131,17 @@ STAGE PLANS:
             1 {VALUE._col4}
             2 
           outputColumnNames: _col5, _col17
-          Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+          Statistics: Num rows: 57 Data size: 6923 Basic stats: COMPLETE Column stats: NONE
           Filter Operator
             predicate: (_col5 > (_col17 + 10)) (type: boolean)
-            Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+            Statistics: Num rows: 19 Data size: 2307 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: _col5 (type: int), _col17 (type: int)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+              Statistics: Num rows: 19 Data size: 2307 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
                 compressed: false
-                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                Statistics: Num rows: 19 Data size: 2307 Basic stats: COMPLETE Column stats: NONE
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

Modified: hive/branches/spark/ql/src/test/results/clientpositive/lateral_view.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/lateral_view.q.out?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/lateral_view.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/lateral_view.q.out Fri Nov  7 20:41:34 2014
@@ -132,14 +132,14 @@ STAGE PLANS:
             Lateral View Forward
               Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
               Select Operator
-                Statistics: Num rows: 500 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                Statistics: Num rows: 500 Data size: 134000 Basic stats: COMPLETE Column stats: COMPLETE
                 Lateral View Join Operator
                   outputColumnNames: _col5
-                  Statistics: Num rows: 1000 Data size: 28000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 1000 Data size: 162000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: _col5 (type: int)
                     outputColumnNames: _col0
-                    Statistics: Num rows: 1000 Data size: 28000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 1000 Data size: 162000 Basic stats: COMPLETE Column stats: COMPLETE
                     Limit
                       Number of rows: 3
                       Statistics: Num rows: 3 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
@@ -159,11 +159,11 @@ STAGE PLANS:
                   function name: explode
                   Lateral View Join Operator
                     outputColumnNames: _col5
-                    Statistics: Num rows: 1000 Data size: 28000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 1000 Data size: 162000 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: _col5 (type: int)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1000 Data size: 28000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 1000 Data size: 162000 Basic stats: COMPLETE Column stats: COMPLETE
                       Limit
                         Number of rows: 3
                         Statistics: Num rows: 3 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
@@ -199,12 +199,12 @@ STAGE PLANS:
             Lateral View Forward
               Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
               Select Operator
-                Statistics: Num rows: 500 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                Statistics: Num rows: 500 Data size: 134000 Basic stats: COMPLETE Column stats: COMPLETE
                 Lateral View Join Operator
                   outputColumnNames: _col5
-                  Statistics: Num rows: 1000 Data size: 28000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 1000 Data size: 162000 Basic stats: COMPLETE Column stats: COMPLETE
                   Lateral View Forward
-                    Statistics: Num rows: 1000 Data size: 28000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 1000 Data size: 162000 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: _col5 (type: int)
                       outputColumnNames: _col5
@@ -259,9 +259,9 @@ STAGE PLANS:
                   function name: explode
                   Lateral View Join Operator
                     outputColumnNames: _col5
-                    Statistics: Num rows: 1000 Data size: 28000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 1000 Data size: 162000 Basic stats: COMPLETE Column stats: COMPLETE
                     Lateral View Forward
-                      Statistics: Num rows: 1000 Data size: 28000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 1000 Data size: 162000 Basic stats: COMPLETE Column stats: COMPLETE
                       Select Operator
                         expressions: _col5 (type: int)
                         outputColumnNames: _col5
@@ -332,12 +332,12 @@ STAGE PLANS:
             Lateral View Forward
               Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
               Select Operator
-                Statistics: Num rows: 500 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                Statistics: Num rows: 500 Data size: 134000 Basic stats: COMPLETE Column stats: COMPLETE
                 Lateral View Join Operator
                   outputColumnNames: _col5
-                  Statistics: Num rows: 1000 Data size: 24000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 1000 Data size: 158000 Basic stats: COMPLETE Column stats: COMPLETE
                   Lateral View Forward
-                    Statistics: Num rows: 1000 Data size: 24000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 1000 Data size: 158000 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       Statistics: Num rows: 1000 Data size: 268000 Basic stats: COMPLETE Column stats: COMPLETE
                       Lateral View Join Operator
@@ -390,9 +390,9 @@ STAGE PLANS:
                   function name: explode
                   Lateral View Join Operator
                     outputColumnNames: _col5
-                    Statistics: Num rows: 1000 Data size: 24000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 1000 Data size: 158000 Basic stats: COMPLETE Column stats: COMPLETE
                     Lateral View Forward
-                      Statistics: Num rows: 1000 Data size: 24000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 1000 Data size: 158000 Basic stats: COMPLETE Column stats: COMPLETE
                       Select Operator
                         Statistics: Num rows: 1000 Data size: 268000 Basic stats: COMPLETE Column stats: COMPLETE
                         Lateral View Join Operator
@@ -519,10 +519,10 @@ STAGE PLANS:
             Lateral View Forward
               Statistics: Num rows: 500 Data size: 1406 Basic stats: COMPLETE Column stats: COMPLETE
               Select Operator
-                Statistics: Num rows: 500 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                Statistics: Num rows: 500 Data size: 134000 Basic stats: COMPLETE Column stats: COMPLETE
                 Lateral View Join Operator
                   outputColumnNames: _col4
-                  Statistics: Num rows: 1000 Data size: 28000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 1000 Data size: 162000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: _col4 (type: int)
                     outputColumnNames: _col0
@@ -546,7 +546,7 @@ STAGE PLANS:
                   function name: explode
                   Lateral View Join Operator
                     outputColumnNames: _col4
-                    Statistics: Num rows: 1000 Data size: 28000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 1000 Data size: 162000 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: _col4 (type: int)
                       outputColumnNames: _col0

Modified: hive/branches/spark/ql/src/test/results/clientpositive/lateral_view_noalias.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/lateral_view_noalias.q.out?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/lateral_view_noalias.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/lateral_view_noalias.q.out Fri Nov  7 20:41:34 2014
@@ -18,14 +18,14 @@ STAGE PLANS:
             Lateral View Forward
               Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
               Select Operator
-                Statistics: Num rows: 500 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                Statistics: Num rows: 500 Data size: 134000 Basic stats: COMPLETE Column stats: COMPLETE
                 Lateral View Join Operator
                   outputColumnNames: _col5, _col6
-                  Statistics: Num rows: 1000 Data size: 192000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 1000 Data size: 326000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: _col5 (type: string), _col6 (type: int)
                     outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 1000 Data size: 192000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 1000 Data size: 326000 Basic stats: COMPLETE Column stats: COMPLETE
                     Limit
                       Number of rows: 2
                       Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
@@ -45,11 +45,11 @@ STAGE PLANS:
                   function name: explode
                   Lateral View Join Operator
                     outputColumnNames: _col5, _col6
-                    Statistics: Num rows: 1000 Data size: 192000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 1000 Data size: 326000 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: _col5 (type: string), _col6 (type: int)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 1000 Data size: 192000 Basic stats: COMPLETE Column stats: COMPLETE
+                      Statistics: Num rows: 1000 Data size: 326000 Basic stats: COMPLETE Column stats: COMPLETE
                       Limit
                         Number of rows: 2
                         Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
@@ -158,10 +158,10 @@ STAGE PLANS:
             Lateral View Forward
               Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
               Select Operator
-                Statistics: Num rows: 500 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                Statistics: Num rows: 500 Data size: 134000 Basic stats: COMPLETE Column stats: COMPLETE
                 Lateral View Join Operator
                   outputColumnNames: _col5, _col6
-                  Statistics: Num rows: 1000 Data size: 192000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 1000 Data size: 326000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: _col5 (type: string), _col6 (type: int)
                     outputColumnNames: _col0, _col1
@@ -182,7 +182,7 @@ STAGE PLANS:
                   function name: explode
                   Lateral View Join Operator
                     outputColumnNames: _col5, _col6
-                    Statistics: Num rows: 1000 Data size: 192000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 1000 Data size: 326000 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: _col5 (type: string), _col6 (type: int)
                       outputColumnNames: _col0, _col1
@@ -259,10 +259,10 @@ STAGE PLANS:
             Lateral View Forward
               Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
               Select Operator
-                Statistics: Num rows: 500 Data size: 0 Basic stats: PARTIAL Column stats: COMPLETE
+                Statistics: Num rows: 500 Data size: 134000 Basic stats: COMPLETE Column stats: COMPLETE
                 Lateral View Join Operator
                   outputColumnNames: _col5, _col6
-                  Statistics: Num rows: 1000 Data size: 192000 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 1000 Data size: 326000 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
                     expressions: _col5 (type: string), _col6 (type: int)
                     outputColumnNames: _col0, _col1
@@ -283,7 +283,7 @@ STAGE PLANS:
                   function name: explode
                   Lateral View Join Operator
                     outputColumnNames: _col5, _col6
-                    Statistics: Num rows: 1000 Data size: 192000 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 1000 Data size: 326000 Basic stats: COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: _col5 (type: string), _col6 (type: int)
                       outputColumnNames: _col0, _col1

Modified: hive/branches/spark/ql/src/test/results/clientpositive/lateral_view_ppd.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/lateral_view_ppd.q.out?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/lateral_view_ppd.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/lateral_view_ppd.q.out Fri Nov  7 20:41:34 2014
@@ -175,23 +175,44 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: srcpart
-            Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: ((ds = '2008-04-08') and (hr = '12')) (type: boolean)
-              Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-              Lateral View Forward
-                Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: value (type: string)
-                  outputColumnNames: value
-                  Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Lateral View Forward
+              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: value (type: string)
+                outputColumnNames: value
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                Lateral View Join Operator
+                  outputColumnNames: _col1, _col7
+                  Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col1 (type: string), _col7 (type: int)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
+                    Limit
+                      Number of rows: 12
+                      Statistics: Num rows: 12 Data size: 120 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 12 Data size: 120 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              Select Operator
+                expressions: array(1,2,3) (type: array<int>)
+                outputColumnNames: _col0
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                UDTF Operator
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  function name: explode
                   Lateral View Join Operator
                     outputColumnNames: _col1, _col7
-                    Statistics: Num rows: 4000 Data size: 42496 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col1 (type: string), _col7 (type: int)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 4000 Data size: 42496 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
                       Limit
                         Number of rows: 12
                         Statistics: Num rows: 12 Data size: 120 Basic stats: COMPLETE Column stats: NONE
@@ -202,30 +223,6 @@ STAGE PLANS:
                               input format: org.apache.hadoop.mapred.TextInputFormat
                               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                Select Operator
-                  expressions: array(1,2,3) (type: array<int>)
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-                  UDTF Operator
-                    Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE
-                    function name: explode
-                    Lateral View Join Operator
-                      outputColumnNames: _col1, _col7
-                      Statistics: Num rows: 4000 Data size: 42496 Basic stats: COMPLETE Column stats: NONE
-                      Select Operator
-                        expressions: _col1 (type: string), _col7 (type: int)
-                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 4000 Data size: 42496 Basic stats: COMPLETE Column stats: NONE
-                        Limit
-                          Number of rows: 12
-                          Statistics: Num rows: 12 Data size: 120 Basic stats: COMPLETE Column stats: NONE
-                          File Output Operator
-                            compressed: false
-                            Statistics: Num rows: 12 Data size: 120 Basic stats: COMPLETE Column stats: NONE
-                            table:
-                                input format: org.apache.hadoop.mapred.TextInputFormat
-                                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
     Fetch Operator
@@ -236,18 +233,12 @@ STAGE PLANS:
 PREHOOK: query: SELECT value, myCol FROM (SELECT * FROM srcpart LATERAL VIEW explode(array(1,2,3)) myTable AS myCol) a WHERE ds='2008-04-08' AND hr="12" LIMIT 12
 PREHOOK: type: QUERY
 PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
 #### A masked pattern was here ####
 POSTHOOK: query: SELECT value, myCol FROM (SELECT * FROM srcpart LATERAL VIEW explode(array(1,2,3)) myTable AS myCol) a WHERE ds='2008-04-08' AND hr="12" LIMIT 12
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
 #### A masked pattern was here ####
 val_238	1
 val_238	2

Modified: hive/branches/spark/ql/src/test/results/clientpositive/leadlag.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/leadlag.q.out?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/leadlag.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/leadlag.q.out Fri Nov  7 20:41:34 2014
@@ -1,45 +1,3 @@
-PREHOOK: query: DROP TABLE part
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE part
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: -- data setup
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@part
-POSTHOOK: query: -- data setup
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@part
 PREHOOK: query: --1. testLagWithPTFWindowing
 select p_mfgr, p_name,
 rank() over (partition by p_mfgr order by p_name) as r,

Modified: hive/branches/spark/ql/src/test/results/clientpositive/leadlag_queries.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/leadlag_queries.q.out?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/leadlag_queries.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/leadlag_queries.q.out Fri Nov  7 20:41:34 2014
@@ -1,41 +1,3 @@
-PREHOOK: query: -- data setup
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@part
-POSTHOOK: query: -- data setup
-CREATE TABLE part( 
-    p_partkey INT,
-    p_name STRING,
-    p_mfgr STRING,
-    p_brand STRING,
-    p_type STRING,
-    p_size INT,
-    p_container STRING,
-    p_retailprice DOUBLE,
-    p_comment STRING
-)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table part
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@part
 PREHOOK: query: -- 1. testLeadUDAF
 select p_mfgr, p_retailprice,
 lead(p_retailprice) over (partition by p_mfgr order by p_name) as l1,

Modified: hive/branches/spark/ql/src/test/results/clientpositive/load_dyn_part14_win.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/load_dyn_part14_win.q.out?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/load_dyn_part14_win.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/load_dyn_part14_win.q.out Fri Nov  7 20:41:34 2014
@@ -5,6 +5,8 @@ PREHOOK: query: -- INCLUDE_OS_WINDOWS
 create table if not exists nzhang_part14 (key string) 
   partitioned by (value string)
 PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@nzhang_part14
 POSTHOOK: query: -- INCLUDE_OS_WINDOWS
 -- included only on  windows because of difference in file name encoding logic
 
@@ -12,18 +14,21 @@ POSTHOOK: query: -- INCLUDE_OS_WINDOWS
 create table if not exists nzhang_part14 (key string) 
   partitioned by (value string)
 POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
 POSTHOOK: Output: default@nzhang_part14
 PREHOOK: query: describe extended nzhang_part14
 PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@nzhang_part14
 POSTHOOK: query: describe extended nzhang_part14
 POSTHOOK: type: DESCTABLE
-key                 	string              	None                
-value               	string              	None                
+POSTHOOK: Input: default@nzhang_part14
+key                 	string              	                    
+value               	string              	                    
 	 	 
 # Partition Information	 	 
 # col_name            	data_type           	comment             
 	 	 
-value               	string              	None                
+value               	string              	                    
 	 	 
 #### A masked pattern was here ####
 PREHOOK: query: explain
@@ -46,9 +51,6 @@ select key, value from (
   select 'k3' as key, ' ' as value from src limit 2
 ) T
 POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'k1' key) (TOK_SELEXPR (TOK_FUNCTION TOK_STRING TOK_NULL) value)) (TOK_LIMIT 2))) (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'k2' key) (TOK_SELEXPR '' value)) (TOK_LIMIT 2)))) (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR 'k3' key) (TOK_SELEXPR ' ' value)) (TOK_LIMIT 2)))) T)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME nzhang_part14) (TOK_PARTSPEC (TOK_PARTVAL value)))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)))))
-
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
   Stage-2 depends on stages: Stage-1, Stage-9, Stage-10
@@ -65,88 +67,79 @@ STAGE DEPENDENCIES:
 STAGE PLANS:
   Stage: Stage-1
     Map Reduce
-      Alias -> Map Operator Tree:
-        null-subquery1-subquery2:t-subquery1-subquery2:src 
+      Map Operator Tree:
           TableScan
             alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
             Select Operator
-              expressions:
-                    expr: 'k2'
-                    type: string
-                    expr: ''
-                    type: string
+              expressions: 'k2' (type: string), '' (type: string)
               outputColumnNames: _col0, _col1
+              Statistics: Num rows: 500 Data size: 85000 Basic stats: COMPLETE Column stats: COMPLETE
               Limit
+                Number of rows: 2
+                Statistics: Num rows: 2 Data size: 340 Basic stats: COMPLETE Column stats: COMPLETE
                 Reduce Output Operator
                   sort order: 
-                  tag: -1
-                  value expressions:
-                        expr: _col0
-                        type: string
-                        expr: _col1
-                        type: string
+                  Statistics: Num rows: 2 Data size: 340 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col0 (type: string), _col1 (type: string)
       Reduce Operator Tree:
-        Extract
+        Select Operator
+          expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 2 Data size: 340 Basic stats: COMPLETE Column stats: COMPLETE
           Limit
+            Number of rows: 2
+            Statistics: Num rows: 2 Data size: 340 Basic stats: COMPLETE Column stats: COMPLETE
             File Output Operator
               compressed: false
-              GlobalTableId: 0
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
 
   Stage: Stage-2
     Map Reduce
-      Alias -> Map Operator Tree:
-#### A masked pattern was here ####
+      Map Operator Tree:
           TableScan
             Union
+              Statistics: Num rows: 6 Data size: 1022 Basic stats: COMPLETE Column stats: COMPLETE
               Select Operator
-                expressions:
-                      expr: _col0
-                      type: string
-                      expr: _col1
-                      type: string
+                expressions: _col0 (type: string), _col1 (type: string)
                 outputColumnNames: _col0, _col1
+                Statistics: Num rows: 6 Data size: 1026 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  GlobalTableId: 1
+                  Statistics: Num rows: 6 Data size: 1026 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.nzhang_part14
-#### A masked pattern was here ####
           TableScan
             Union
+              Statistics: Num rows: 6 Data size: 1022 Basic stats: COMPLETE Column stats: COMPLETE
               Select Operator
-                expressions:
-                      expr: _col0
-                      type: string
-                      expr: _col1
-                      type: string
+                expressions: _col0 (type: string), _col1 (type: string)
                 outputColumnNames: _col0, _col1
+                Statistics: Num rows: 6 Data size: 1026 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  GlobalTableId: 1
+                  Statistics: Num rows: 6 Data size: 1026 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                       name: default.nzhang_part14
-#### A masked pattern was here ####
           TableScan
             Union
+              Statistics: Num rows: 6 Data size: 1022 Basic stats: COMPLETE Column stats: COMPLETE
               Select Operator
-                expressions:
-                      expr: _col0
-                      type: string
-                      expr: _col1
-                      type: string
+                expressions: _col0 (type: string), _col1 (type: string)
                 outputColumnNames: _col0, _col1
+                Statistics: Num rows: 6 Data size: 1026 Basic stats: COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  GlobalTableId: 1
+                  Statistics: Num rows: 6 Data size: 1026 Basic stats: COMPLETE Column stats: COMPLETE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -179,11 +172,10 @@ STAGE PLANS:
 
   Stage: Stage-4
     Map Reduce
-      Alias -> Map Operator Tree:
-#### A masked pattern was here ####
+      Map Operator Tree:
+          TableScan
             File Output Operator
               compressed: false
-              GlobalTableId: 0
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -192,11 +184,10 @@ STAGE PLANS:
 
   Stage: Stage-6
     Map Reduce
-      Alias -> Map Operator Tree:
-#### A masked pattern was here ####
+      Map Operator Tree:
+          TableScan
             File Output Operator
               compressed: false
-              GlobalTableId: 0
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -211,68 +202,67 @@ STAGE PLANS:
 
   Stage: Stage-9
     Map Reduce
-      Alias -> Map Operator Tree:
-        null-subquery2:t-subquery2:src 
+      Map Operator Tree:
           TableScan
             alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
             Select Operator
-              expressions:
-                    expr: 'k3'
-                    type: string
-                    expr: ' '
-                    type: string
+              expressions: 'k3' (type: string), ' ' (type: string)
               outputColumnNames: _col0, _col1
+              Statistics: Num rows: 500 Data size: 85500 Basic stats: COMPLETE Column stats: COMPLETE
               Limit
+                Number of rows: 2
+                Statistics: Num rows: 2 Data size: 342 Basic stats: COMPLETE Column stats: COMPLETE
                 Reduce Output Operator
                   sort order: 
-                  tag: -1
-                  value expressions:
-                        expr: _col0
-                        type: string
-                        expr: _col1
-                        type: string
+                  Statistics: Num rows: 2 Data size: 342 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col0 (type: string), _col1 (type: string)
       Reduce Operator Tree:
-        Extract
+        Select Operator
+          expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 2 Data size: 342 Basic stats: COMPLETE Column stats: COMPLETE
           Limit
+            Number of rows: 2
+            Statistics: Num rows: 2 Data size: 342 Basic stats: COMPLETE Column stats: COMPLETE
             File Output Operator
               compressed: false
-              GlobalTableId: 0
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
 
   Stage: Stage-10
     Map Reduce
-      Alias -> Map Operator Tree:
-        null-subquery1-subquery1:t-subquery1-subquery1:src 
+      Map Operator Tree:
           TableScan
             alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE
             Select Operator
-              expressions:
-                    expr: 'k1'
-                    type: string
-                    expr: UDFToString(null)
-                    type: string
+              expressions: 'k1' (type: string), UDFToString(null) (type: string)
               outputColumnNames: _col0, _col1
+              Statistics: Num rows: 500 Data size: 85000 Basic stats: COMPLETE Column stats: COMPLETE
               Limit
+                Number of rows: 2
+                Statistics: Num rows: 2 Data size: 340 Basic stats: COMPLETE Column stats: COMPLETE
                 Reduce Output Operator
                   sort order: 
-                  tag: -1
-                  value expressions:
-                        expr: _col0
-                        type: string
-                        expr: _col1
-                        type: string
+                  Statistics: Num rows: 2 Data size: 340 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col0 (type: string), _col1 (type: string)
       Reduce Operator Tree:
-        Extract
+        Select Operator
+          expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 2 Data size: 340 Basic stats: COMPLETE Column stats: COMPLETE
           Limit
+            Number of rows: 2
+            Statistics: Num rows: 2 Data size: 340 Basic stats: COMPLETE Column stats: COMPLETE
             File Output Operator
               compressed: false
-              GlobalTableId: 0
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-
+                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
 
 PREHOOK: query: insert overwrite table nzhang_part14 partition(value) 
 select key, value from (
@@ -295,37 +285,35 @@ select key, value from (
 ) T
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
-POSTHOOK: Output: default@nzhang_part14@value=%20
+POSTHOOK: Output: default@nzhang_part14@value=%2520
 POSTHOOK: Output: default@nzhang_part14@value=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Lineage: nzhang_part14 PARTITION(value= ).key EXPRESSION []
+POSTHOOK: Lineage: nzhang_part14 PARTITION(value=%20).key EXPRESSION []
 POSTHOOK: Lineage: nzhang_part14 PARTITION(value=__HIVE_DEFAULT_PARTITION__).key EXPRESSION []
 PREHOOK: query: show partitions nzhang_part14
 PREHOOK: type: SHOWPARTITIONS
+PREHOOK: Input: default@nzhang_part14
 POSTHOOK: query: show partitions nzhang_part14
 POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Lineage: nzhang_part14 PARTITION(value= ).key EXPRESSION []
-POSTHOOK: Lineage: nzhang_part14 PARTITION(value=__HIVE_DEFAULT_PARTITION__).key EXPRESSION []
-value=%20
+POSTHOOK: Input: default@nzhang_part14
+value=%2520
 value=__HIVE_DEFAULT_PARTITION__
 PREHOOK: query: select * from nzhang_part14 where value <> 'a'
 order by key, value
 PREHOOK: type: QUERY
 PREHOOK: Input: default@nzhang_part14
-PREHOOK: Input: default@nzhang_part14@value=%20
+PREHOOK: Input: default@nzhang_part14@value=%2520
 PREHOOK: Input: default@nzhang_part14@value=__HIVE_DEFAULT_PARTITION__
 #### A masked pattern was here ####
 POSTHOOK: query: select * from nzhang_part14 where value <> 'a'
 order by key, value
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@nzhang_part14
-POSTHOOK: Input: default@nzhang_part14@value=%20
+POSTHOOK: Input: default@nzhang_part14@value=%2520
 POSTHOOK: Input: default@nzhang_part14@value=__HIVE_DEFAULT_PARTITION__
 #### A masked pattern was here ####
-POSTHOOK: Lineage: nzhang_part14 PARTITION(value= ).key EXPRESSION []
-POSTHOOK: Lineage: nzhang_part14 PARTITION(value=__HIVE_DEFAULT_PARTITION__).key EXPRESSION []
 k1	__HIVE_DEFAULT_PARTITION__
 k1	__HIVE_DEFAULT_PARTITION__
 k2	__HIVE_DEFAULT_PARTITION__
 k2	__HIVE_DEFAULT_PARTITION__
-k3	 
-k3	 
+k3	%20
+k3	%20

Modified: hive/branches/spark/ql/src/test/results/clientpositive/orc_merge1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/orc_merge1.q.out?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/orc_merge1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/orc_merge1.q.out Fri Nov  7 20:41:34 2014
@@ -110,48 +110,8 @@ POSTHOOK: Lineage: orcfile_merge1 PARTIT
 POSTHOOK: Lineage: orcfile_merge1 PARTITION(ds=1,part=0).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: orcfile_merge1 PARTITION(ds=1,part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: orcfile_merge1 PARTITION(ds=1,part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: DESC FORMATTED orcfile_merge1 partition (ds='1', part='0')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@orcfile_merge1
-POSTHOOK: query: DESC FORMATTED orcfile_merge1 partition (ds='1', part='0')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@orcfile_merge1
-# col_name            	data_type           	comment             
-	 	 
-key                 	int                 	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-part                	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[1, 0]              	 
-Database:           	default             	 
-Table:              	orcfile_merge1      	 
-#### A masked pattern was here ####
-Protect Mode:       	None                	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	2                   
-	numRows             	242                 
-	rawDataSize         	22748               
-	totalSize           	1747                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.ql.io.orc.OrcSerde	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.orc.OrcInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
+Found 2 items
+#### A masked pattern was here ####
 PREHOOK: query: -- auto-merge slow way
 EXPLAIN
     INSERT OVERWRITE TABLE orcfile_merge1b PARTITION (ds='1', part)
@@ -266,48 +226,8 @@ POSTHOOK: Lineage: orcfile_merge1b PARTI
 POSTHOOK: Lineage: orcfile_merge1b PARTITION(ds=1,part=0).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: orcfile_merge1b PARTITION(ds=1,part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: orcfile_merge1b PARTITION(ds=1,part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: DESC FORMATTED orcfile_merge1b partition (ds='1', part='0')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@orcfile_merge1b
-POSTHOOK: query: DESC FORMATTED orcfile_merge1b partition (ds='1', part='0')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@orcfile_merge1b
-# col_name            	data_type           	comment             
-	 	 
-key                 	int                 	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-part                	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[1, 0]              	 
-Database:           	default             	 
-Table:              	orcfile_merge1b     	 
-#### A masked pattern was here ####
-Protect Mode:       	None                	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	242                 
-	rawDataSize         	22748               
-	totalSize           	1332                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.ql.io.orc.OrcSerde	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.orc.OrcInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
+Found 1 items
+#### A masked pattern was here ####
 PREHOOK: query: -- auto-merge fast way
 EXPLAIN
     INSERT OVERWRITE TABLE orcfile_merge1c PARTITION (ds='1', part)
@@ -412,48 +332,8 @@ POSTHOOK: Lineage: orcfile_merge1c PARTI
 POSTHOOK: Lineage: orcfile_merge1c PARTITION(ds=1,part=0).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: orcfile_merge1c PARTITION(ds=1,part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: orcfile_merge1c PARTITION(ds=1,part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: DESC FORMATTED orcfile_merge1c partition (ds='1', part='0')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@orcfile_merge1c
-POSTHOOK: query: DESC FORMATTED orcfile_merge1c partition (ds='1', part='0')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@orcfile_merge1c
-# col_name            	data_type           	comment             
-	 	 
-key                 	int                 	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-part                	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[1, 0]              	 
-Database:           	default             	 
-Table:              	orcfile_merge1c     	 
-#### A masked pattern was here ####
-Protect Mode:       	None                	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	242                 
-	rawDataSize         	22748               
-	totalSize           	1623                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.ql.io.orc.OrcSerde	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.orc.OrcInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
+Found 1 items
+#### A masked pattern was here ####
 PREHOOK: query: -- Verify
 SELECT SUM(HASH(c)) FROM (
     SELECT TRANSFORM(*) USING 'tr \t _' AS (c)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/orc_merge2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/orc_merge2.q.out?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/orc_merge2.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/orc_merge2.q.out Fri Nov  7 20:41:34 2014
@@ -173,49 +173,8 @@ POSTHOOK: Lineage: orcfile_merge2a PARTI
 POSTHOOK: Lineage: orcfile_merge2a PARTITION(one=1,two=9,three=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: orcfile_merge2a PARTITION(one=1,two=9,three=7).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: orcfile_merge2a PARTITION(one=1,two=9,three=7).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: DESC FORMATTED orcfile_merge2a partition (one='1', two='0', three='2')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@orcfile_merge2a
-POSTHOOK: query: DESC FORMATTED orcfile_merge2a partition (one='1', two='0', three='2')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@orcfile_merge2a
-# col_name            	data_type           	comment             
-	 	 
-key                 	int                 	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-one                 	string              	                    
-two                 	string              	                    
-three               	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[1, 0, 2]           	 
-Database:           	default             	 
-Table:              	orcfile_merge2a     	 
-#### A masked pattern was here ####
-Protect Mode:       	None                	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	4                   
-	rawDataSize         	376                 
-	totalSize           	320                 
+Found 1 items
 #### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.ql.io.orc.OrcSerde	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.orc.OrcInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
 PREHOOK: query: SELECT SUM(HASH(c)) FROM (
     SELECT TRANSFORM(*) USING 'tr \t _' AS (c)
     FROM orcfile_merge2a

Modified: hive/branches/spark/ql/src/test/results/clientpositive/orc_merge3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/orc_merge3.q.out?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/orc_merge3.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/orc_merge3.q.out Fri Nov  7 20:41:34 2014
@@ -142,42 +142,8 @@ POSTHOOK: Input: default@orcfile_merge3a
 POSTHOOK: Output: default@orcfile_merge3b
 POSTHOOK: Lineage: orcfile_merge3b.key SIMPLE [(orcfile_merge3a)orcfile_merge3a.FieldSchema(name:key, type:int, comment:null), ]
 POSTHOOK: Lineage: orcfile_merge3b.value SIMPLE [(orcfile_merge3a)orcfile_merge3a.FieldSchema(name:value, type:string, comment:null), ]
-PREHOOK: query: DESC FORMATTED orcfile_merge3b
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@orcfile_merge3b
-POSTHOOK: query: DESC FORMATTED orcfile_merge3b
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@orcfile_merge3b
-# col_name            	data_type           	comment             
-	 	 
-key                 	int                 	                    
-value               	string              	                    
-	 	 
-# Detailed Table Information	 	 
-Database:           	default             	 
-#### A masked pattern was here ####
-Protect Mode:       	None                	 
-Retention:          	0                   	 
-#### A masked pattern was here ####
-Table Type:         	MANAGED_TABLE       	 
-Table Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	1000                
-	rawDataSize         	94000               
-	totalSize           	4834                
+Found 1 items
 #### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.ql.io.orc.OrcSerde	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.orc.OrcInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
 PREHOOK: query: SELECT SUM(HASH(c)) FROM (
     SELECT TRANSFORM(key, value) USING 'tr \t _' AS (c)
     FROM orcfile_merge3a

Modified: hive/branches/spark/ql/src/test/results/clientpositive/orc_merge4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/orc_merge4.q.out?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/orc_merge4.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/orc_merge4.q.out Fri Nov  7 20:41:34 2014
@@ -36,47 +36,8 @@ POSTHOOK: Input: default@src
 POSTHOOK: Output: default@orcfile_merge3a@ds=1
 POSTHOOK: Lineage: orcfile_merge3a PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: orcfile_merge3a PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: DESC FORMATTED orcfile_merge3a PARTITION (ds='1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@orcfile_merge3a
-POSTHOOK: query: DESC FORMATTED orcfile_merge3a PARTITION (ds='1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@orcfile_merge3a
-# col_name            	data_type           	comment             
-	 	 
-key                 	int                 	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[1]                 	 
-Database:           	default             	 
-Table:              	orcfile_merge3a     	 
-#### A masked pattern was here ####
-Protect Mode:       	None                	 
+Found 1 items
 #### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	500                 
-	rawDataSize         	47000               
-	totalSize           	2496                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.ql.io.orc.OrcSerde	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.orc.OrcInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
 PREHOOK: query: INSERT OVERWRITE TABLE orcfile_merge3a PARTITION (ds='1')
     SELECT * FROM src
 PREHOOK: type: QUERY
@@ -101,88 +62,10 @@ POSTHOOK: Input: default@src
 POSTHOOK: Output: default@orcfile_merge3a@ds=2
 POSTHOOK: Lineage: orcfile_merge3a PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: orcfile_merge3a PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: DESC FORMATTED orcfile_merge3a PARTITION (ds='1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@orcfile_merge3a
-POSTHOOK: query: DESC FORMATTED orcfile_merge3a PARTITION (ds='1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@orcfile_merge3a
-# col_name            	data_type           	comment             
-	 	 
-key                 	int                 	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[1]                 	 
-Database:           	default             	 
-Table:              	orcfile_merge3a     	 
-#### A masked pattern was here ####
-Protect Mode:       	None                	 
-#### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	500                 
-	rawDataSize         	47000               
-	totalSize           	2496                
-#### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.ql.io.orc.OrcSerde	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.orc.OrcInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
-PREHOOK: query: DESC FORMATTED orcfile_merge3a PARTITION (ds='2')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@orcfile_merge3a
-POSTHOOK: query: DESC FORMATTED orcfile_merge3a PARTITION (ds='2')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@orcfile_merge3a
-# col_name            	data_type           	comment             
-	 	 
-key                 	int                 	                    
-value               	string              	                    
-	 	 
-# Partition Information	 	 
-# col_name            	data_type           	comment             
-	 	 
-ds                  	string              	                    
-	 	 
-# Detailed Partition Information	 	 
-Partition Value:    	[2]                 	 
-Database:           	default             	 
-Table:              	orcfile_merge3a     	 
-#### A masked pattern was here ####
-Protect Mode:       	None                	 
+Found 1 items
 #### A masked pattern was here ####
-Partition Parameters:	 	 
-	COLUMN_STATS_ACCURATE	true                
-	numFiles            	1                   
-	numRows             	500                 
-	rawDataSize         	47000               
-	totalSize           	2496                
+Found 1 items
 #### A masked pattern was here ####
-	 	 
-# Storage Information	 	 
-SerDe Library:      	org.apache.hadoop.hive.ql.io.orc.OrcSerde	 
-InputFormat:        	org.apache.hadoop.hive.ql.io.orc.OrcInputFormat	 
-OutputFormat:       	org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat	 
-Compressed:         	No                  	 
-Num Buckets:        	-1                  	 
-Bucket Columns:     	[]                  	 
-Sort Columns:       	[]                  	 
-Storage Desc Params:	 	 
-	serialization.format	1                   
 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE orcfile_merge3b
     SELECT key, value FROM orcfile_merge3a
 PREHOOK: type: QUERY