Posted to commits@hive.apache.org by br...@apache.org on 2014/10/06 05:44:26 UTC

svn commit: r1629562 [25/38] - in /hive/branches/spark: ./ accumulo-handler/ beeline/ beeline/src/java/org/apache/hive/beeline/ bin/ext/ common/ common/src/java/org/apache/hadoop/hive/conf/ common/src/test/org/apache/hadoop/hive/common/type/ contrib/sr...

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/multigroupby_singlemr.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/multigroupby_singlemr.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/multigroupby_singlemr.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/multigroupby_singlemr.q.out Mon Oct  6 03:44:13 2014
@@ -67,6 +67,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: tbl
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                   File Output Operator
                     compressed: false
                     table:
@@ -86,15 +87,18 @@ STAGE PLANS:
                   Select Operator
                     expressions: c1 (type: int), c2 (type: int)
                     outputColumnNames: c1, c2
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     Group By Operator
                       aggregations: count(c2)
                       keys: c1 (type: int)
                       mode: hash
                       outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: int)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: int)
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                         value expressions: _col1 (type: bigint)
         Reducer 4 
             Reduce Operator Tree:
@@ -103,11 +107,14 @@ STAGE PLANS:
                 keys: KEY._col0 (type: int)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
+                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                 Select Operator
                   expressions: _col0 (type: int), UDFToInteger(_col1) (type: int)
                   outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                   File Output Operator
                     compressed: false
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -155,15 +162,18 @@ STAGE PLANS:
                   Select Operator
                     expressions: c1 (type: int), c2 (type: int), c3 (type: int)
                     outputColumnNames: c1, c2, c3
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     Group By Operator
                       aggregations: count(c3)
                       keys: c1 (type: int), c2 (type: int)
                       mode: hash
                       outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: int), _col1 (type: int)
                         sort order: ++
                         Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                         value expressions: _col2 (type: bigint)
         Reducer 5 
             Reduce Operator Tree:
@@ -172,11 +182,14 @@ STAGE PLANS:
                 keys: KEY._col0 (type: int), KEY._col1 (type: int)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                 Select Operator
                   expressions: _col0 (type: int), _col1 (type: int), UDFToInteger(_col2) (type: int)
                   outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                   File Output Operator
                     compressed: false
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -212,6 +225,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: tbl
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                   File Output Operator
                     compressed: false
                     table:
@@ -231,15 +245,18 @@ STAGE PLANS:
                   Select Operator
                     expressions: c1 (type: int), c2 (type: int)
                     outputColumnNames: c1, c2
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     Group By Operator
                       aggregations: count(c2)
                       keys: c1 (type: int)
                       mode: hash
                       outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: int)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: int)
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                         value expressions: _col1 (type: bigint)
         Reducer 4 
             Reduce Operator Tree:
@@ -248,11 +265,14 @@ STAGE PLANS:
                 keys: KEY._col0 (type: int)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
+                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                 Select Operator
                   expressions: _col0 (type: int), UDFToInteger(_col1) (type: int)
                   outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                   File Output Operator
                     compressed: false
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -300,15 +320,18 @@ STAGE PLANS:
                   Select Operator
                     expressions: c2 (type: int), c1 (type: int), c3 (type: int)
                     outputColumnNames: c2, c1, c3
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     Group By Operator
                       aggregations: count(c3)
                       keys: c2 (type: int), c1 (type: int)
                       mode: hash
                       outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: int), _col1 (type: int)
                         sort order: ++
                         Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                         value expressions: _col2 (type: bigint)
         Reducer 5 
             Reduce Operator Tree:
@@ -317,11 +340,14 @@ STAGE PLANS:
                 keys: KEY._col0 (type: int), KEY._col1 (type: int)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                 Select Operator
                   expressions: _col1 (type: int), _col0 (type: int), UDFToInteger(_col2) (type: int)
                   outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                   File Output Operator
                     compressed: false
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -357,6 +383,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: tbl
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                   File Output Operator
                     compressed: false
                     table:
@@ -376,15 +403,18 @@ STAGE PLANS:
                   Select Operator
                     expressions: c1 (type: int), c2 (type: int), c3 (type: int), c4 (type: int)
                     outputColumnNames: c1, c2, c3, c4
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     Group By Operator
                       aggregations: count(c4)
                       keys: c1 (type: int), c2 (type: int), c3 (type: int)
                       mode: hash
                       outputColumnNames: _col0, _col1, _col2, _col3
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int)
                         sort order: +++
                         Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                         value expressions: _col3 (type: bigint)
         Reducer 4 
             Reduce Operator Tree:
@@ -393,11 +423,14 @@ STAGE PLANS:
                 keys: KEY._col0 (type: int), KEY._col1 (type: int), KEY._col2 (type: int)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                 Select Operator
                   expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), UDFToInteger(_col3) (type: int)
                   outputColumnNames: _col0, _col1, _col2, _col3
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                   File Output Operator
                     compressed: false
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -445,15 +478,18 @@ STAGE PLANS:
                   Select Operator
                     expressions: c1 (type: int), c2 (type: int), c3 (type: int)
                     outputColumnNames: c1, c2, c3
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     Group By Operator
                       aggregations: count(c3)
                       keys: c1 (type: int), c2 (type: int)
                       mode: hash
                       outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: int), _col1 (type: int)
                         sort order: ++
                         Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                         value expressions: _col2 (type: bigint)
         Reducer 5 
             Reduce Operator Tree:
@@ -462,11 +498,14 @@ STAGE PLANS:
                 keys: KEY._col0 (type: int), KEY._col1 (type: int)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                 Select Operator
                   expressions: _col0 (type: int), _col1 (type: int), UDFToInteger(_col2) (type: int)
                   outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                   File Output Operator
                     compressed: false
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -502,27 +541,34 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: tbl
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                   Select Operator
                     expressions: c1 (type: int), c2 (type: int), c3 (type: int), c4 (type: int)
                     outputColumnNames: c1, c2, c3, c4
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     Reduce Output Operator
                       key expressions: c1 (type: int), c2 (type: int), c3 (type: int)
                       sort order: +++
                       Map-reduce partition columns: c1 (type: int), c2 (type: int), c3 (type: int)
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                       value expressions: c4 (type: int)
         Reducer 2 
             Reduce Operator Tree:
               Forward
+                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                 Group By Operator
                   aggregations: count(VALUE._col0)
                   keys: KEY._col0 (type: int), KEY._col1 (type: int), KEY._col2 (type: int)
                   mode: complete
                   outputColumnNames: _col0, _col1, _col2, _col3
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                   Select Operator
                     expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), UDFToInteger(_col3) (type: int)
                     outputColumnNames: _col0, _col1, _col2, _col3
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     File Output Operator
                       compressed: false
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                       table:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -533,11 +579,14 @@ STAGE PLANS:
                   keys: KEY._col0 (type: int), KEY._col2 (type: int), KEY._col1 (type: int)
                   mode: complete
                   outputColumnNames: _col0, _col1, _col2, _col3
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                   Select Operator
                     expressions: _col0 (type: int), _col2 (type: int), _col1 (type: int), UDFToInteger(_col3) (type: int)
                     outputColumnNames: _col0, _col1, _col2, _col3
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     File Output Operator
                       compressed: false
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                       table:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -607,6 +656,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: tbl
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                   File Output Operator
                     compressed: false
                     table:
@@ -626,15 +676,18 @@ STAGE PLANS:
                   Select Operator
                     expressions: c1 (type: int), c2 (type: int), c3 (type: int), c4 (type: int)
                     outputColumnNames: c1, c2, c3, c4
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     Group By Operator
                       aggregations: count(c4)
                       keys: c1 (type: int), c2 (type: int), c3 (type: int)
                       mode: hash
                       outputColumnNames: _col0, _col1, _col2, _col3
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int)
                         sort order: +++
                         Map-reduce partition columns: _col0 (type: int), _col1 (type: int), _col2 (type: int)
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                         value expressions: _col3 (type: bigint)
         Reducer 5 
             Reduce Operator Tree:
@@ -643,11 +696,14 @@ STAGE PLANS:
                 keys: KEY._col0 (type: int), KEY._col1 (type: int), KEY._col2 (type: int)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                 Select Operator
                   expressions: _col0 (type: int), _col1 (type: int), _col2 (type: int), UDFToInteger(_col3) (type: int)
                   outputColumnNames: _col0, _col1, _col2, _col3
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                   File Output Operator
                     compressed: false
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -708,15 +764,18 @@ STAGE PLANS:
                   Select Operator
                     expressions: c1 (type: int), c2 (type: int), c3 (type: int)
                     outputColumnNames: c1, c2, c3
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     Group By Operator
                       aggregations: count(c3)
                       keys: c1 (type: int), c2 (type: int)
                       mode: hash
                       outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: int), _col1 (type: int)
                         sort order: ++
                         Map-reduce partition columns: _col0 (type: int), _col1 (type: int)
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                         value expressions: _col2 (type: bigint)
         Reducer 6 
             Reduce Operator Tree:
@@ -725,11 +784,14 @@ STAGE PLANS:
                 keys: KEY._col0 (type: int), KEY._col1 (type: int)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                 Select Operator
                   expressions: _col0 (type: int), _col1 (type: int), UDFToInteger(_col2) (type: int)
                   outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                   File Output Operator
                     compressed: false
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -748,15 +810,18 @@ STAGE PLANS:
                   Select Operator
                     expressions: c1 (type: int), c2 (type: int)
                     outputColumnNames: c1, c2
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     Group By Operator
                       aggregations: count(c2)
                       keys: c1 (type: int)
                       mode: hash
                       outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: int)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: int)
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                         value expressions: _col1 (type: bigint)
         Reducer 7 
             Reduce Operator Tree:
@@ -765,11 +830,14 @@ STAGE PLANS:
                 keys: KEY._col0 (type: int)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
+                Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                 Select Operator
                   expressions: _col0 (type: int), UDFToInteger(_col1) (type: int)
                   outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                   File Output Operator
                     compressed: false
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
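
The Statistics lines added throughout this golden file are Hive's operator-tree statistics annotations as printed by EXPLAIN; with no table statistics available for the test table they all read Num rows: 0, Data size: 0, Basic stats: NONE. For orientation only, a multi-insert of roughly the following shape produces the repeated Group By Operator / Reducer branches seen above (hypothetical DDL and destination tables; the actual multigroupby_singlemr.q script may differ):

-- Hypothetical sketch of the multi-group-by pattern reflected in the plans
-- above; the table alias tbl and columns c1..c4 are taken from the plan,
-- everything else is assumed and may not match the real test script.
CREATE TABLE tbl  (c1 INT, c2 INT, c3 INT, c4 INT);
CREATE TABLE dst1 (c1 INT, cnt INT);
CREATE TABLE dst2 (c1 INT, c2 INT, cnt INT);

-- A single scan of tbl feeds two GROUP BY sinks, which is what yields the
-- separate Group By Operator branches and reducers in the plan.
FROM tbl
INSERT OVERWRITE TABLE dst1 SELECT c1, COUNT(c2) GROUP BY c1
INSERT OVERWRITE TABLE dst2 SELECT c1, c2, COUNT(c3) GROUP BY c1, c2;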

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/optimize_nullscan.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/optimize_nullscan.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/optimize_nullscan.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/optimize_nullscan.q.out Mon Oct  6 03:44:13 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/order.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/order.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/order.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/order.q.out Mon Oct  6 03:44:13 2014
@@ -19,22 +19,28 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: x
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: string)
                       sort order: +
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col1 (type: string)
         Reducer 2 
             Reduce Operator Tree:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
                 outputColumnNames: _col0, _col1
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 Limit
                   Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
+                    Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -85,22 +91,28 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: x
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: string)
                       sort order: -
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col1 (type: string)
         Reducer 2 
             Reduce Operator Tree:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
                 outputColumnNames: _col0, _col1
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 Limit
                   Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
+                    Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
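
In order.q.out the source table already carries basic statistics (500 rows, data size 5312), so the new annotations show Basic stats: COMPLETE and the Limit operator scales the estimate down to 10 rows. The two plans differ only in sort order (+ vs. -), i.e. roughly an ascending and a descending ORDER BY with a LIMIT (hypothetical reconstruction; the table name src and the exact projection are assumed, the real order.q script may differ):

-- Hypothetical shape of the two queries behind the plans above.
EXPLAIN SELECT x.key, x.value FROM src x ORDER BY x.key ASC LIMIT 10;
EXPLAIN SELECT x.key, x.value FROM src x ORDER BY x.key DESC LIMIT 10;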

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/order2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/order2.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/order2.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/order2.q.out Mon Oct  6 03:44:13 2014
@@ -23,27 +23,35 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: x
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: string)
                       sort order: +
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col1 (type: string)
         Reducer 2 
             Reduce Operator Tree:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
                 outputColumnNames: _col0, _col1
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 Limit
                   Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (_col0 < 10) (type: boolean)
+                    Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: string), _col1 (type: string)
                       outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
+                        Statistics: Num rows: 3 Data size: 30 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/parallel.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/parallel.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/parallel.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/parallel.q.out Mon Oct  6 03:44:13 2014
@@ -48,42 +48,53 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: key, value
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       keys: key (type: string), value (type: string)
                       mode: hash
                       outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string), _col1 (type: string)
                         sort order: ++
                         Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
+                        Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
         Reducer 2 
             Reduce Operator Tree:
               Group By Operator
                 keys: KEY._col0 (type: string), KEY._col1 (type: string)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
                   expressions: _col0 (type: string), _col1 (type: string)
                   outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                   Reduce Output Operator
                     key expressions: _col0 (type: string), _col1 (type: string)
                     sort order: ++
                     Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
         Reducer 3 
             Reduce Operator Tree:
               Forward
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 Group By Operator
                   keys: KEY._col0 (type: string), KEY._col1 (type: string)
                   mode: complete
                   outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: _col0 (type: string), _col1 (type: string)
                     outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false
+                      Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
                       table:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -93,11 +104,14 @@ STAGE PLANS:
                   keys: KEY._col0 (type: string), KEY._col1 (type: string)
                   mode: complete
                   outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: _col0 (type: string), _col1 (type: string)
                     outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false
+                      Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
                       table:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/parallel_join0.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/parallel_join0.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/parallel_join0.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/parallel_join0.q.out Mon Oct  6 03:44:13 2014
@@ -34,25 +34,33 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (key < 10) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: key (type: string), value (type: string)
                       outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         sort order: 
+                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: string), _col1 (type: string)
         Map 4 
             Map Operator Tree:
                 TableScan
                   alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (key < 10) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: key (type: string), value (type: string)
                       outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         sort order: 
+                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: string), _col1 (type: string)
         Reducer 2 
             Reduce Operator Tree:
@@ -63,19 +71,24 @@ STAGE PLANS:
                   0 {VALUE._col0} {VALUE._col1}
                   1 {VALUE._col0} {VALUE._col1}
                 outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
                   expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
                   outputColumnNames: _col0, _col1, _col2, _col3
+                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
                   Reduce Output Operator
                     key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
                     sort order: ++++
+                    Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
         Reducer 3 
             Reduce Operator Tree:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
+                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -122,6 +135,47 @@ POSTHOOK: query: SELECT src1.key as k1, 
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
+5	val_5	9	val_9
+5	val_5	9	val_9
+5	val_5	9	val_9
+8	val_8	0	val_0
+8	val_8	0	val_0
+8	val_8	0	val_0
+8	val_8	2	val_2
+8	val_8	4	val_4
+8	val_8	5	val_5
+8	val_8	5	val_5
+8	val_8	5	val_5
+8	val_8	8	val_8
+8	val_8	9	val_9
+9	val_9	0	val_0
+9	val_9	0	val_0
+9	val_9	0	val_0
+9	val_9	2	val_2
+9	val_9	4	val_4
+9	val_9	5	val_5
+9	val_9	5	val_5
+9	val_9	5	val_5
+9	val_9	8	val_8
+9	val_9	9	val_9
+5	val_5	2	val_2
+5	val_5	2	val_2
+5	val_5	2	val_2
+5	val_5	4	val_4
+5	val_5	4	val_4
+5	val_5	4	val_4
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	5	val_5
+5	val_5	8	val_8
+5	val_5	8	val_8
+5	val_5	8	val_8
 0	val_0	0	val_0
 0	val_0	0	val_0
 0	val_0	0	val_0
@@ -181,44 +235,3 @@ POSTHOOK: Input: default@src
 5	val_5	0	val_0
 5	val_5	0	val_0
 5	val_5	0	val_0
-5	val_5	9	val_9
-5	val_5	9	val_9
-5	val_5	9	val_9
-8	val_8	0	val_0
-8	val_8	0	val_0
-8	val_8	0	val_0
-8	val_8	2	val_2
-8	val_8	4	val_4
-8	val_8	5	val_5
-8	val_8	5	val_5
-8	val_8	5	val_5
-8	val_8	8	val_8
-8	val_8	9	val_9
-9	val_9	0	val_0
-9	val_9	0	val_0
-9	val_9	0	val_0
-9	val_9	2	val_2
-9	val_9	4	val_4
-9	val_9	5	val_5
-9	val_9	5	val_5
-9	val_9	5	val_5
-9	val_9	8	val_8
-9	val_9	9	val_9
-5	val_5	2	val_2
-5	val_5	2	val_2
-5	val_5	2	val_2
-5	val_5	4	val_4
-5	val_5	4	val_4
-5	val_5	4	val_4
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	5	val_5
-5	val_5	8	val_8
-5	val_5	8	val_8
-5	val_5	8	val_8

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/parallel_join1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/parallel_join1.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/parallel_join1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/parallel_join1.q.out Mon Oct  6 03:44:13 2014
@@ -35,23 +35,29 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: src2
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: key (type: string)
                       sort order: +
                       Map-reduce partition columns: key (type: string)
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                       value expressions: value (type: string)
         Map 3 
             Map Operator Tree:
                 TableScan
                   alias: src1
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: key (type: string)
                       sort order: +
                       Map-reduce partition columns: key (type: string)
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
         Reducer 2 
             Reduce Operator Tree:
               Join Operator
@@ -61,11 +67,14 @@ STAGE PLANS:
                   0 {KEY.reducesinkkey0}
                   1 {VALUE._col0}
                 outputColumnNames: _col0, _col6
+                Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
                   expressions: UDFToInteger(_col0) (type: int), _col6 (type: string)
                   outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
+                    Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/ppd_multi_insert.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/ppd_multi_insert.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/ppd_multi_insert.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/ppd_multi_insert.q.out Mon Oct  6 03:44:13 2014
@@ -62,22 +62,28 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: b
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: key (type: string)
                       sort order: +
                       Map-reduce partition columns: key (type: string)
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
         Map 7 
             Map Operator Tree:
                 TableScan
                   alias: a
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: key (type: string)
                       sort order: +
                       Map-reduce partition columns: key (type: string)
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                       value expressions: value (type: string)
         Reducer 6 
             Reduce Operator Tree:
@@ -88,6 +94,7 @@ STAGE PLANS:
                   0 {KEY.reducesinkkey0} {VALUE._col0}
                   1 
                 outputColumnNames: _col0, _col1
+                Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
                   table:
@@ -104,11 +111,14 @@ STAGE PLANS:
                 TableScan
                   Filter Operator
                     predicate: (_col0 < 100) (type: boolean)
+                    Statistics: Num rows: 91 Data size: 966 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
                       outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 91 Data size: 966 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
+                        Statistics: Num rows: 91 Data size: 966 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -169,11 +179,14 @@ STAGE PLANS:
                 TableScan
                   Filter Operator
                     predicate: ((_col0 >= 100) and (_col0 < 200)) (type: boolean)
+                    Statistics: Num rows: 30 Data size: 318 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
                       outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 30 Data size: 318 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
+                        Statistics: Num rows: 30 Data size: 318 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -189,11 +202,14 @@ STAGE PLANS:
                 TableScan
                   Filter Operator
                     predicate: ((_col0 >= 200) and (_col0 < 300)) (type: boolean)
+                    Statistics: Num rows: 30 Data size: 318 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: UDFToInteger(_col0) (type: int)
                       outputColumnNames: _col0
+                      Statistics: Num rows: 30 Data size: 318 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
+                        Statistics: Num rows: 30 Data size: 318 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -209,11 +225,14 @@ STAGE PLANS:
                 TableScan
                   Filter Operator
                     predicate: (_col0 >= 300) (type: boolean)
+                    Statistics: Num rows: 91 Data size: 966 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col1 (type: string)
                       outputColumnNames: _col0
+                      Statistics: Num rows: 91 Data size: 966 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
+                        Statistics: Num rows: 91 Data size: 966 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1349,22 +1368,28 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: b
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: key (type: string)
                       sort order: +
                       Map-reduce partition columns: key (type: string)
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
         Map 7 
             Map Operator Tree:
                 TableScan
                   alias: a
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: key (type: string)
                       sort order: +
                       Map-reduce partition columns: key (type: string)
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                       value expressions: value (type: string)
         Reducer 6 
             Reduce Operator Tree:
@@ -1375,6 +1400,7 @@ STAGE PLANS:
                   0 {KEY.reducesinkkey0} {VALUE._col0}
                   1 
                 outputColumnNames: _col0, _col1
+                Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
                   table:
@@ -1391,11 +1417,14 @@ STAGE PLANS:
                 TableScan
                   Filter Operator
                     predicate: (_col0 < 100) (type: boolean)
+                    Statistics: Num rows: 91 Data size: 966 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
                       outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 91 Data size: 966 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
+                        Statistics: Num rows: 91 Data size: 966 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1456,11 +1485,14 @@ STAGE PLANS:
                 TableScan
                   Filter Operator
                     predicate: ((_col0 >= 100) and (_col0 < 200)) (type: boolean)
+                    Statistics: Num rows: 30 Data size: 318 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: UDFToInteger(_col0) (type: int), _col1 (type: string)
                       outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 30 Data size: 318 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
+                        Statistics: Num rows: 30 Data size: 318 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1476,11 +1508,14 @@ STAGE PLANS:
                 TableScan
                   Filter Operator
                     predicate: ((_col0 >= 200) and (_col0 < 300)) (type: boolean)
+                    Statistics: Num rows: 30 Data size: 318 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: UDFToInteger(_col0) (type: int)
                       outputColumnNames: _col0
+                      Statistics: Num rows: 30 Data size: 318 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
+                        Statistics: Num rows: 30 Data size: 318 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1496,11 +1531,14 @@ STAGE PLANS:
                 TableScan
                   Filter Operator
                     predicate: (_col0 >= 300) (type: boolean)
+                    Statistics: Num rows: 91 Data size: 966 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col1 (type: string)
                       outputColumnNames: _col0
+                      Statistics: Num rows: 91 Data size: 966 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
+                        Statistics: Num rows: 91 Data size: 966 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

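The three hunks just above are the tail of a multi-output plan: disjoint key ranges (100-200, 200-300, >= 300) are filtered out of the same scan and written through separate File Output Operators, which is the shape Hive gives a multi-table INSERT. A rough sketch of that kind of statement, with hypothetical destination tables and a plain subquery standing in for the actual source, would be:

    FROM (SELECT key, value FROM src) s
    INSERT OVERWRITE TABLE dest_1 SELECT CAST(s.key AS INT), s.value WHERE s.key >= 100 AND s.key < 200
    INSERT OVERWRITE TABLE dest_2 SELECT CAST(s.key AS INT)          WHERE s.key >= 200 AND s.key < 300
    INSERT OVERWRITE TABLE dest_3 SELECT s.value                     WHERE s.key >= 300;

The new Statistics lines simply record the row/size estimates the optimizer now carries through each of those branches.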
Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/ppd_transform.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/ppd_transform.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/ppd_transform.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/ppd_transform.q.out Mon Oct  6 03:44:13 2014
@@ -31,32 +31,40 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Transform Operator
                       command: cat
                       output info:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: string), _col1 (type: string)
         Reducer 2 
             Reduce Operator Tree:
               Select Operator
                 expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
                 outputColumnNames: _col0, _col1
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 Filter Operator
                   predicate: (_col0 < 100) (type: boolean)
+                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: _col0 (type: string), _col1 (type: string)
                     outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                       table:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -205,29 +213,36 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Transform Operator
                       command: cat
                       output info:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                       Filter Operator
                         predicate: (_col0 < 100) (type: boolean)
+                        Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                         Reduce Output Operator
                           key expressions: _col0 (type: string)
                           sort order: +
                           Map-reduce partition columns: _col0 (type: string)
+                          Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                           value expressions: _col0 (type: string), _col1 (type: string)
         Reducer 2 
             Reduce Operator Tree:
               Select Operator
                 expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
                 outputColumnNames: _col0, _col1
+                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
+                  Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -376,15 +391,18 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Transform Operator
                       command: cat
                       output info:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
                         table:
@@ -401,11 +419,14 @@ STAGE PLANS:
                 TableScan
                   Filter Operator
                     predicate: ((_col0 = 'a') or (_col0 = 'b')) (type: boolean)
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: string), _col1 (type: string)
                       outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
+                        Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -429,11 +450,14 @@ STAGE PLANS:
                 TableScan
                   Filter Operator
                     predicate: ((_col0 = 'c') or (_col0 = 'd')) (type: boolean)
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: _col0 (type: string), _col1 (type: string)
                       outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
+                        Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

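The ppd_transform.q.out plans above all revolve around a Transform Operator running 'cat', with the (_col0 < 100) filter appearing either after the transform (first plan) or pushed down before the shuffle (second plan), and the last two hunks splitting the transformed rows by key value into separate outputs. A sketch of the kind of query behind the first two plans, with column aliases assumed for illustration:

    EXPLAIN
    SELECT tmap.tkey, tmap.tvalue
    FROM (SELECT TRANSFORM(key, value) USING 'cat' AS (tkey, tvalue) FROM src) tmap
    WHERE tmap.tkey < 100;

The added Statistics lines show the estimate shrinking from 500 rows to 166 once the predicate is applied, which is exactly the pushdown behavior the test exercises.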
Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/sample1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/sample1.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/sample1.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/sample1.q.out Mon Oct  6 03:44:13 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/sample2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/sample2.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/sample2.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/sample2.q.out Mon Oct  6 03:44:13 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/sample3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/sample3.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/sample3.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/sample3.q.out Mon Oct  6 03:44:13 2014
@@ -18,11 +18,14 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: s
+          Statistics: Num rows: 1000 Data size: 10603 Basic stats: COMPLETE Column stats: NONE
           Filter Operator
             predicate: (((hash(key) & 2147483647) % 5) = 0) (type: boolean)
+            Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: key (type: int)
               outputColumnNames: _col0
+              Statistics: Num rows: 500 Data size: 5301 Basic stats: COMPLETE Column stats: NONE
               ListSink
 
 PREHOOK: query: SELECT s.key

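The sample3.q.out hunk attaches estimates to a sampling plan: the predicate (((hash(key) & 2147483647) % 5) = 0) is the filter Hive generates when a TABLESAMPLE bucket clause cannot be satisfied by pruning bucket files, so the plan corresponds to a query roughly of this form (table name assumed):

    SELECT s.key
    FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 5 ON key) s;

The new lines show the estimate halving from 1000 input rows to 500 after the sampling predicate.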
Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/sample4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/sample4.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/sample4.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/sample4.q.out Mon Oct  6 03:44:13 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/sample5.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/sample5.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/sample5.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/sample5.q.out Mon Oct  6 03:44:13 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/sample6.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/sample6.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/sample6.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/sample6.q.out Mon Oct  6 03:44:13 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/sample7.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/sample7.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/sample7.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/sample7.q.out Mon Oct  6 03:44:13 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/sample8.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/sample8.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/sample8.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/sample8.q.out Mon Oct  6 03:44:13 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/sample9.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/sample9.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/sample9.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/sample9.q.out Mon Oct  6 03:44:13 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/script_pipe.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/script_pipe.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/script_pipe.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/script_pipe.q.out Mon Oct  6 03:44:13 2014
@@ -19,32 +19,41 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Limit
                       Number of rows: 1
+                      Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         sort order: 
+                        Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: string), _col1 (type: string)
         Reducer 2 
             Reduce Operator Tree:
               Select Operator
                 expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
                 outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                 Limit
                   Number of rows: 1
+                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: _col0 (type: string), _col1 (type: string)
                     outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                     Transform Operator
                       command: true
                       output info:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
+                        Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -75,17 +84,21 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string), key (type: string), value (type: string), key (type: string), value (type: string), key (type: string), value (type: string), key (type: string), value (type: string), key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Transform Operator
                       command: head -n 1
                       output info:
                           input format: org.apache.hadoop.mapred.TextInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
+                        Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

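The script_pipe.q.out changes cover two script plans: the first limits src to a single row, ships it to one reducer, and pipes it through 'true'; the second duplicates key/value six times per row and pipes the result through 'head -n 1' on the map side. Sketches of queries with that shape (output column aliases assumed):

    SELECT TRANSFORM(key, value) USING 'true' AS (a, b)
    FROM (SELECT * FROM src LIMIT 1) t;

    SELECT TRANSFORM(key, value, key, value, key, value, key, value, key, value, key, value)
           USING 'head -n 1' AS (a, b, c, d, e, f, g, h, i, j, k, l)
    FROM src;

Note how the Limit collapses the estimate to Num rows: 1, Data size: 10, while the map-only transform keeps the full 500-row estimate.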
Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/sort.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/sort.q.out?rev=1629562&r1=1629561&r2=1629562&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/sort.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/sort.q.out Mon Oct  6 03:44:13 2014
@@ -19,20 +19,25 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: x
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: string)
                       sort order: +
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col1 (type: string)
         Reducer 2 
             Reduce Operator Tree:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
                 outputColumnNames: _col0, _col1
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
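
The sort.q.out plan is the simplest of the set: a map-side Select over src feeding a single Reduce Output Operator keyed on key, with the reducer writing the rows back out in key order. It corresponds to a query along the lines of:

    EXPLAIN
    SELECT x.* FROM src x SORT BY key;

Here the added Statistics lines carry the unchanged 500-row / 5312-byte estimate through every operator, since nothing in the plan filters or aggregates.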