Posted to commits@hive.apache.org by xu...@apache.org on 2014/10/30 13:31:52 UTC

svn commit: r1635477 [8/17] - in /hive/branches/spark: common/src/java/org/apache/hadoop/hive/common/ data/conf/spark/ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/counter/ ql/src/java/org/apache/ha...

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into1.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into1.q.out Thu Oct 30 12:31:47 2014
@@ -85,7 +85,6 @@ PREHOOK: query: INSERT INTO TABLE insert
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@insert_into1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT INTO TABLE insert_into1 SELECT * from src LIMIT 100
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
@@ -112,54 +111,12 @@ POSTHOOK: query: explain 
 select count(*) from insert_into1
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
+  Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (GROUP, 1)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: insert_into1
-                  Statistics: Num rows: 0 Data size: 1176 Basic stats: PARTIAL Column stats: COMPLETE
-                  Select Operator
-                    Statistics: Num rows: 0 Data size: 1176 Basic stats: PARTIAL Column stats: COMPLETE
-                    Group By Operator
-                      aggregations: count()
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col0 (type: bigint)
-        Reducer 2 
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col0 (type: bigint)
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
-      limit: -1
+      limit: 1
       Processor Tree:
         ListSink
 
@@ -247,7 +204,6 @@ PREHOOK: query: INSERT INTO TABLE insert
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@insert_into1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT INTO TABLE insert_into1 SELECT * FROM src LIMIT 100
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
@@ -274,54 +230,12 @@ POSTHOOK: query: explain
 SELECT COUNT(*) FROM insert_into1
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
+  Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (GROUP, 1)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: insert_into1
-                  Statistics: Num rows: 0 Data size: 2352 Basic stats: PARTIAL Column stats: COMPLETE
-                  Select Operator
-                    Statistics: Num rows: 0 Data size: 2352 Basic stats: PARTIAL Column stats: COMPLETE
-                    Group By Operator
-                      aggregations: count()
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col0 (type: bigint)
-        Reducer 2 
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col0 (type: bigint)
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
-      limit: -1
+      limit: 1
       Processor Tree:
         ListSink
 
@@ -409,7 +323,6 @@ PREHOOK: query: INSERT OVERWRITE TABLE i
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@insert_into1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT OVERWRITE TABLE insert_into1 SELECT * FROM src LIMIT 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
@@ -436,54 +349,12 @@ POSTHOOK: query: explain
 SELECT COUNT(*) FROM insert_into1
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
+  Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (GROUP, 1)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: insert_into1
-                  Statistics: Num rows: -1 Data size: 114 Basic stats: PARTIAL Column stats: COMPLETE
-                  Select Operator
-                    Statistics: Num rows: -1 Data size: 114 Basic stats: PARTIAL Column stats: COMPLETE
-                    Group By Operator
-                      aggregations: count()
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                        value expressions: _col0 (type: bigint)
-        Reducer 2 
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                Select Operator
-                  expressions: _col0 (type: bigint)
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
-      limit: -1
+      limit: 1
       Processor Tree:
         ListSink
 

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into2.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into2.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into2.q.out Thu Oct 30 12:31:47 2014
@@ -91,7 +91,6 @@ PREHOOK: query: INSERT INTO TABLE insert
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@insert_into2@ds=1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT INTO TABLE insert_into2 PARTITION (ds='1') SELECT * FROM src limit 100
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
@@ -105,73 +104,28 @@ POSTHOOK: query: explain
 select count (*) from insert_into2 where ds = '1'
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
+  Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (GROUP, 1)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: insert_into2
-                  Statistics: Num rows: 0 Data size: 1176 Basic stats: PARTIAL Column stats: NONE
-                  Select Operator
-                    Statistics: Num rows: 0 Data size: 1176 Basic stats: PARTIAL Column stats: NONE
-                    Group By Operator
-                      aggregations: count()
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: bigint)
-        Reducer 2 
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col0 (type: bigint)
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
-      limit: -1
+      limit: 1
       Processor Tree:
         ListSink
 
 PREHOOK: query: select count (*) from insert_into2 where ds = '1'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@insert_into2
-PREHOOK: Input: default@insert_into2@ds=1
 #### A masked pattern was here ####
 POSTHOOK: query: select count (*) from insert_into2 where ds = '1'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@insert_into2
-POSTHOOK: Input: default@insert_into2@ds=1
 #### A masked pattern was here ####
 100
 PREHOOK: query: INSERT INTO TABLE insert_into2 PARTITION (ds='1') SELECT * FROM src limit 100
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@insert_into2@ds=1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT INTO TABLE insert_into2 PARTITION (ds='1') SELECT * FROM src limit 100
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
@@ -185,66 +139,22 @@ POSTHOOK: query: explain
 SELECT COUNT(*) FROM insert_into2 WHERE ds='1'
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
+  Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (GROUP, 1)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: insert_into2
-                  Statistics: Num rows: 0 Data size: 2352 Basic stats: PARTIAL Column stats: NONE
-                  Select Operator
-                    Statistics: Num rows: 0 Data size: 2352 Basic stats: PARTIAL Column stats: NONE
-                    Group By Operator
-                      aggregations: count()
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: bigint)
-        Reducer 2 
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col0 (type: bigint)
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
-      limit: -1
+      limit: 1
       Processor Tree:
         ListSink
 
 PREHOOK: query: SELECT COUNT(*) FROM insert_into2 WHERE ds='1'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@insert_into2
-PREHOOK: Input: default@insert_into2@ds=1
 #### A masked pattern was here ####
 POSTHOOK: query: SELECT COUNT(*) FROM insert_into2 WHERE ds='1'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@insert_into2
-POSTHOOK: Input: default@insert_into2@ds=1
 #### A masked pattern was here ####
 200
 PREHOOK: query: SELECT SUM(HASH(c)) FROM (
@@ -342,7 +252,6 @@ PREHOOK: query: INSERT OVERWRITE TABLE i
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@insert_into2@ds=2
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT OVERWRITE TABLE insert_into2 PARTITION (ds='2')
   SELECT * FROM src LIMIT 100
 POSTHOOK: type: QUERY
@@ -374,66 +283,22 @@ POSTHOOK: query: explain
 SELECT COUNT(*) FROM insert_into2 WHERE ds='2'
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
+  Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (GROUP, 1)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: insert_into2
-                  Statistics: Num rows: 0 Data size: 1176 Basic stats: PARTIAL Column stats: NONE
-                  Select Operator
-                    Statistics: Num rows: 0 Data size: 1176 Basic stats: PARTIAL Column stats: NONE
-                    Group By Operator
-                      aggregations: count()
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: bigint)
-        Reducer 2 
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col0 (type: bigint)
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
-      limit: -1
+      limit: 1
       Processor Tree:
         ListSink
 
 PREHOOK: query: SELECT COUNT(*) FROM insert_into2 WHERE ds='2'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@insert_into2
-PREHOOK: Input: default@insert_into2@ds=2
 #### A masked pattern was here ####
 POSTHOOK: query: SELECT COUNT(*) FROM insert_into2 WHERE ds='2'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@insert_into2
-POSTHOOK: Input: default@insert_into2@ds=2
 #### A masked pattern was here ####
 100
 PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE insert_into2 PARTITION (ds='2')
@@ -516,7 +381,6 @@ PREHOOK: query: INSERT OVERWRITE TABLE i
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@insert_into2@ds=2
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT OVERWRITE TABLE insert_into2 PARTITION (ds='2')
   SELECT * FROM src LIMIT 50
 POSTHOOK: type: QUERY
@@ -548,66 +412,22 @@ POSTHOOK: query: explain
 SELECT COUNT(*) FROM insert_into2 WHERE ds='2'
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
+  Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Spark
-      Edges:
-        Reducer 2 <- Map 1 (GROUP, 1)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: insert_into2
-                  Statistics: Num rows: 0 Data size: 586 Basic stats: PARTIAL Column stats: NONE
-                  Select Operator
-                    Statistics: Num rows: 0 Data size: 586 Basic stats: PARTIAL Column stats: NONE
-                    Group By Operator
-                      aggregations: count()
-                      mode: hash
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: bigint)
-        Reducer 2 
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: count(VALUE._col0)
-                mode: mergepartial
-                outputColumnNames: _col0
-                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col0 (type: bigint)
-                  outputColumnNames: _col0
-                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
-      limit: -1
+      limit: 1
       Processor Tree:
         ListSink
 
 PREHOOK: query: SELECT COUNT(*) FROM insert_into2 WHERE ds='2'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@insert_into2
-PREHOOK: Input: default@insert_into2@ds=2
 #### A masked pattern was here ####
 POSTHOOK: query: SELECT COUNT(*) FROM insert_into2 WHERE ds='2'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@insert_into2
-POSTHOOK: Input: default@insert_into2@ds=2
 #### A masked pattern was here ####
 50
 PREHOOK: query: DROP TABLE insert_into2

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into3.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into3.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/insert_into3.q.out Thu Oct 30 12:31:47 2014
@@ -143,8 +143,6 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@insert_into3a
 PREHOOK: Output: default@insert_into3b
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: FROM src INSERT INTO TABLE insert_into3a SELECT * ORDER BY key, value LIMIT 50
          INSERT INTO TABLE insert_into3b SELECT * ORDER BY key, value LIMIT 100
 POSTHOOK: type: QUERY
@@ -308,8 +306,6 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@insert_into3a
 PREHOOK: Output: default@insert_into3b
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: FROM src INSERT OVERWRITE TABLE insert_into3a SELECT * LIMIT 10
          INSERT INTO TABLE insert_into3b SELECT * LIMIT 10
 POSTHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join1.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join1.q.out Thu Oct 30 12:31:47 2014
@@ -98,7 +98,6 @@ INSERT OVERWRITE TABLE dest_j1 SELECT sr
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@dest_j1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key)
 INSERT OVERWRITE TABLE dest_j1 SELECT src1.key, src2.value
 POSTHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join14.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join14.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join14.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join14.q.out Thu Oct 30 12:31:47 2014
@@ -105,7 +105,6 @@ PREHOOK: Input: default@srcpart
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
 PREHOOK: Output: default@dest1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: FROM src JOIN srcpart ON src.key = srcpart.key AND srcpart.ds = '2008-04-08' and src.key > 100
 INSERT OVERWRITE TABLE dest1 SELECT src.key, srcpart.value
 POSTHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join17.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join17.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/join17.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/join17.q.out Thu Oct 30 12:31:47 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join2.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join2.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join2.q.out Thu Oct 30 12:31:47 2014
@@ -131,7 +131,6 @@ INSERT OVERWRITE TABLE dest_j2 SELECT sr
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@dest_j2
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key + src2.key = src3.key)
 INSERT OVERWRITE TABLE dest_j2 SELECT src1.key, src3.value
 POSTHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join24.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join24.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join24.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join24.q.out Thu Oct 30 12:31:47 2014
@@ -11,7 +11,6 @@ SELECT a.key, count(1) FROM src a group 
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@tst1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT OVERWRITE TABLE tst1
 SELECT a.key, count(1) FROM src a group by a.key
 POSTHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join25.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join25.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join25.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join25.q.out Thu Oct 30 12:31:47 2014
@@ -107,7 +107,6 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@src1
 PREHOOK: Output: default@dest_j1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 
 SELECT /*+ MAPJOIN(x) */ x.key, x.value, y.value
 FROM src1 x JOIN src y ON (x.key = y.key)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join26.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join26.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/join26.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/join26.q.out Thu Oct 30 12:31:47 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join27.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join27.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join27.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join27.q.out Thu Oct 30 12:31:47 2014
@@ -106,7 +106,6 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@src1
 PREHOOK: Output: default@dest_j1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 
 SELECT /*+ MAPJOIN(x) */ x.key, x.value, y.value
 FROM src1 x JOIN src y ON (x.value = y.value)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join28.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join28.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join28.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join28.q.out Thu Oct 30 12:31:47 2014
@@ -154,7 +154,6 @@ PREHOOK: Input: default@src1
 PREHOOK: Input: default@srcpart
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 PREHOOK: Output: default@dest_j1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 
 SELECT subq.key1, z.value
 FROM

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join29.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join29.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join29.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join29.q.out Thu Oct 30 12:31:47 2014
@@ -172,7 +172,6 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@src1
 PREHOOK: Output: default@dest_j1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 
 SELECT subq1.key, subq1.cnt, subq2.cnt
 FROM (select x.key, count(1) as cnt from src1 x group by x.key) subq1 JOIN 

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join3.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join3.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join3.q.out Thu Oct 30 12:31:47 2014
@@ -113,7 +113,6 @@ INSERT OVERWRITE TABLE dest1 SELECT src1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@dest1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key = src3.key)
 INSERT OVERWRITE TABLE dest1 SELECT src1.key, src3.value
 POSTHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join30.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join30.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join30.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join30.q.out Thu Oct 30 12:31:47 2014
@@ -127,7 +127,6 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@src1
 PREHOOK: Output: default@dest_j1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 
 SELECT /*+ MAPJOIN(x) */ x.key, count(1) FROM src1 x JOIN src y ON (x.key = y.key) group by x.key
 POSTHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join31.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join31.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join31.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join31.q.out Thu Oct 30 12:31:47 2014
@@ -198,7 +198,6 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@src1
 PREHOOK: Output: default@dest_j1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 
 SELECT subq1.key, count(1) as cnt
 FROM (select x.key, count(1) as cnt from src1 x group by x.key) subq1 JOIN 

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join32.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join32.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/join32.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/join32.q.out Thu Oct 30 12:31:47 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out Thu Oct 30 12:31:47 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join33.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join33.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/join33.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/join33.q.out Thu Oct 30 12:31:47 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join34.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join34.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/join34.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/join34.q.out Thu Oct 30 12:31:47 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join35.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join35.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/join35.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/join35.q.out Thu Oct 30 12:31:47 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join36.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join36.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join36.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join36.q.out Thu Oct 30 12:31:47 2014
@@ -31,7 +31,6 @@ SELECT key, count(1) from src group by k
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@tmp1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT OVERWRITE TABLE tmp1
 SELECT key, count(1) from src group by key
 POSTHOOK: type: QUERY
@@ -44,7 +43,6 @@ SELECT key, count(1) from src group by k
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@tmp2
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT OVERWRITE TABLE tmp2
 SELECT key, count(1) from src group by key
 POSTHOOK: type: QUERY
@@ -79,29 +77,29 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: y
-                  Statistics: Num rows: 223 Data size: 1791 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 112 Data size: 899 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 155 Data size: 743 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: key (type: int)
                       sort order: +
                       Map-reduce partition columns: key (type: int)
-                      Statistics: Num rows: 112 Data size: 899 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 155 Data size: 743 Basic stats: COMPLETE Column stats: NONE
                       value expressions: cnt (type: int)
         Map 3 
             Map Operator Tree:
                 TableScan
                   alias: x
-                  Statistics: Num rows: 223 Data size: 1791 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 112 Data size: 899 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 155 Data size: 743 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: key (type: int)
                       sort order: +
                       Map-reduce partition columns: key (type: int)
-                      Statistics: Num rows: 112 Data size: 899 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 155 Data size: 743 Basic stats: COMPLETE Column stats: NONE
                       value expressions: cnt (type: int)
         Reducer 2 
             Reduce Operator Tree:
@@ -112,14 +110,14 @@ STAGE PLANS:
                   0 {KEY.reducesinkkey0} {VALUE._col0}
                   1 {VALUE._col0}
                 outputColumnNames: _col0, _col1, _col6
-                Statistics: Num rows: 123 Data size: 988 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 170 Data size: 817 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
                   expressions: _col0 (type: int), _col1 (type: int), _col6 (type: int)
                   outputColumnNames: _col0, _col1, _col2
-                  Statistics: Num rows: 123 Data size: 988 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 170 Data size: 817 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
-                    Statistics: Num rows: 123 Data size: 988 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 170 Data size: 817 Basic stats: COMPLETE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -149,7 +147,6 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@tmp1
 PREHOOK: Input: default@tmp2
 PREHOOK: Output: default@dest_j1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 
 SELECT /*+ MAPJOIN(x) */ x.key, x.cnt, y.cnt
 FROM tmp1 x JOIN tmp2 y ON (x.key = y.key)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join37.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join37.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join37.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join37.q.out Thu Oct 30 12:31:47 2014
@@ -107,7 +107,6 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@src1
 PREHOOK: Output: default@dest_j1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1 
 SELECT /*+ MAPJOIN(X) */ x.key, x.value, y.value
 FROM src1 x JOIN src y ON (x.key = y.key)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join38.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join38.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join38.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join38.q.out Thu Oct 30 12:31:47 2014
@@ -10,7 +10,6 @@ PREHOOK: query: insert overwrite table t
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@tmp
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: insert overwrite table tmp select key, cast(key + 1 as int), key +2, key+3, key+4, cast(key+5 as int), key+6, key+7, key+8, key+9, key+10, cast(key+11 as int) from src where key = 100
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
@@ -65,7 +64,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: b
-                  Statistics: Num rows: 1 Data size: 128 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 126 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (col11 is not null and (col11 = 111)) (type: boolean)
                     Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join39.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join39.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join39.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join39.q.out Thu Oct 30 12:31:47 2014
@@ -107,7 +107,6 @@ FROM src x left outer JOIN (select * fro
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@dest_j1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1
 SELECT /*+ MAPJOIN(y) */ x.key, x.value, y.key, y.value
 FROM src x left outer JOIN (select * from src where key <= 100) y ON (x.key = y.key)

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join4.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join4.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join4.q.out Thu Oct 30 12:31:47 2014
@@ -140,7 +140,6 @@ INSERT OVERWRITE TABLE dest1 SELECT c.c1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@dest1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: FROM (
  FROM 
   (

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join41.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join41.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join41.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join41.q.out Thu Oct 30 12:31:47 2014
@@ -3,7 +3,6 @@ PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 PREHOOK: Output: database:default
 PREHOOK: Output: default@s1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: create table s1 as select * from src where key = 0
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
@@ -30,26 +29,26 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: src2
-                  Statistics: Num rows: 0 Data size: 24 Basic stats: PARTIAL Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (key > 10) (type: boolean)
-                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: key (type: string)
                       sort order: +
                       Map-reduce partition columns: key (type: string)
-                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
                       value expressions: value (type: string)
         Map 3 
             Map Operator Tree:
                 TableScan
                   alias: src1
-                  Statistics: Num rows: 0 Data size: 24 Basic stats: PARTIAL Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                   Reduce Output Operator
                     key expressions: key (type: string)
                     sort order: +
                     Map-reduce partition columns: key (type: string)
-                    Statistics: Num rows: 0 Data size: 24 Basic stats: PARTIAL Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                     value expressions: value (type: string)
         Reducer 2 
             Reduce Operator Tree:
@@ -60,14 +59,14 @@ STAGE PLANS:
                   0 {KEY.reducesinkkey0} {VALUE._col0}
                   1 {KEY.reducesinkkey0} {VALUE._col0}
                 outputColumnNames: _col0, _col1, _col5, _col6
-                Statistics: Num rows: 0 Data size: 26 Basic stats: PARTIAL Column stats: NONE
+                Statistics: Num rows: 3 Data size: 23 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
                   expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string)
                   outputColumnNames: _col0, _col1, _col2, _col3
-                  Statistics: Num rows: 0 Data size: 26 Basic stats: PARTIAL Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 23 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
-                    Statistics: Num rows: 0 Data size: 26 Basic stats: PARTIAL Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 23 Basic stats: COMPLETE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -113,26 +112,26 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: src2
-                  Statistics: Num rows: 0 Data size: 24 Basic stats: PARTIAL Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (key > 10) (type: boolean)
-                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: key (type: string)
                       sort order: +
                       Map-reduce partition columns: key (type: string)
-                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
                       value expressions: value (type: string)
         Map 3 
             Map Operator Tree:
                 TableScan
                   alias: src1
-                  Statistics: Num rows: 0 Data size: 24 Basic stats: PARTIAL Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                   Reduce Output Operator
                     key expressions: key (type: string)
                     sort order: +
                     Map-reduce partition columns: key (type: string)
-                    Statistics: Num rows: 0 Data size: 24 Basic stats: PARTIAL Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                     value expressions: value (type: string)
         Reducer 2 
             Reduce Operator Tree:
@@ -143,14 +142,14 @@ STAGE PLANS:
                   0 {KEY.reducesinkkey0} {VALUE._col0}
                   1 {KEY.reducesinkkey0} {VALUE._col0}
                 outputColumnNames: _col0, _col1, _col5, _col6
-                Statistics: Num rows: 0 Data size: 26 Basic stats: PARTIAL Column stats: NONE
+                Statistics: Num rows: 3 Data size: 23 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
                   expressions: _col0 (type: string), _col1 (type: string), _col5 (type: string), _col6 (type: string)
                   outputColumnNames: _col0, _col1, _col2, _col3
-                  Statistics: Num rows: 0 Data size: 26 Basic stats: PARTIAL Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 23 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
-                    Statistics: Num rows: 0 Data size: 26 Basic stats: PARTIAL Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 23 Basic stats: COMPLETE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join5.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join5.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join5.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join5.q.out Thu Oct 30 12:31:47 2014
@@ -140,7 +140,6 @@ INSERT OVERWRITE TABLE dest1 SELECT c.c1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@dest1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: FROM (
  FROM 
   (

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join6.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join6.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join6.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join6.q.out Thu Oct 30 12:31:47 2014
@@ -140,7 +140,6 @@ INSERT OVERWRITE TABLE dest1 SELECT c.c1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@dest1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: FROM (
  FROM 
   (

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join7.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join7.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join7.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join7.q.out Thu Oct 30 12:31:47 2014
@@ -175,7 +175,6 @@ INSERT OVERWRITE TABLE dest1 SELECT c.c1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@dest1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: FROM (
  FROM 
   (

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join8.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join8.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join8.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join8.q.out Thu Oct 30 12:31:47 2014
@@ -143,7 +143,6 @@ INSERT OVERWRITE TABLE dest1 SELECT c.c1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@dest1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: FROM (
  FROM 
   (

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join9.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join9.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/join9.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/join9.q.out Thu Oct 30 12:31:47 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join_filters_overlap.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join_filters_overlap.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/join_filters_overlap.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/join_filters_overlap.q.out Thu Oct 30 12:31:47 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join_map_ppr.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join_map_ppr.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/join_map_ppr.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/join_map_ppr.q.out Thu Oct 30 12:31:47 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join_nullsafe.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join_nullsafe.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join_nullsafe.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join_nullsafe.q.out Thu Oct 30 12:31:47 2014
@@ -653,8 +653,6 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@smb_input
 PREHOOK: Output: default@smb_input1
 PREHOOK: Output: default@smb_input2
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: from smb_input
 insert overwrite table smb_input1 select *
 insert overwrite table smb_input2 select *

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/join_rc.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/join_rc.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/join_rc.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/join_rc.q.out Thu Oct 30 12:31:47 2014
@@ -18,7 +18,6 @@ PREHOOK: query: insert overwrite table j
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@join_rc1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: insert overwrite table join_rc1 select * from src
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
@@ -29,7 +28,6 @@ PREHOOK: query: insert overwrite table j
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@join_rc2
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: insert overwrite table join_rc2 select * from src
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
@@ -59,29 +57,29 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: join_rc2
-                  Statistics: Num rows: 26 Data size: 5293 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 500 Data size: 4812 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 13 Data size: 2646 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: key (type: string)
                       sort order: +
                       Map-reduce partition columns: key (type: string)
-                      Statistics: Num rows: 13 Data size: 2646 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
                       value expressions: value (type: string)
         Map 3 
             Map Operator Tree:
                 TableScan
                   alias: join_rc1
-                  Statistics: Num rows: 52 Data size: 5293 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 500 Data size: 4812 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 26 Data size: 2646 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: key (type: string)
                       sort order: +
                       Map-reduce partition columns: key (type: string)
-                      Statistics: Num rows: 26 Data size: 2646 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
         Reducer 2 
             Reduce Operator Tree:
               Join Operator
@@ -91,14 +89,14 @@ STAGE PLANS:
                   0 {KEY.reducesinkkey0}
                   1 {VALUE._col0}
                 outputColumnNames: _col0, _col6
-                Statistics: Num rows: 28 Data size: 2910 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 275 Data size: 2646 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
                   expressions: _col0 (type: string), _col6 (type: string)
                   outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 28 Data size: 2910 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 275 Data size: 2646 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
-                    Statistics: Num rows: 28 Data size: 2910 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 275 Data size: 2646 Basic stats: COMPLETE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part1.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part1.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part1.q.out Thu Oct 30 12:31:47 2014
@@ -151,8 +151,6 @@ PREHOOK: Input: default@srcpart@ds=2008-
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
 PREHOOK: Output: default@nzhang_part1
 PREHOOK: Output: default@nzhang_part2@ds=2008-12-31
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: from srcpart
 insert overwrite table nzhang_part1 partition (ds, hr) select key, value, ds, hr where ds <= '2008-04-08'
 insert overwrite table nzhang_part2 partition(ds='2008-12-31', hr) select key, value, hr where ds > '2008-04-08'

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part10.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part10.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part10.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part10.q.out Thu Oct 30 12:31:47 2014
@@ -101,7 +101,6 @@ PREHOOK: Input: default@srcpart
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
 PREHOOK: Output: default@nzhang_part10@ds=2008-12-31
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: from srcpart
 insert overwrite table nzhang_part10 partition(ds='2008-12-31', hr) select key, value, hr where ds > '2008-04-08'
 POSTHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part11.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part11.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part11.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part11.q.out Thu Oct 30 12:31:47 2014
@@ -42,7 +42,6 @@ PREHOOK: Input: default@srcpart@ds=2008-
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
 PREHOOK: Output: default@nzhang_part@ds=2010-03-03
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: insert overwrite table nzhang_part partition (ds="2010-03-03", hr) select key, value, hr from srcpart where ds is not null and hr is not null
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcpart

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part12.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part12.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part12.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part12.q.out Thu Oct 30 12:31:47 2014
@@ -42,7 +42,6 @@ PREHOOK: Input: default@srcpart@ds=2008-
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
 PREHOOK: Output: default@nzhang_part12@ds=2010-03-03
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: insert overwrite table nzhang_part12 partition (ds="2010-03-03", hr) select key, value, cast(hr*2 as int) from srcpart where ds is not null and hr is not null
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcpart

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part13.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part13.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part13.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part13.q.out Thu Oct 30 12:31:47 2014
@@ -141,7 +141,6 @@ select * from (
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@nzhang_part13@ds=2010-03-03
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: insert overwrite table nzhang_part13 partition (ds="2010-03-03", hr) 
 select * from (
    select key, value, '22'

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part14.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part14.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part14.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part14.q.out Thu Oct 30 12:31:47 2014
@@ -200,7 +200,6 @@ select key, value from (
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@nzhang_part14
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: insert overwrite table nzhang_part14 partition(value) 
 select key, value from (
   select 'k1' as key, cast(null as string) as value from src limit 2

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part15.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part15.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part15.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part15.q.out Thu Oct 30 12:31:47 2014
@@ -19,7 +19,6 @@ SELECT key, part_key FROM src LATERAL VI
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@load_dyn_part15_test
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT OVERWRITE TABLE load_dyn_part15_test PARTITION(part_key)
 SELECT key, part_key FROM src LATERAL VIEW explode(array("1","{2","3]")) myTable AS part_key
 POSTHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part2.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part2.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part2.q.out Thu Oct 30 12:31:47 2014
@@ -105,7 +105,6 @@ PREHOOK: Input: default@srcpart@ds=2008-
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
 PREHOOK: Output: default@nzhang_part_bucket@ds=2010-03-23
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: insert overwrite table nzhang_part_bucket partition (ds='2010-03-23', hr) select key, value, hr from srcpart where ds is not null and hr is not null
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcpart

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part3.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part3.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part3.q.out Thu Oct 30 12:31:47 2014
@@ -100,7 +100,6 @@ PREHOOK: Input: default@srcpart@ds=2008-
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
 PREHOOK: Output: default@nzhang_part3
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: insert overwrite table nzhang_part3 partition (ds, hr) select key, value, ds, hr from srcpart where ds is not null and hr is not null
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcpart

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part4.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part4.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part4.q.out Thu Oct 30 12:31:47 2014
@@ -42,7 +42,6 @@ PREHOOK: query: insert overwrite table n
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@nzhang_part4@ds=2008-04-08/hr=existing_value
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: insert overwrite table nzhang_part4 partition (ds='2008-04-08', hr='existing_value') select key, value from src
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
@@ -111,7 +110,6 @@ PREHOOK: Input: default@srcpart@ds=2008-
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
 PREHOOK: Output: default@nzhang_part4
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: insert overwrite table nzhang_part4 partition (ds, hr) select key, value, ds, hr from srcpart where ds is not null and hr is not null
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcpart

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part5.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part5.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part5.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part5.q.out Thu Oct 30 12:31:47 2014
@@ -78,7 +78,6 @@ PREHOOK: query: insert overwrite table n
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@nzhang_part5
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: insert overwrite table nzhang_part5 partition (value) select key, value from src
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part6.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part6.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part6.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part6.q.out Thu Oct 30 12:31:47 2014
@@ -42,7 +42,6 @@ PREHOOK: Input: default@srcpart@ds=2008-
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
 PREHOOK: Output: default@nzhang_part6@ds=2010-03-03
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: insert overwrite table nzhang_part6 partition (ds="2010-03-03", hr) select key, value, hr from srcpart where ds is not null and hr is not null
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcpart

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part7.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part7.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part7.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part7.q.out Thu Oct 30 12:31:47 2014
@@ -39,7 +39,6 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@srcpart
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
 PREHOOK: Output: default@nzhang_part7@ds=2010-03-03/hr=12
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: insert overwrite table nzhang_part7 partition (ds='2010-03-03', hr='12') select key, value from srcpart where ds = '2008-04-08' and hr = '12'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@srcpart

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part8.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part8.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part8.q.out (original) and hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part8.q.out Thu Oct 30 12:31:47 2014 differ

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part9.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part9.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part9.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/load_dyn_part9.q.out Thu Oct 30 12:31:47 2014
@@ -101,7 +101,6 @@ PREHOOK: Input: default@srcpart
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
 PREHOOK: Output: default@nzhang_part9
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: from srcpart
 insert overwrite table nzhang_part9 partition (ds, hr) select key, value, ds, hr where ds <= '2008-04-08'
 POSTHOOK: type: QUERY

Modified: hive/branches/spark/ql/src/test/results/clientpositive/spark/mapjoin_decimal.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/mapjoin_decimal.q.out?rev=1635477&r1=1635476&r2=1635477&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/mapjoin_decimal.q.out (original)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/mapjoin_decimal.q.out Thu Oct 30 12:31:47 2014
@@ -50,7 +50,6 @@ PREHOOK: query: INSERT INTO TABLE t1 sel
 PREHOOK: type: QUERY
 PREHOOK: Input: default@over1k
 PREHOOK: Output: default@t1
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT INTO TABLE t1 select dec from over1k
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over1k
@@ -68,7 +67,6 @@ PREHOOK: query: INSERT INTO TABLE t2 sel
 PREHOOK: type: QUERY
 PREHOOK: Input: default@over1k
 PREHOOK: Output: default@t2
-[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
 POSTHOOK: query: INSERT INTO TABLE t2 select dec from over1k
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@over1k
@@ -95,28 +93,28 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: t2
-                  Statistics: Num rows: 12 Data size: 1359 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1049 Data size: 117488 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: dec is not null (type: boolean)
-                    Statistics: Num rows: 6 Data size: 679 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 525 Data size: 58800 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: dec (type: decimal(4,0))
                       sort order: +
                       Map-reduce partition columns: dec (type: decimal(4,0))
-                      Statistics: Num rows: 6 Data size: 679 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 525 Data size: 58800 Basic stats: COMPLETE Column stats: NONE
         Map 3 
             Map Operator Tree:
                 TableScan
                   alias: t1
-                  Statistics: Num rows: 21 Data size: 2422 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1049 Data size: 117488 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: dec is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1268 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 525 Data size: 58800 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: dec (type: decimal(4,2))
                       sort order: +
                       Map-reduce partition columns: dec (type: decimal(4,2))
-                      Statistics: Num rows: 11 Data size: 1268 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 525 Data size: 58800 Basic stats: COMPLETE Column stats: NONE
         Reducer 2 
             Reduce Operator Tree:
               Join Operator
@@ -126,14 +124,14 @@ STAGE PLANS:
                   0 {KEY.reducesinkkey0}
                   1 {KEY.reducesinkkey0}
                 outputColumnNames: _col0, _col4
-                Statistics: Num rows: 12 Data size: 1394 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 577 Data size: 64680 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
                   expressions: _col0 (type: decimal(4,2)), _col4 (type: decimal(4,0))
                   outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 12 Data size: 1394 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 577 Data size: 64680 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
-                    Statistics: Num rows: 12 Data size: 1394 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 577 Data size: 64680 Basic stats: COMPLETE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat