Posted to commits@hive.apache.org by jc...@apache.org on 2018/03/29 05:42:48 UTC

[06/13] hive git commit: HIVE-18770: Additional tests and fixes for materialized view rewriting (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)
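A reading aid before the diffs: the two new golden files exercise Hive's automatic materialized view rewriting. A minimal sketch of the pattern they cover, reconstructed from the .q.out content below (the .q files themselves are not part of this mail):

  -- transactional ORC source table, as used by the tests
  create table if not exists source_table_001 (
    my_date date, my_id bigint, my_id2 bigint,
    environment string, down_volume bigint, up_volume bigint)
  stored as orc tblproperties ("transactional"="true");

  -- materialized view registered for automatic rewriting
  create materialized view source_table_001_mv enable rewrite as
  select sum(a.down_volume) as down_volume_sum,
         sum(a.up_volume) as up_volume_sum,
         a.my_date, a.my_id2, a.environment
  from source_table_001 as a
  group by a.my_id, a.my_id2, a.environment, a.my_date;

  -- a compatible aggregate query; the EXPLAIN plans below show it
  -- answered by a TableScan of default.source_table_001_mv rather
  -- than a re-aggregation of source_table_001
  explain
  select sum(a.down_volume) as download_volume_bytes,
         a.my_date, a.my_id2, a.environment
  from source_table_001 as a
  group by a.my_id, a.my_id2, a.environment, a.my_date;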

http://git-wip-us.apache.org/repos/asf/hive/blob/1b01f9c4/ql/src/test/results/clientpositive/materialized_view_rewrite_8.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/materialized_view_rewrite_8.q.out b/ql/src/test/results/clientpositive/materialized_view_rewrite_8.q.out
new file mode 100644
index 0000000..1ca06d3
--- /dev/null
+++ b/ql/src/test/results/clientpositive/materialized_view_rewrite_8.q.out
@@ -0,0 +1,642 @@
+PREHOOK: query: create table if not exists source_table_001 (
+MY_DATE date,
+MY_ID bigint,
+MY_ID2 bigint,
+ENVIRONMENT string,
+DOWN_VOLUME bigint,
+UP_VOLUME bigint
+)
+stored AS ORC
+TBLPROPERTIES("transactional"="true")
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@source_table_001
+POSTHOOK: query: create table if not exists source_table_001 (
+MY_DATE date,
+MY_ID bigint,
+MY_ID2 bigint,
+ENVIRONMENT string,
+DOWN_VOLUME bigint,
+UP_VOLUME bigint
+)
+stored AS ORC
+TBLPROPERTIES("transactional"="true")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@source_table_001
+PREHOOK: query: insert into table source_table_001
+  values ('2010-10-10', 1, 1, 'env', 1, 1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@source_table_001
+POSTHOOK: query: insert into table source_table_001
+  values ('2010-10-10', 1, 1, 'env', 1, 1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@source_table_001
+POSTHOOK: Lineage: source_table_001.down_volume SCRIPT []
+POSTHOOK: Lineage: source_table_001.environment SCRIPT []
+POSTHOOK: Lineage: source_table_001.my_date SCRIPT []
+POSTHOOK: Lineage: source_table_001.my_id SCRIPT []
+POSTHOOK: Lineage: source_table_001.my_id2 SCRIPT []
+POSTHOOK: Lineage: source_table_001.up_volume SCRIPT []
+PREHOOK: query: analyze table source_table_001 compute statistics for columns
+PREHOOK: type: QUERY
+PREHOOK: Input: default@source_table_001
+PREHOOK: Output: default@source_table_001
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table source_table_001 compute statistics for columns
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@source_table_001
+POSTHOOK: Output: default@source_table_001
+#### A masked pattern was here ####
+PREHOOK: query: CREATE MATERIALIZED VIEW source_table_001_mv ENABLE REWRITE AS
+SELECT
+SUM(A.DOWN_VOLUME) AS DOWN_VOLUME_SUM,
+SUM(A.UP_VOLUME) AS UP_VOLUME_SUM,
+A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+from source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,A.MY_DATE
+PREHOOK: type: CREATE_MATERIALIZED_VIEW
+PREHOOK: Input: default@source_table_001
+PREHOOK: Output: database:default
+PREHOOK: Output: default@source_table_001_mv
+POSTHOOK: query: CREATE MATERIALIZED VIEW source_table_001_mv ENABLE REWRITE AS
+SELECT
+SUM(A.DOWN_VOLUME) AS DOWN_VOLUME_SUM,
+SUM(A.UP_VOLUME) AS UP_VOLUME_SUM,
+A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+from source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,A.MY_DATE
+POSTHOOK: type: CREATE_MATERIALIZED_VIEW
+POSTHOOK: Input: default@source_table_001
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@source_table_001_mv
+PREHOOK: query: analyze table source_table_001_mv compute statistics for columns
+PREHOOK: type: QUERY
+PREHOOK: Input: default@source_table_001_mv
+PREHOOK: Output: default@source_table_001_mv
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table source_table_001_mv compute statistics for columns
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@source_table_001_mv
+POSTHOOK: Output: default@source_table_001_mv
+#### A masked pattern was here ####
+PREHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,A.MY_DATE
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,A.MY_DATE
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: default.source_table_001_mv
+          Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: down_volume_sum (type: bigint), my_date (type: date), my_id2 (type: bigint), environment (type: string)
+            outputColumnNames: _col0, _col1, _col2, _col3
+            Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+            ListSink
+
+PREHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,A.MY_DATE
+LIMIT 100
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,A.MY_DATE
+LIMIT 100
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: 100
+      Processor Tree:
+        TableScan
+          alias: default.source_table_001_mv
+          Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: down_volume_sum (type: bigint), my_date (type: date), my_id2 (type: bigint), environment (type: string)
+            outputColumnNames: _col0, _col1, _col2, _col3
+            Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+            Limit
+              Number of rows: 100
+              Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+              ListSink
+
+PREHOOK: query: explain
+select
+1,
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,A.MY_DATE
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+1,
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,A.MY_DATE
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: default.source_table_001_mv
+          Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: 1 (type: int), down_volume_sum (type: bigint), my_date (type: date), my_id2 (type: bigint), environment (type: string)
+            outputColumnNames: _col0, _col1, _col2, _col3, _col4
+            Statistics: Num rows: 1 Data size: 163 Basic stats: COMPLETE Column stats: COMPLETE
+            ListSink
+
+PREHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) + 0 AS DOWNLOAD_VOLUME_BYTES,
+A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,A.MY_DATE
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) + 0 AS DOWNLOAD_VOLUME_BYTES,
+A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,A.MY_DATE
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: default.source_table_001_mv
+            Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: (down_volume_sum + 0L) (type: bigint), my_date (type: date), my_id2 (type: bigint), environment (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,A.MY_DATE
+ORDER BY A.MY_ID2 
+LIMIT 100
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,A.MY_DATE
+ORDER BY A.MY_ID2 
+LIMIT 100
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: default.source_table_001_mv
+            Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: down_volume_sum (type: bigint), my_date (type: date), my_id2 (type: bigint), environment (type: string)
+              outputColumnNames: _col0, _col1, _col2, _col3
+              Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+              Reduce Output Operator
+                key expressions: _col2 (type: bigint)
+                sort order: +
+                Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+                TopN Hash Memory Usage: 0.1
+                value expressions: _col0 (type: bigint), _col1 (type: date), _col3 (type: string)
+      Reduce Operator Tree:
+        Select Operator
+          expressions: VALUE._col0 (type: bigint), VALUE._col1 (type: date), KEY.reducesinkkey0 (type: bigint), VALUE._col2 (type: string)
+          outputColumnNames: _col0, _col1, _col2, _col3
+          Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+          Limit
+            Number of rows: 100
+            Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 100
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: explain
+select
+distinct A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+distinct A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: default.source_table_001_mv
+            Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: my_date (type: date), my_id2 (type: bigint), environment (type: string)
+              outputColumnNames: my_date, my_id2, environment
+              Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+              Group By Operator
+                keys: my_date (type: date), my_id2 (type: bigint), environment (type: string)
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  key expressions: _col0 (type: date), _col1 (type: bigint), _col2 (type: string)
+                  sort order: +++
+                  Map-reduce partition columns: _col0 (type: date), _col1 (type: bigint), _col2 (type: string)
+                  Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+      Reduce Operator Tree:
+        Group By Operator
+          keys: KEY._col0 (type: date), KEY._col1 (type: bigint), KEY._col2 (type: string)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2
+          Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+where A.MY_DATE=TO_DATE('2010-01-10')
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,A.MY_DATE
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+where A.MY_DATE=TO_DATE('2010-01-10')
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,A.MY_DATE
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: default.source_table_001_mv
+            Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+            Filter Operator
+              predicate: (my_date = DATE'2010-01-10') (type: boolean)
+              Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+              Select Operator
+                expressions: down_volume_sum (type: bigint), DATE'2010-01-10' (type: date), my_id2 (type: bigint), environment (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) + SUM(A.UP_VOLUME) AS TOTAL_VOLUME_BYTES,
+A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+where A.MY_DATE=TO_DATE('2010-01-10')
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,A.MY_DATE
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) + SUM(A.UP_VOLUME) AS TOTAL_VOLUME_BYTES,
+A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+where A.MY_DATE=TO_DATE('2010-01-10')
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,A.MY_DATE
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: default.source_table_001_mv
+            Statistics: Num rows: 1 Data size: 167 Basic stats: COMPLETE Column stats: COMPLETE
+            Filter Operator
+              predicate: (my_date = DATE'2010-01-10') (type: boolean)
+              Statistics: Num rows: 1 Data size: 167 Basic stats: COMPLETE Column stats: COMPLETE
+              Select Operator
+                expressions: (down_volume_sum + up_volume_sum) (type: bigint), DATE'2010-01-10' (type: date), my_id2 (type: bigint), environment (type: string)
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 159 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES
+FROM source_table_001 AS A
+where A.MY_DATE=TO_DATE('2010-01-10')
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES
+FROM source_table_001 AS A
+where A.MY_DATE=TO_DATE('2010-01-10')
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: default.source_table_001_mv
+            Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+            Filter Operator
+              predicate: (my_date = DATE'2010-01-10') (type: boolean)
+              Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+              Select Operator
+                expressions: down_volume_sum (type: bigint)
+                outputColumnNames: down_volume_sum
+                Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+                Group By Operator
+                  aggregations: sum(down_volume_sum)
+                  mode: hash
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  Reduce Output Operator
+                    sort order: 
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                    value expressions: _col0 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: sum(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+TO_DATE('2010-01-10')
+FROM source_table_001 AS A
+where A.MY_DATE=TO_DATE('2010-01-10')
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+TO_DATE('2010-01-10')
+FROM source_table_001 AS A
+where A.MY_DATE=TO_DATE('2010-01-10')
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: default.source_table_001_mv
+            Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+            Filter Operator
+              predicate: (my_date = DATE'2010-01-10') (type: boolean)
+              Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+              Select Operator
+                expressions: down_volume_sum (type: bigint)
+                outputColumnNames: down_volume_sum
+                Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+                Group By Operator
+                  aggregations: sum(down_volume_sum)
+                  mode: hash
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  Reduce Output Operator
+                    sort order: 
+                    Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                    value expressions: _col0 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: sum(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: _col0 (type: bigint), DATE'2010-01-10' (type: date)
+            outputColumnNames: _col0, _col1
+            Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+A.MY_DATE
+FROM source_table_001 AS A
+where A.MY_DATE=TO_DATE('2010-01-10')
+group by A.MY_DATE
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+A.MY_DATE
+FROM source_table_001 AS A
+where A.MY_DATE=TO_DATE('2010-01-10')
+group by A.MY_DATE
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: default.source_table_001_mv
+            Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+            Filter Operator
+              predicate: (my_date = DATE'2010-01-10') (type: boolean)
+              Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+              Select Operator
+                expressions: down_volume_sum (type: bigint)
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+                Group By Operator
+                  aggregations: sum(_col0)
+                  keys: DATE'2010-01-10' (type: date)
+                  mode: hash
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: date)
+                    sort order: +
+                    Map-reduce partition columns: _col0 (type: date)
+                    Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+                    value expressions: _col1 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: sum(VALUE._col0)
+          keys: KEY._col0 (type: date)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: _col1 (type: bigint), DATE'2010-01-10' (type: date)
+            outputColumnNames: _col0, _col1
+            Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 1 Data size: 64 Basic stats: COMPLETE Column stats: COMPLETE
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: drop materialized view source_table_001_mv
+PREHOOK: type: DROP_MATERIALIZED_VIEW
+PREHOOK: Input: default@source_table_001_mv
+PREHOOK: Output: default@source_table_001_mv
+POSTHOOK: query: drop materialized view source_table_001_mv
+POSTHOOK: type: DROP_MATERIALIZED_VIEW
+POSTHOOK: Input: default@source_table_001_mv
+POSTHOOK: Output: default@source_table_001_mv
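A note on the plans in this file: when the query's grouping keys and aggregates line up with the view definition, the rewrite collapses to a bare scan of source_table_001_mv, with LIMIT, constant columns, or expressions such as SUM(...) + 0 and SUM(DOWN_VOLUME) + SUM(UP_VOLUME) evaluated on top of the scan. Coarser queries are rewritten as a rollup: because SUM partials can themselves be summed, dropping grouping columns or asking for DISTINCT keys re-aggregates over the view, and the my_date predicate is compensated with a Filter Operator over the view scan. For example (query verbatim from the output above):

  explain
  select sum(a.down_volume) as download_volume_bytes
  from source_table_001 as a
  where a.my_date = to_date('2010-01-10');
  -- plan above: Filter (my_date = DATE'2010-01-10') over the MV scan,
  -- then Group By Operator with aggregations: sum(down_volume_sum)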

http://git-wip-us.apache.org/repos/asf/hive/blob/1b01f9c4/ql/src/test/results/clientpositive/materialized_view_rewrite_9.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/materialized_view_rewrite_9.q.out b/ql/src/test/results/clientpositive/materialized_view_rewrite_9.q.out
new file mode 100644
index 0000000..3120e0d
--- /dev/null
+++ b/ql/src/test/results/clientpositive/materialized_view_rewrite_9.q.out
@@ -0,0 +1,361 @@
+PREHOOK: query: create table if not exists source_table_001 (
+MY_DATE timestamp,
+MY_ID bigint,
+MY_ID2 bigint,
+ENVIRONMENT string,
+DOWN_VOLUME bigint,
+UP_VOLUME bigint
+)
+stored AS ORC
+TBLPROPERTIES("transactional"="true")
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@source_table_001
+POSTHOOK: query: create table if not exists source_table_001 (
+MY_DATE timestamp,
+MY_ID bigint,
+MY_ID2 bigint,
+ENVIRONMENT string,
+DOWN_VOLUME bigint,
+UP_VOLUME bigint
+)
+stored AS ORC
+TBLPROPERTIES("transactional"="true")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@source_table_001
+PREHOOK: query: insert into table source_table_001
+  values ('2010-10-10 00:00:00', 1, 1, 'env', 1, 1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@source_table_001
+POSTHOOK: query: insert into table source_table_001
+  values ('2010-10-10 00:00:00', 1, 1, 'env', 1, 1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@source_table_001
+POSTHOOK: Lineage: source_table_001.down_volume SCRIPT []
+POSTHOOK: Lineage: source_table_001.environment SCRIPT []
+POSTHOOK: Lineage: source_table_001.my_date SCRIPT []
+POSTHOOK: Lineage: source_table_001.my_id SCRIPT []
+POSTHOOK: Lineage: source_table_001.my_id2 SCRIPT []
+POSTHOOK: Lineage: source_table_001.up_volume SCRIPT []
+PREHOOK: query: analyze table source_table_001 compute statistics for columns
+PREHOOK: type: QUERY
+PREHOOK: Input: default@source_table_001
+PREHOOK: Output: default@source_table_001
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table source_table_001 compute statistics for columns
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@source_table_001
+POSTHOOK: Output: default@source_table_001
+#### A masked pattern was here ####
+PREHOOK: query: CREATE MATERIALIZED VIEW source_table_001_mv ENABLE REWRITE AS
+SELECT
+SUM(A.DOWN_VOLUME) AS DOWN_VOLUME_SUM,
+SUM(A.UP_VOLUME) AS UP_VOLUME_SUM,
+A.MY_ID,A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+from source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,A.MY_DATE
+PREHOOK: type: CREATE_MATERIALIZED_VIEW
+PREHOOK: Input: default@source_table_001
+PREHOOK: Output: database:default
+PREHOOK: Output: default@source_table_001_mv
+POSTHOOK: query: CREATE MATERIALIZED VIEW source_table_001_mv ENABLE REWRITE AS
+SELECT
+SUM(A.DOWN_VOLUME) AS DOWN_VOLUME_SUM,
+SUM(A.UP_VOLUME) AS UP_VOLUME_SUM,
+A.MY_ID,A.MY_DATE,A.MY_ID2,A.ENVIRONMENT
+from source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,A.MY_DATE
+POSTHOOK: type: CREATE_MATERIALIZED_VIEW
+POSTHOOK: Input: default@source_table_001
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@source_table_001_mv
+PREHOOK: query: analyze table source_table_001_mv compute statistics for columns
+PREHOOK: type: QUERY
+PREHOOK: Input: default@source_table_001_mv
+PREHOOK: Output: default@source_table_001_mv
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table source_table_001_mv compute statistics for columns
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@source_table_001_mv
+POSTHOOK: Output: default@source_table_001_mv
+#### A masked pattern was here ####
+PREHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+FLOOR(A.MY_DATE to hour),A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,FLOOR(A.MY_DATE to hour)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+FLOOR(A.MY_DATE to hour),A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,FLOOR(A.MY_DATE to hour)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: default.source_table_001_mv
+            Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: down_volume_sum (type: bigint), my_id (type: bigint), my_id2 (type: bigint), environment (type: string), floor_hour(my_date) (type: timestamp)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4
+              Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+              Group By Operator
+                aggregations: sum(_col0)
+                keys: _col1 (type: bigint), _col2 (type: bigint), _col3 (type: string), _col4 (type: timestamp)
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4
+                Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  key expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: string), _col3 (type: timestamp)
+                  sort order: ++++
+                  Map-reduce partition columns: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: string), _col3 (type: timestamp)
+                  Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col4 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: sum(VALUE._col0)
+          keys: KEY._col0 (type: bigint), KEY._col1 (type: bigint), KEY._col2 (type: string), KEY._col3 (type: timestamp)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4
+          Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: _col4 (type: bigint), _col3 (type: timestamp), _col1 (type: bigint), _col2 (type: string)
+            outputColumnNames: _col0, _col1, _col2, _col3
+            Statistics: Num rows: 1 Data size: 143 Basic stats: COMPLETE Column stats: COMPLETE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 1 Data size: 143 Basic stats: COMPLETE Column stats: COMPLETE
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: DROP MATERIALIZED VIEW source_table_001_mv
+PREHOOK: type: DROP_MATERIALIZED_VIEW
+PREHOOK: Input: default@source_table_001_mv
+PREHOOK: Output: default@source_table_001_mv
+POSTHOOK: query: DROP MATERIALIZED VIEW source_table_001_mv
+POSTHOOK: type: DROP_MATERIALIZED_VIEW
+POSTHOOK: Input: default@source_table_001_mv
+POSTHOOK: Output: default@source_table_001_mv
+PREHOOK: query: CREATE MATERIALIZED VIEW source_table_001_mv ENABLE REWRITE AS
+SELECT
+SUM(A.DOWN_VOLUME) AS DOWN_VOLUME_SUM,
+SUM(A.UP_VOLUME) AS UP_VOLUME_SUM,
+A.MY_ID,FLOOR(A.MY_DATE to hour),A.MY_ID2,A.ENVIRONMENT
+from source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,FLOOR(A.MY_DATE to hour)
+PREHOOK: type: CREATE_MATERIALIZED_VIEW
+PREHOOK: Input: default@source_table_001
+PREHOOK: Output: database:default
+PREHOOK: Output: default@source_table_001_mv
+POSTHOOK: query: CREATE MATERIALIZED VIEW source_table_001_mv ENABLE REWRITE AS
+SELECT
+SUM(A.DOWN_VOLUME) AS DOWN_VOLUME_SUM,
+SUM(A.UP_VOLUME) AS UP_VOLUME_SUM,
+A.MY_ID,FLOOR(A.MY_DATE to hour),A.MY_ID2,A.ENVIRONMENT
+from source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,FLOOR(A.MY_DATE to hour)
+POSTHOOK: type: CREATE_MATERIALIZED_VIEW
+POSTHOOK: Input: default@source_table_001
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@source_table_001_mv
+PREHOOK: query: analyze table source_table_001_mv compute statistics for columns
+PREHOOK: type: QUERY
+PREHOOK: Input: default@source_table_001_mv
+PREHOOK: Output: default@source_table_001_mv
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table source_table_001_mv compute statistics for columns
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@source_table_001_mv
+POSTHOOK: Output: default@source_table_001_mv
+#### A masked pattern was here ####
+PREHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+FLOOR(A.MY_DATE to day),A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,FLOOR(A.MY_DATE to day)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+FLOOR(A.MY_DATE to day),A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,FLOOR(A.MY_DATE to day)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: default.source_table_001_mv
+            Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: down_volume_sum (type: bigint), my_id (type: bigint), my_id2 (type: bigint), environment (type: string), floor_day(_c3) (type: timestamp)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4
+              Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+              Group By Operator
+                aggregations: sum(_col0)
+                keys: _col1 (type: bigint), _col2 (type: bigint), _col3 (type: string), _col4 (type: timestamp)
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4
+                Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  key expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: string), _col3 (type: timestamp)
+                  sort order: ++++
+                  Map-reduce partition columns: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: string), _col3 (type: timestamp)
+                  Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col4 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: sum(VALUE._col0)
+          keys: KEY._col0 (type: bigint), KEY._col1 (type: bigint), KEY._col2 (type: string), KEY._col3 (type: timestamp)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4
+          Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: _col4 (type: bigint), _col3 (type: timestamp), _col1 (type: bigint), _col2 (type: string)
+            outputColumnNames: _col0, _col1, _col2, _col3
+            Statistics: Num rows: 1 Data size: 143 Basic stats: COMPLETE Column stats: COMPLETE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 1 Data size: 143 Basic stats: COMPLETE Column stats: COMPLETE
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+FLOOR(A.MY_DATE to hour),A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,FLOOR(A.MY_DATE to hour)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+FLOOR(A.MY_DATE to hour),A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,FLOOR(A.MY_DATE to hour)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: default.source_table_001_mv
+          Statistics: Num rows: 1 Data size: 143 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: down_volume_sum (type: bigint), _c3 (type: timestamp), my_id2 (type: bigint), environment (type: string)
+            outputColumnNames: _col0, _col1, _col2, _col3
+            Statistics: Num rows: 1 Data size: 143 Basic stats: COMPLETE Column stats: COMPLETE
+            ListSink
+
+PREHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+FLOOR(A.MY_DATE to second),A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,FLOOR(A.MY_DATE to second)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+SUM(A.DOWN_VOLUME) AS DOWNLOAD_VOLUME_BYTES,
+FLOOR(A.MY_DATE to second),A.MY_ID2,A.ENVIRONMENT
+FROM source_table_001 AS A
+group by A.MY_ID,A.MY_ID2,A.ENVIRONMENT,FLOOR(A.MY_DATE to second)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: a
+            Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+            Select Operator
+              expressions: my_id (type: bigint), my_id2 (type: bigint), environment (type: string), floor_second(my_date) (type: timestamp), down_volume (type: bigint)
+              outputColumnNames: _col0, _col1, _col2, _col3, _col4
+              Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+              Group By Operator
+                aggregations: sum(_col4)
+                keys: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: string), _col3 (type: timestamp)
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4
+                Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  key expressions: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: string), _col3 (type: timestamp)
+                  sort order: ++++
+                  Map-reduce partition columns: _col0 (type: bigint), _col1 (type: bigint), _col2 (type: string), _col3 (type: timestamp)
+                  Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col4 (type: bigint)
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: sum(VALUE._col0)
+          keys: KEY._col0 (type: bigint), KEY._col1 (type: bigint), KEY._col2 (type: string), KEY._col3 (type: timestamp)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4
+          Statistics: Num rows: 1 Data size: 151 Basic stats: COMPLETE Column stats: COMPLETE
+          Select Operator
+            expressions: _col4 (type: bigint), _col3 (type: timestamp), _col1 (type: bigint), _col2 (type: string)
+            outputColumnNames: _col0, _col1, _col2, _col3
+            Statistics: Num rows: 1 Data size: 143 Basic stats: COMPLETE Column stats: COMPLETE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 1 Data size: 143 Basic stats: COMPLETE Column stats: COMPLETE
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: DROP MATERIALIZED VIEW source_table_001_mv
+PREHOOK: type: DROP_MATERIALIZED_VIEW
+PREHOOK: Input: default@source_table_001_mv
+PREHOOK: Output: default@source_table_001_mv
+POSTHOOK: query: DROP MATERIALIZED VIEW source_table_001_mv
+POSTHOOK: type: DROP_MATERIALIZED_VIEW
+POSTHOOK: Input: default@source_table_001_mv
+POSTHOOK: Output: default@source_table_001_mv
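This second file tests granularity rollups over FLOOR(timestamp TO unit). Against the first, date-grained view, the hour query still rewrites by computing floor_hour over the view's my_date column and re-aggregating. Once the view is recreated grouped by FLOOR(MY_DATE to hour), the cases split as sketched here (queries taken from the output above):

  -- view grouped by floor(my_date to hour)
  -- 1) group by ..., floor(my_date to hour)   -> direct scan of the MV
  -- 2) group by ..., floor(my_date to day)    -> floor_day over the MV,
  --    valid because day buckets are unions of hour buckets
  -- 3) group by ..., floor(my_date to second) -> finer than the view
  --    stores, so no rewrite: the plan scans the base table (note the
  --    TableScan alias reverting to "a")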

http://git-wip-us.apache.org/repos/asf/hive/blob/1b01f9c4/ql/src/test/results/clientpositive/outer_reference_windowed.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/outer_reference_windowed.q.out b/ql/src/test/results/clientpositive/outer_reference_windowed.q.out
index aef80c5..3d918b7 100644
--- a/ql/src/test/results/clientpositive/outer_reference_windowed.q.out
+++ b/ql/src/test/results/clientpositive/outer_reference_windowed.q.out
@@ -578,36 +578,36 @@ STAGE PLANS:
     Map Reduce
       Map Operator Tree:
           TableScan
-            alias: e011_03
-            Statistics: Num rows: 4 Data size: 36 Basic stats: COMPLETE Column stats: NONE
+            alias: e011_01
+            Statistics: Num rows: 4 Data size: 12 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: c1 is not null (type: boolean)
-              Statistics: Num rows: 4 Data size: 36 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 12 Basic stats: COMPLETE Column stats: NONE
               Select Operator
-                expressions: c1 (type: decimal(15,2)), c2 (type: decimal(15,2))
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 4 Data size: 36 Basic stats: COMPLETE Column stats: NONE
+                expressions: c1 (type: decimal(15,2))
+                outputColumnNames: _col0
+                Statistics: Num rows: 4 Data size: 12 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: decimal(15,2))
                   sort order: +
                   Map-reduce partition columns: _col0 (type: decimal(15,2))
-                  Statistics: Num rows: 4 Data size: 36 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col1 (type: decimal(15,2))
+                  Statistics: Num rows: 4 Data size: 12 Basic stats: COMPLETE Column stats: NONE
           TableScan
-            alias: e011_01
-            Statistics: Num rows: 4 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+            alias: e011_03
+            Statistics: Num rows: 4 Data size: 36 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: c1 is not null (type: boolean)
-              Statistics: Num rows: 4 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 36 Basic stats: COMPLETE Column stats: NONE
               Select Operator
-                expressions: c1 (type: decimal(15,2))
-                outputColumnNames: _col0
-                Statistics: Num rows: 4 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+                expressions: c1 (type: decimal(15,2)), c2 (type: decimal(15,2))
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 4 Data size: 36 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: decimal(15,2))
                   sort order: +
                   Map-reduce partition columns: _col0 (type: decimal(15,2))
-                  Statistics: Num rows: 4 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 4 Data size: 36 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col1 (type: decimal(15,2))
       Reduce Operator Tree:
         Join Operator
           condition map:
@@ -616,13 +616,13 @@ STAGE PLANS:
             0 _col0 (type: decimal(15,2))
             1 _col0 (type: decimal(15,2))
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 4 Data size: 39 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 13 Basic stats: COMPLETE Column stats: NONE
           Group By Operator
-            aggregations: sum(_col2)
-            keys: _col0 (type: decimal(15,2)), _col1 (type: decimal(15,2))
+            aggregations: sum(_col0)
+            keys: _col1 (type: decimal(15,2)), _col2 (type: decimal(15,2))
             mode: hash
             outputColumnNames: _col0, _col1, _col2
-            Statistics: Num rows: 4 Data size: 39 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 13 Basic stats: COMPLETE Column stats: NONE
             File Output Operator
               compressed: false
               table:
@@ -638,7 +638,7 @@ STAGE PLANS:
               key expressions: _col0 (type: decimal(15,2)), _col1 (type: decimal(15,2))
               sort order: ++
               Map-reduce partition columns: _col0 (type: decimal(15,2)), _col1 (type: decimal(15,2))
-              Statistics: Num rows: 4 Data size: 39 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 13 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col2 (type: decimal(25,2))
       Reduce Operator Tree:
         Group By Operator
@@ -646,7 +646,7 @@ STAGE PLANS:
           keys: KEY._col0 (type: decimal(15,2)), KEY._col1 (type: decimal(15,2))
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 2 Data size: 19 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
             table:
@@ -662,13 +662,13 @@ STAGE PLANS:
               key expressions: _col1 (type: decimal(15,2)), _col0 (type: decimal(15,2))
               sort order: ++
               Map-reduce partition columns: _col1 (type: decimal(15,2))
-              Statistics: Num rows: 2 Data size: 19 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col2 (type: decimal(25,2))
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey1 (type: decimal(15,2)), KEY.reducesinkkey0 (type: decimal(15,2)), VALUE._col0 (type: decimal(25,2))
           outputColumnNames: _col0, _col1, _col2
-          Statistics: Num rows: 2 Data size: 19 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
           PTF Operator
             Function definitions:
                 Input definition
@@ -688,14 +688,14 @@ STAGE PLANS:
                         name: sum
                         window function: GenericUDAFSumHiveDecimal
                         window frame: RANGE PRECEDING(MAX)~CURRENT
-            Statistics: Num rows: 2 Data size: 19 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: sum_window_0 (type: decimal(35,2))
               outputColumnNames: _col0
-              Statistics: Num rows: 2 Data size: 19 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
                 compressed: false
-                Statistics: Num rows: 2 Data size: 19 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
                 table:
                     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -821,41 +821,37 @@ STAGE PLANS:
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2
           Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
-          Select Operator
-            expressions: _col1 (type: decimal(15,2)), _col0 (type: decimal(15,2)), _col2 (type: double)
-            outputColumnNames: _col0, _col1, _col2
+          PTF Operator
+            Function definitions:
+                Input definition
+                  input alias: ptf_0
+                  output shape: _col0: decimal(15,2), _col1: decimal(15,2), _col2: double
+                  type: WINDOWING
+                Windowing table definition
+                  input alias: ptf_1
+                  name: windowingtablefunction
+                  order by: _col1 ASC NULLS FIRST
+                  partition by: _col0
+                  raw input shape:
+                  window functions:
+                      window function definition
+                        alias: sum_window_0
+                        arguments: _col2
+                        name: sum
+                        window function: GenericUDAFSumDouble
+                        window frame: RANGE PRECEDING(MAX)~CURRENT
             Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
-            PTF Operator
-              Function definitions:
-                  Input definition
-                    input alias: ptf_0
-                    output shape: _col0: decimal(15,2), _col1: decimal(15,2), _col2: double
-                    type: WINDOWING
-                  Windowing table definition
-                    input alias: ptf_1
-                    name: windowingtablefunction
-                    order by: _col0 ASC NULLS FIRST
-                    partition by: _col1
-                    raw input shape:
-                    window functions:
-                        window function definition
-                          alias: sum_window_0
-                          arguments: _col2
-                          name: sum
-                          window function: GenericUDAFSumDouble
-                          window frame: RANGE PRECEDING(MAX)~CURRENT
+            Select Operator
+              expressions: sum_window_0 (type: double)
+              outputColumnNames: _col0
               Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: sum_window_0 (type: double)
-                outputColumnNames: _col0
+              File Output Operator
+                compressed: false
                 Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
     Fetch Operator
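The remaining hunks, here and in query12.q.out/query20.q.out below, are plan-only updates from the same patch: intermediate Select Operators that merely permuted columns are removed, join input order and column numbering shift accordingly, and the windowing PTF's "partition by"/"order by" now name the re-numbered columns directly. Only the shape shown by EXPLAIN changes in these hunks. As a hypothetical reconstruction from the plan fragments above (the .q files are not part of this mail), the affected pattern is a windowed aggregate over a grouped join, roughly:

  select sum(sum(e011_03.c2)) over (partition by e011_03.c1
                                    order by e011_01.c1)
  from e011_01 join e011_03 on e011_01.c1 = e011_03.c1
  group by e011_01.c1, e011_03.c1;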

http://git-wip-us.apache.org/repos/asf/hive/blob/1b01f9c4/ql/src/test/results/clientpositive/perf/spark/query12.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query12.q.out b/ql/src/test/results/clientpositive/perf/spark/query12.q.out
index 411ad88..af21058 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query12.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query12.q.out
@@ -178,20 +178,16 @@ STAGE PLANS:
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                 Statistics: Num rows: 87121617 Data size: 11846020363 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col2 (type: string), _col3 (type: string), _col0 (type: string), _col1 (type: string), _col4 (type: decimal(7,2)), _col5 (type: decimal(17,2))
-                  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                Reduce Output Operator
+                  key expressions: _col1 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col1 (type: string)
                   Statistics: Num rows: 87121617 Data size: 11846020363 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: _col3 (type: string)
-                    sort order: +
-                    Map-reduce partition columns: _col3 (type: string)
-                    Statistics: Num rows: 87121617 Data size: 11846020363 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col4 (type: decimal(7,2)), _col5 (type: decimal(17,2))
+                  value expressions: _col0 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: decimal(7,2)), _col5 (type: decimal(17,2))
         Reducer 4 
             Reduce Operator Tree:
               Select Operator
-                expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), KEY.reducesinkkey0 (type: string), VALUE._col3 (type: decimal(7,2)), VALUE._col4 (type: decimal(17,2))
+                expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: decimal(7,2)), VALUE._col4 (type: decimal(17,2))
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                 Statistics: Num rows: 87121617 Data size: 11846020363 Basic stats: COMPLETE Column stats: NONE
                 PTF Operator
@@ -203,8 +199,8 @@ STAGE PLANS:
                       Windowing table definition
                         input alias: ptf_1
                         name: windowingtablefunction
-                        order by: _col3 ASC NULLS FIRST
-                        partition by: _col3
+                        order by: _col1 ASC NULLS FIRST
+                        partition by: _col1
                         raw input shape:
                         window functions:
                             window function definition
@@ -215,7 +211,7 @@ STAGE PLANS:
                               window frame: ROWS PRECEDING(MAX)~FOLLOWING(MAX)
                   Statistics: Num rows: 87121617 Data size: 11846020363 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: decimal(7,2)), _col5 (type: decimal(17,2)), ((_col5 * 100) / sum_window_0) (type: decimal(38,17)), _col0 (type: string)
+                    expressions: _col3 (type: string), _col0 (type: string), _col1 (type: string), _col4 (type: decimal(7,2)), _col5 (type: decimal(17,2)), ((_col5 * 100) / sum_window_0) (type: decimal(38,17)), _col2 (type: string)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
                     Statistics: Num rows: 87121617 Data size: 11846020363 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/1b01f9c4/ql/src/test/results/clientpositive/perf/spark/query20.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query20.q.out b/ql/src/test/results/clientpositive/perf/spark/query20.q.out
index 8aa57e1..9097b35 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query20.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query20.q.out
@@ -170,20 +170,16 @@ STAGE PLANS:
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                 Statistics: Num rows: 174233858 Data size: 23594764438 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col2 (type: string), _col3 (type: string), _col0 (type: string), _col1 (type: string), _col4 (type: decimal(7,2)), _col5 (type: decimal(17,2))
-                  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+                Reduce Output Operator
+                  key expressions: _col1 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col1 (type: string)
                   Statistics: Num rows: 174233858 Data size: 23594764438 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: _col3 (type: string)
-                    sort order: +
-                    Map-reduce partition columns: _col3 (type: string)
-                    Statistics: Num rows: 174233858 Data size: 23594764438 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col4 (type: decimal(7,2)), _col5 (type: decimal(17,2))
+                  value expressions: _col0 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: decimal(7,2)), _col5 (type: decimal(17,2))
         Reducer 4 
             Reduce Operator Tree:
               Select Operator
-                expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), KEY.reducesinkkey0 (type: string), VALUE._col3 (type: decimal(7,2)), VALUE._col4 (type: decimal(17,2))
+                expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: decimal(7,2)), VALUE._col4 (type: decimal(17,2))
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                 Statistics: Num rows: 174233858 Data size: 23594764438 Basic stats: COMPLETE Column stats: NONE
                 PTF Operator
@@ -195,8 +191,8 @@ STAGE PLANS:
                       Windowing table definition
                         input alias: ptf_1
                         name: windowingtablefunction
-                        order by: _col3 ASC NULLS FIRST
-                        partition by: _col3
+                        order by: _col1 ASC NULLS FIRST
+                        partition by: _col1
                         raw input shape:
                         window functions:
                             window function definition
@@ -207,7 +203,7 @@ STAGE PLANS:
                               window frame: ROWS PRECEDING(MAX)~FOLLOWING(MAX)
                   Statistics: Num rows: 174233858 Data size: 23594764438 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: decimal(7,2)), _col5 (type: decimal(17,2)), ((_col5 * 100) / sum_window_0) (type: decimal(38,17)), _col0 (type: string)
+                    expressions: _col3 (type: string), _col0 (type: string), _col1 (type: string), _col4 (type: decimal(7,2)), _col5 (type: decimal(17,2)), ((_col5 * 100) / sum_window_0) (type: decimal(38,17)), _col2 (type: string)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
                     Statistics: Num rows: 174233858 Data size: 23594764438 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/1b01f9c4/ql/src/test/results/clientpositive/perf/spark/query47.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/perf/spark/query47.q.out b/ql/src/test/results/clientpositive/perf/spark/query47.q.out
index a61afcd..5175f80 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query47.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query47.q.out
@@ -415,33 +415,29 @@ STAGE PLANS:
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
                 Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col3 (type: string), _col2 (type: string), _col4 (type: string), _col5 (type: string), _col0 (type: int), _col1 (type: int), _col6 (type: decimal(17,2))
-                  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+                Reduce Output Operator
+                  key expressions: _col3 (type: string), _col2 (type: string), _col4 (type: string), _col5 (type: string), _col0 (type: int)
+                  sort order: +++++
+                  Map-reduce partition columns: _col3 (type: string), _col2 (type: string), _col4 (type: string), _col5 (type: string), _col0 (type: int)
                   Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: int)
-                    sort order: +++++
-                    Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: int)
-                    Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: _col5 (type: int), _col6 (type: decimal(17,2))
+                  value expressions: _col1 (type: int), _col6 (type: decimal(17,2))
         Reducer 15 
             Reduce Operator Tree:
               Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string), KEY.reducesinkkey4 (type: int), VALUE._col0 (type: int), VALUE._col1 (type: decimal(17,2))
+                expressions: KEY.reducesinkkey4 (type: int), VALUE._col0 (type: int), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey0 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string), VALUE._col1 (type: decimal(17,2))
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
                 Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
                 PTF Operator
                   Function definitions:
                       Input definition
                         input alias: ptf_0
-                        output shape: _col0: string, _col1: string, _col2: string, _col3: string, _col4: int, _col5: int, _col6: decimal(17,2)
+                        output shape: _col0: int, _col1: int, _col2: string, _col3: string, _col4: string, _col5: string, _col6: decimal(17,2)
                         type: WINDOWING
                       Windowing table definition
                         input alias: ptf_1
                         name: windowingtablefunction
-                        order by: _col0 ASC NULLS FIRST, _col1 ASC NULLS FIRST, _col2 ASC NULLS FIRST, _col3 ASC NULLS FIRST, _col4 ASC NULLS FIRST
-                        partition by: _col0, _col1, _col2, _col3, _col4
+                        order by: _col3 ASC NULLS FIRST, _col2 ASC NULLS FIRST, _col4 ASC NULLS FIRST, _col5 ASC NULLS FIRST, _col0 ASC NULLS FIRST
+                        partition by: _col3, _col2, _col4, _col5, _col0
                         raw input shape:
                         window functions:
                             window function definition
@@ -452,54 +448,54 @@ STAGE PLANS:
                               window frame: ROWS PRECEDING(MAX)~FOLLOWING(MAX)
                   Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
-                    expressions: avg_window_0 (type: decimal(21,6)), _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: int), _col5 (type: int), _col6 (type: decimal(17,2))
+                    expressions: avg_window_0 (type: decimal(21,6)), _col0 (type: int), _col1 (type: int), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string), _col6 (type: decimal(17,2))
                     outputColumnNames: avg_window_0, _col0, _col1, _col2, _col3, _col4, _col5, _col6
                     Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
-                      key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: int), _col5 (type: int)
+                      key expressions: _col3 (type: string), _col2 (type: string), _col4 (type: string), _col5 (type: string), _col0 (type: int), _col1 (type: int)
                       sort order: ++++++
-                      Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
+                      Map-reduce partition columns: _col3 (type: string), _col2 (type: string), _col4 (type: string), _col5 (type: string)
                       Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
                       value expressions: avg_window_0 (type: decimal(21,6)), _col6 (type: decimal(17,2))
         Reducer 16 
             Reduce Operator Tree:
               Select Operator
-                expressions: VALUE._col0 (type: decimal(21,6)), KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string), KEY.reducesinkkey4 (type: int), KEY.reducesinkkey5 (type: int), VALUE._col1 (type: decimal(17,2))
+                expressions: VALUE._col0 (type: decimal(21,6)), KEY.reducesinkkey4 (type: int), KEY.reducesinkkey5 (type: int), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey0 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string), VALUE._col1 (type: decimal(17,2))
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
                 Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
                 PTF Operator
                   Function definitions:
                       Input definition
                         input alias: ptf_0
-                        output shape: _col0: decimal(21,6), _col1: string, _col2: string, _col3: string, _col4: string, _col5: int, _col6: int, _col7: decimal(17,2)
+                        output shape: _col0: decimal(21,6), _col1: int, _col2: int, _col3: string, _col4: string, _col5: string, _col6: string, _col7: decimal(17,2)
                         type: WINDOWING
                       Windowing table definition
                         input alias: ptf_1
                         name: windowingtablefunction
-                        order by: _col5 ASC NULLS FIRST, _col6 ASC NULLS FIRST
-                        partition by: _col1, _col2, _col3, _col4
+                        order by: _col1 ASC NULLS FIRST, _col2 ASC NULLS FIRST
+                        partition by: _col4, _col3, _col5, _col6
                         raw input shape:
                         window functions:
                             window function definition
                               alias: rank_window_1
-                              arguments: _col5, _col6
+                              arguments: _col1, _col2
                               name: rank
                               window function: GenericUDAFRankEvaluator
                               window frame: ROWS PRECEDING(MAX)~FOLLOWING(MAX)
                               isPivotResult: true
                   Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: ((_col0 > 0) and (_col5 = 2000) and rank_window_1 is not null) (type: boolean)
+                    predicate: ((_col0 > 0) and (_col1 = 2000) and rank_window_1 is not null) (type: boolean)
                     Statistics: Num rows: 63887519 Data size: 5636175475 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: rank_window_1 (type: int), _col0 (type: decimal(21,6)), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: int), _col7 (type: decimal(17,2))
+                      expressions: rank_window_1 (type: int), _col0 (type: decimal(21,6)), _col1 (type: int), _col2 (type: int), _col3 (type: string), _col4 (type: string), _col5 (type: string), _col6 (type: string), _col7 (type: decimal(17,2))
                       outputColumnNames: rank_window_1, _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
                       Statistics: Num rows: 63887519 Data size: 5636175475 Basic stats: COMPLETE Column stats: NONE
                       Filter Operator
                         predicate: CASE WHEN ((_col0 > 0)) THEN (((abs((_col7 - _col0)) / _col0) > 0.1)) ELSE (null) END (type: boolean)
                         Statistics: Num rows: 31943759 Data size: 2818087693 Basic stats: COMPLETE Column stats: NONE
                         Select Operator
-                          expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: int), _col7 (type: decimal(17,2)), _col0 (type: decimal(21,6)), rank_window_1 (type: int)
+                          expressions: _col4 (type: string), _col3 (type: string), _col5 (type: string), _col6 (type: string), _col1 (type: int), _col2 (type: int), _col7 (type: decimal(17,2)), _col0 (type: decimal(21,6)), rank_window_1 (type: int)
                           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
                           Statistics: Num rows: 31943759 Data size: 2818087693 Basic stats: COMPLETE Column stats: NONE
                           Reduce Output Operator
@@ -582,38 +578,34 @@ STAGE PLANS:
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
                 Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col3 (type: string), _col2 (type: string), _col4 (type: string), _col5 (type: string), _col0 (type: int), _col1 (type: int), _col6 (type: decimal(17,2))
-                  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+                Reduce Output Operator
+                  key expressions: _col3 (type: string), _col2 (type: string), _col4 (type: string), _col5 (type: string), _col0 (type: int), _col1 (type: int)
+                  sort order: ++++++
+                  Map-reduce partition columns: _col3 (type: string), _col2 (type: string), _col4 (type: string), _col5 (type: string)
                   Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: int), _col5 (type: int)
-                    sort order: ++++++
-                    Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
-                    Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: _col6 (type: decimal(17,2))
+                  value expressions: _col6 (type: decimal(17,2))
         Reducer 24 
             Reduce Operator Tree:
               Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string), KEY.reducesinkkey4 (type: int), KEY.reducesinkkey5 (type: int), VALUE._col0 (type: decimal(17,2))
+                expressions: KEY.reducesinkkey4 (type: int), KEY.reducesinkkey5 (type: int), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey0 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string), VALUE._col0 (type: decimal(17,2))
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
                 Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
                 PTF Operator
                   Function definitions:
                       Input definition
                         input alias: ptf_0
-                        output shape: _col0: string, _col1: string, _col2: string, _col3: string, _col4: int, _col5: int, _col6: decimal(17,2)
+                        output shape: _col0: int, _col1: int, _col2: string, _col3: string, _col4: string, _col5: string, _col6: decimal(17,2)
                         type: WINDOWING
                       Windowing table definition
                         input alias: ptf_1
                         name: windowingtablefunction
-                        order by: _col4 ASC NULLS FIRST, _col5 ASC NULLS FIRST
-                        partition by: _col0, _col1, _col2, _col3
+                        order by: _col0 ASC NULLS FIRST, _col1 ASC NULLS FIRST
+                        partition by: _col3, _col2, _col4, _col5
                         raw input shape:
                         window functions:
                             window function definition
                               alias: rank_window_0
-                              arguments: _col4, _col5
+                              arguments: _col0, _col1
                               name: rank
                               window function: GenericUDAFRankEvaluator
                               window frame: ROWS PRECEDING(MAX)~FOLLOWING(MAX)
@@ -623,7 +615,7 @@ STAGE PLANS:
                     predicate: rank_window_0 is not null (type: boolean)
                     Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col6 (type: decimal(17,2)), rank_window_0 (type: int)
+                      expressions: _col3 (type: string), _col2 (type: string), _col4 (type: string), _col5 (type: string), _col6 (type: decimal(17,2)), rank_window_0 (type: int)
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                       Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
@@ -674,38 +666,34 @@ STAGE PLANS:
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
                 Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col3 (type: string), _col2 (type: string), _col4 (type: string), _col5 (type: string), _col0 (type: int), _col1 (type: int), _col6 (type: decimal(17,2))
-                  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+                Reduce Output Operator
+                  key expressions: _col3 (type: string), _col2 (type: string), _col4 (type: string), _col5 (type: string), _col0 (type: int), _col1 (type: int)
+                  sort order: ++++++
+                  Map-reduce partition columns: _col3 (type: string), _col2 (type: string), _col4 (type: string), _col5 (type: string)
                   Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: int), _col5 (type: int)
-                    sort order: ++++++
-                    Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
-                    Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
-                    value expressions: _col6 (type: decimal(17,2))
+                  value expressions: _col6 (type: decimal(17,2))
         Reducer 5 
             Reduce Operator Tree:
               Select Operator
-                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string), KEY.reducesinkkey4 (type: int), KEY.reducesinkkey5 (type: int), VALUE._col0 (type: decimal(17,2))
+                expressions: KEY.reducesinkkey4 (type: int), KEY.reducesinkkey5 (type: int), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey0 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string), VALUE._col0 (type: decimal(17,2))
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
                 Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
                 PTF Operator
                   Function definitions:
                       Input definition
                         input alias: ptf_0
-                        output shape: _col0: string, _col1: string, _col2: string, _col3: string, _col4: int, _col5: int, _col6: decimal(17,2)
+                        output shape: _col0: int, _col1: int, _col2: string, _col3: string, _col4: string, _col5: string, _col6: decimal(17,2)
                         type: WINDOWING
                       Windowing table definition
                         input alias: ptf_1
                         name: windowingtablefunction
-                        order by: _col4 ASC NULLS FIRST, _col5 ASC NULLS FIRST
-                        partition by: _col0, _col1, _col2, _col3
+                        order by: _col0 ASC NULLS FIRST, _col1 ASC NULLS FIRST
+                        partition by: _col3, _col2, _col4, _col5
                         raw input shape:
                         window functions:
                             window function definition
                               alias: rank_window_0
-                              arguments: _col4, _col5
+                              arguments: _col0, _col1
                               name: rank
                               window function: GenericUDAFRankEvaluator
                               window frame: ROWS PRECEDING(MAX)~FOLLOWING(MAX)
@@ -715,7 +703,7 @@ STAGE PLANS:
                     predicate: rank_window_0 is not null (type: boolean)
                     Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col6 (type: decimal(17,2)), rank_window_0 (type: int)
+                      expressions: _col3 (type: string), _col2 (type: string), _col4 (type: string), _col5 (type: string), _col6 (type: decimal(17,2)), rank_window_0 (type: int)
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
                       Statistics: Num rows: 383325119 Data size: 33817053293 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator