Posted to commits@hive.apache.org by ha...@apache.org on 2018/03/24 17:32:43 UTC

[6/9] hive git commit: HIVE-18780 : Improve schema discovery For Druid Storage Handler (Slim Bouguerra via Ashutosh Chauhan) HIVE-18993 : Use Druid Expressions HIVE-14518 : Support 'having' translation for Druid GroupBy queries HIVE-18957 : Upgrade Calci

http://git-wip-us.apache.org/repos/asf/hive/blob/696affa2/ql/src/test/results/clientpositive/druid/druidmini_floorTime.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/druid/druidmini_floorTime.q.out b/ql/src/test/results/clientpositive/druid/druidmini_floorTime.q.out
new file mode 100644
index 0000000..0405b7d
--- /dev/null
+++ b/ql/src/test/results/clientpositive/druid/druidmini_floorTime.q.out
@@ -0,0 +1,1026 @@
+PREHOOK: query: CREATE TABLE druid_table
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
+AS
+SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`,
+  cstring1,
+  cstring2,
+  cdouble,
+  cfloat,
+  ctinyint,
+  csmallint,
+  cint,
+  cbigint,
+  cboolean1,
+  cboolean2
+  FROM alltypesorc where ctimestamp1 IS NOT NULL
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@alltypesorc
+PREHOOK: Output: database:default
+PREHOOK: Output: default@druid_table
+POSTHOOK: query: CREATE TABLE druid_table
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES ("druid.segment.granularity" = "HOUR", "druid.query.granularity" = "MINUTE")
+AS
+SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`,
+  cstring1,
+  cstring2,
+  cdouble,
+  cfloat,
+  ctinyint,
+  csmallint,
+  cint,
+  cbigint,
+  cboolean1,
+  cboolean2
+  FROM alltypesorc where ctimestamp1 IS NOT NULL
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@alltypesorc
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@druid_table
+POSTHOOK: Lineage: druid_table.__time EXPRESSION [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp1, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: druid_table.cbigint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), ]
+POSTHOOK: Lineage: druid_table.cboolean1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean1, type:boolean, comment:null), ]
+POSTHOOK: Lineage: druid_table.cboolean2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cboolean2, type:boolean, comment:null), ]
+POSTHOOK: Lineage: druid_table.cdouble SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), ]
+POSTHOOK: Lineage: druid_table.cfloat SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: druid_table.cint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: druid_table.csmallint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:csmallint, type:smallint, comment:null), ]
+POSTHOOK: Lineage: druid_table.cstring1 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, comment:null), ]
+POSTHOOK: Lineage: druid_table.cstring2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:cstring2, type:string, comment:null), ]
+POSTHOOK: Lineage: druid_table.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ]
+PREHOOK: query: SELECT floor(`__time` to SECOND) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY floor(`__time` to SECOND)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT floor(`__time` to SECOND) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY floor(`__time` to SECOND)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1969-12-31 15:59:00.0 US/Pacific
+1969-12-31 16:00:00.0 US/Pacific
+PREHOOK: query: EXPLAIN SELECT floor(`__time` to SECOND) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY floor(`__time` to SECOND)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT floor(`__time` to SECOND) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY floor(`__time` to SECOND)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: druid_table
+                  filterExpr: (character_length(UDFToString(ctinyint)) < 10) (type: boolean)
+                  properties:
+                    druid.fieldNames vc,ctinyint
+                    druid.fieldTypes timestamp with local time zone,tinyint
+                    druid.query.json {"queryType":"scan","dataSource":"default.druid_table","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"and","fields":[{"type":"expression","expression":"(strlen(CAST(\"ctinyint\", 'STRING')) > 1)"},{"type":"expression","expression":"((pow(\"cfloat\",2) * pow(\"csmallint\",3)) > 1)"},{"type":"expression","expression":"(sqrt(abs(\"ctinyint\")) > 3)"}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","ctinyint"],"resultFormat":"compactedList"}
+                    druid.query.type scan
+                  Statistics: Num rows: 9173 Data size: 383504 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (character_length(UDFToString(ctinyint)) < 10) (type: boolean)
+                    Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: floor_second(vc) (type: timestamp with local time zone)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                      Group By Operator
+                        keys: _col0 (type: timestamp with local time zone)
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: timestamp with local time zone)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: timestamp with local time zone)
+                          Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: timestamp with local time zone)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1528 Data size: 63882 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1528 Data size: 63882 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT floor(`__time` to MINUTE) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY floor(`__time` to MINUTE)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT floor(`__time` to MINUTE) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY floor(`__time` to MINUTE)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1969-12-31 15:59:00.0 US/Pacific
+1969-12-31 16:00:00.0 US/Pacific
+PREHOOK: query: EXPLAIN SELECT floor(`__time` to MINUTE) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY floor(`__time` to MINUTE)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT floor(`__time` to MINUTE) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY floor(`__time` to MINUTE)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: druid_table
+                  filterExpr: (character_length(UDFToString(ctinyint)) < 10) (type: boolean)
+                  properties:
+                    druid.fieldNames vc,ctinyint
+                    druid.fieldTypes timestamp with local time zone,tinyint
+                    druid.query.json {"queryType":"scan","dataSource":"default.druid_table","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"and","fields":[{"type":"expression","expression":"(strlen(CAST(\"ctinyint\", 'STRING')) > 1)"},{"type":"expression","expression":"((pow(\"cfloat\",2) * pow(\"csmallint\",3)) > 1)"},{"type":"expression","expression":"(sqrt(abs(\"ctinyint\")) > 3)"}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","ctinyint"],"resultFormat":"compactedList"}
+                    druid.query.type scan
+                  Statistics: Num rows: 9173 Data size: 383504 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (character_length(UDFToString(ctinyint)) < 10) (type: boolean)
+                    Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: floor_minute(vc) (type: timestamp with local time zone)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                      Group By Operator
+                        keys: _col0 (type: timestamp with local time zone)
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: timestamp with local time zone)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: timestamp with local time zone)
+                          Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: timestamp with local time zone)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1528 Data size: 63882 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1528 Data size: 63882 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT floor(`__time` to HOUR) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY floor(`__time` to HOUR)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT floor(`__time` to HOUR) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY floor(`__time` to HOUR)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1969-12-31 15:00:00.0 US/Pacific
+1969-12-31 16:00:00.0 US/Pacific
+PREHOOK: query: EXPLAIN SELECT floor(`__time` to HOUR) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY floor(`__time` to HOUR)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT floor(`__time` to HOUR) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY floor(`__time` to HOUR)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: druid_table
+                  filterExpr: (character_length(UDFToString(ctinyint)) < 10) (type: boolean)
+                  properties:
+                    druid.fieldNames vc,ctinyint
+                    druid.fieldTypes timestamp with local time zone,tinyint
+                    druid.query.json {"queryType":"scan","dataSource":"default.druid_table","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"and","fields":[{"type":"expression","expression":"(strlen(CAST(\"ctinyint\", 'STRING')) > 1)"},{"type":"expression","expression":"((pow(\"cfloat\",2) * pow(\"csmallint\",3)) > 1)"},{"type":"expression","expression":"(sqrt(abs(\"ctinyint\")) > 3)"}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","ctinyint"],"resultFormat":"compactedList"}
+                    druid.query.type scan
+                  Statistics: Num rows: 9173 Data size: 383504 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (character_length(UDFToString(ctinyint)) < 10) (type: boolean)
+                    Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: floor_hour(vc) (type: timestamp with local time zone)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                      Group By Operator
+                        keys: _col0 (type: timestamp with local time zone)
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: timestamp with local time zone)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: timestamp with local time zone)
+                          Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: timestamp with local time zone)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1528 Data size: 63882 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1528 Data size: 63882 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT EXTRACT(DAY from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(DAY from `__time`)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT EXTRACT(DAY from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(DAY from `__time`)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+31
+PREHOOK: query: EXPLAIN SELECT EXTRACT(DAY from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(DAY from `__time`)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT EXTRACT(DAY from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(DAY from `__time`)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: druid_table
+                  filterExpr: (character_length(UDFToString(ctinyint)) < 10) (type: boolean)
+                  properties:
+                    druid.fieldNames vc,ctinyint
+                    druid.fieldTypes timestamp with local time zone,tinyint
+                    druid.query.json {"queryType":"scan","dataSource":"default.druid_table","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"and","fields":[{"type":"expression","expression":"(strlen(CAST(\"ctinyint\", 'STRING')) > 1)"},{"type":"expression","expression":"((pow(\"cfloat\",2) * pow(\"csmallint\",3)) > 1)"},{"type":"expression","expression":"(sqrt(abs(\"ctinyint\")) > 3)"}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","ctinyint"],"resultFormat":"compactedList"}
+                    druid.query.type scan
+                  Statistics: Num rows: 9173 Data size: 383504 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (character_length(UDFToString(ctinyint)) < 10) (type: boolean)
+                    Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: day(vc) (type: int)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                      Group By Operator
+                        keys: _col0 (type: int)
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: int)
+                          Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1528 Data size: 63882 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1528 Data size: 63882 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT EXTRACT(WEEK from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(WEEK from `__time`)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT EXTRACT(WEEK from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(WEEK from `__time`)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1
+PREHOOK: query: EXPLAIN SELECT EXTRACT(WEEK from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(WEEK from `__time`)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT EXTRACT(WEEK from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(WEEK from `__time`)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: druid_table
+                  filterExpr: (character_length(UDFToString(ctinyint)) < 10) (type: boolean)
+                  properties:
+                    druid.fieldNames vc,ctinyint
+                    druid.fieldTypes timestamp with local time zone,tinyint
+                    druid.query.json {"queryType":"scan","dataSource":"default.druid_table","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"and","fields":[{"type":"expression","expression":"(strlen(CAST(\"ctinyint\", 'STRING')) > 1)"},{"type":"expression","expression":"((pow(\"cfloat\",2) * pow(\"csmallint\",3)) > 1)"},{"type":"expression","expression":"(sqrt(abs(\"ctinyint\")) > 3)"}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","ctinyint"],"resultFormat":"compactedList"}
+                    druid.query.type scan
+                  Statistics: Num rows: 9173 Data size: 383504 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (character_length(UDFToString(ctinyint)) < 10) (type: boolean)
+                    Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: weekofyear(vc) (type: int)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                      Group By Operator
+                        keys: _col0 (type: int)
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: int)
+                          Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1528 Data size: 63882 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1528 Data size: 63882 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT EXTRACT(MONTH from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(MONTH from `__time`)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT EXTRACT(MONTH from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(MONTH from `__time`)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+12
+PREHOOK: query: EXPLAIN SELECT EXTRACT(MONTH from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(MONTH from `__time`)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT EXTRACT(MONTH from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(MONTH from `__time`)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: druid_table
+                  filterExpr: (character_length(UDFToString(ctinyint)) < 10) (type: boolean)
+                  properties:
+                    druid.fieldNames vc,ctinyint
+                    druid.fieldTypes timestamp with local time zone,tinyint
+                    druid.query.json {"queryType":"scan","dataSource":"default.druid_table","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"and","fields":[{"type":"expression","expression":"(strlen(CAST(\"ctinyint\", 'STRING')) > 1)"},{"type":"expression","expression":"((pow(\"cfloat\",2) * pow(\"csmallint\",3)) > 1)"},{"type":"expression","expression":"(sqrt(abs(\"ctinyint\")) > 3)"}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","ctinyint"],"resultFormat":"compactedList"}
+                    druid.query.type scan
+                  Statistics: Num rows: 9173 Data size: 383504 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (character_length(UDFToString(ctinyint)) < 10) (type: boolean)
+                    Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: month(vc) (type: int)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                      Group By Operator
+                        keys: _col0 (type: int)
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: int)
+                          Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1528 Data size: 63882 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1528 Data size: 63882 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT EXTRACT(QUARTER from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(QUARTER from `__time`)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT EXTRACT(QUARTER from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(QUARTER from `__time`)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+4
+PREHOOK: query: EXPLAIN SELECT EXTRACT(QUARTER from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(QUARTER from `__time`)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT EXTRACT(QUARTER from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(QUARTER from `__time`)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: druid_table
+                  filterExpr: (character_length(UDFToString(ctinyint)) < 10) (type: boolean)
+                  properties:
+                    druid.fieldNames vc,ctinyint
+                    druid.fieldTypes timestamp with local time zone,tinyint
+                    druid.query.json {"queryType":"scan","dataSource":"default.druid_table","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"and","fields":[{"type":"expression","expression":"(strlen(CAST(\"ctinyint\", 'STRING')) > 1)"},{"type":"expression","expression":"((pow(\"cfloat\",2) * pow(\"csmallint\",3)) > 1)"},{"type":"expression","expression":"(sqrt(abs(\"ctinyint\")) > 3)"}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","ctinyint"],"resultFormat":"compactedList"}
+                    druid.query.type scan
+                  Statistics: Num rows: 9173 Data size: 383504 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (character_length(UDFToString(ctinyint)) < 10) (type: boolean)
+                    Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: quarter(vc) (type: int)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                      Group By Operator
+                        keys: _col0 (type: int)
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: int)
+                          Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1528 Data size: 63882 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1528 Data size: 63882 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT EXTRACT(YEAR from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(YEAR from `__time`)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT EXTRACT(YEAR from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(YEAR from `__time`)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1969
+PREHOOK: query: EXPLAIN SELECT EXTRACT(YEAR from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(YEAR from `__time`)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT EXTRACT(YEAR from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 GROUP BY EXTRACT(YEAR from `__time`)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: druid_table
+                  filterExpr: (character_length(UDFToString(ctinyint)) < 10) (type: boolean)
+                  properties:
+                    druid.fieldNames vc,ctinyint
+                    druid.fieldTypes timestamp with local time zone,tinyint
+                    druid.query.json {"queryType":"scan","dataSource":"default.druid_table","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"and","fields":[{"type":"expression","expression":"(strlen(CAST(\"ctinyint\", 'STRING')) > 1)"},{"type":"expression","expression":"((pow(\"cfloat\",2) * pow(\"csmallint\",3)) > 1)"},{"type":"expression","expression":"(sqrt(abs(\"ctinyint\")) > 3)"}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","ctinyint"],"resultFormat":"compactedList"}
+                    druid.query.type scan
+                  Statistics: Num rows: 9173 Data size: 383504 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (character_length(UDFToString(ctinyint)) < 10) (type: boolean)
+                    Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: year(vc) (type: int)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                      Group By Operator
+                        keys: _col0 (type: int)
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int)
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: int)
+                          Statistics: Num rows: 3057 Data size: 127806 Basic stats: COMPLETE Column stats: NONE
+        Reducer 2 
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1528 Data size: 63882 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1528 Data size: 63882 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: EXPLAIN SELECT EXTRACT(SECOND from `__time`) FROM druid_table WHERE EXTRACT(SECOND from `__time`) = 0  LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT EXTRACT(SECOND from `__time`) FROM druid_table WHERE EXTRACT(SECOND from `__time`) = 0  LIMIT 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_table
+          properties:
+            druid.fieldNames vc
+            druid.fieldTypes int
+            druid.query.json {"queryType":"scan","dataSource":"default.druid_table","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"bound","dimension":"__time","lower":"0","lowerStrict":false,"upper":"0","upperStrict":false,"ordering":"numeric","extractionFn":{"type":"timeFormat","format":"s","timeZone":"US/Pacific","locale":"en-US"}},"virtualColumns":[{"type":"expression","name":"vc","expression":"0","outputType":"LONG"}],"columns":["vc"],"resultFormat":"compactedList","limit":1}
+            druid.query.type scan
+          Select Operator
+            expressions: vc (type: int)
+            outputColumnNames: _col0
+            ListSink
+
+PREHOOK: query: SELECT EXTRACT(SECOND from `__time`) FROM druid_table WHERE EXTRACT(SECOND from `__time`) = 0  LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT EXTRACT(SECOND from `__time`) FROM druid_table WHERE EXTRACT(SECOND from `__time`) = 0  LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+0
+0
+PREHOOK: query: EXPLAIN SELECT EXTRACT(MINUTE from `__time`) FROM druid_table
+WHERE  EXTRACT(MINUTE from `__time`) >= 0 LIMIT 2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT EXTRACT(MINUTE from `__time`) FROM druid_table
+WHERE  EXTRACT(MINUTE from `__time`) >= 0 LIMIT 2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_table
+          properties:
+            druid.fieldNames vc
+            druid.fieldTypes int
+            druid.query.json {"queryType":"scan","dataSource":"default.druid_table","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"bound","dimension":"__time","lower":"0","lowerStrict":false,"ordering":"numeric","extractionFn":{"type":"timeFormat","format":"m","timeZone":"US/Pacific","locale":"en-US"}},"virtualColumns":[{"type":"expression","name":"vc","expression":"timestamp_extract(\"__time\",'MINUTE','US/Pacific')","outputType":"LONG"}],"columns":["vc"],"resultFormat":"compactedList","limit":2}
+            druid.query.type scan
+          Select Operator
+            expressions: vc (type: int)
+            outputColumnNames: _col0
+            ListSink
+
+PREHOOK: query: SELECT EXTRACT(MINUTE from `__time`) as minute FROM druid_table
+       WHERE  EXTRACT(MINUTE from `__time`) >= 0 order by minute LIMIT 2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT EXTRACT(MINUTE from `__time`) as minute FROM druid_table
+       WHERE  EXTRACT(MINUTE from `__time`) >= 0 order by minute LIMIT 2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+0
+0
+PREHOOK: query: EXPLAIN SELECT EXTRACT(HOUR from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT EXTRACT(HOUR from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 LIMIT 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+      Processor Tree:
+        TableScan
+          alias: druid_table
+          filterExpr: (character_length(UDFToString(ctinyint)) < 10) (type: boolean)
+          properties:
+            druid.fieldNames vc,ctinyint
+            druid.fieldTypes timestamp with local time zone,tinyint
+            druid.query.json {"queryType":"scan","dataSource":"default.druid_table","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"and","fields":[{"type":"expression","expression":"(strlen(CAST(\"ctinyint\", 'STRING')) > 1)"},{"type":"expression","expression":"((pow(\"cfloat\",2) * pow(\"csmallint\",3)) > 1)"},{"type":"expression","expression":"(sqrt(abs(\"ctinyint\")) > 3)"}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","ctinyint"],"resultFormat":"compactedList"}
+            druid.query.type scan
+          Filter Operator
+            predicate: (character_length(UDFToString(ctinyint)) < 10) (type: boolean)
+            Select Operator
+              expressions: hour(vc) (type: int)
+              outputColumnNames: _col0
+              Limit
+                Number of rows: 1
+                ListSink
+
+PREHOOK: query: SELECT EXTRACT(HOUR from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT EXTRACT(HOUR from `__time`) FROM druid_table
+WHERE character_length(CAST(ctinyint AS STRING)) > 1 AND char_length(CAST(ctinyint AS STRING)) < 10
+AND power(cfloat, 2) * pow(csmallint, 3) > 1 AND SQRT(ABS(ctinyint)) > 3 LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+15
+PREHOOK: query: EXPLAIN SELECT EXTRACT(DAY from `__time`), EXTRACT(DAY from `__time`) DIV 7 AS WEEK, SUBSTRING(CAST(CAST(`__time` AS DATE) AS STRING), 9, 2) AS day_str
+FROM druid_table WHERE SUBSTRING(CAST(CAST(`__time` AS DATE) AS STRING), 9, 2)  = 31 LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT EXTRACT(DAY from `__time`), EXTRACT(DAY from `__time`) DIV 7 AS WEEK, SUBSTRING(CAST(CAST(`__time` AS DATE) AS STRING), 9, 2) AS day_str
+FROM druid_table WHERE SUBSTRING(CAST(CAST(`__time` AS DATE) AS STRING), 9, 2)  = 31 LIMIT 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_table
+          properties:
+            druid.fieldNames vc,vc0,vc1
+            druid.fieldTypes int,bigint,string
+            druid.query.json {"queryType":"scan","dataSource":"default.druid_table","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"expression","expression":"(CAST(substring(timestamp_format(timestamp_floor(\"__time\",'P1D','','US/Pacific'),'yyyy-MM-dd','US/Pacific'), 8, 2), 'DOUBLE') == 31)"},"virtualColumns":[{"type":"expression","name":"vc","expression":"timestamp_extract(\"__time\",'DAY','US/Pacific')","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"div(timestamp_extract(\"__time\",'DAY','US/Pacific'),7)","outputType":"LONG"},{"type":"expression","name":"vc1","expression":"substring(timestamp_format(timestamp_floor(\"__time\",'P1D','','US/Pacific'),'yyyy-MM-dd','US/Pacific'), 8, 2)","outputType":"STRING"}],"columns":["vc","vc0","vc1"],"resultFormat":"compactedList","limit":1}
+            druid.query.type scan
+          Select Operator
+            expressions: vc (type: int), vc0 (type: bigint), vc1 (type: string)
+            outputColumnNames: _col0, _col1, _col2
+            ListSink
+
+PREHOOK: query: SELECT EXTRACT(DAY from `__time`) , EXTRACT(DAY from `__time`) DIV 7 AS WEEK, SUBSTRING(CAST(CAST(`__time` AS DATE) AS STRING), 9, 2) AS day_str
+FROM druid_table WHERE SUBSTRING(CAST(CAST(`__time` AS DATE) AS STRING), 9, 2)  = 31 LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT EXTRACT(DAY from `__time`) , EXTRACT(DAY from `__time`) DIV 7 AS WEEK, SUBSTRING(CAST(CAST(`__time` AS DATE) AS STRING), 9, 2) AS day_str
+FROM druid_table WHERE SUBSTRING(CAST(CAST(`__time` AS DATE) AS STRING), 9, 2)  = 31 LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+31	4	31
+31	4	31
+PREHOOK: query: EXPLAIN SELECT EXTRACT(WEEK from `__time`) FROM druid_table WHERE EXTRACT(WEEK from `__time`) >= 1
+AND  EXTRACT(WEEK from `__time`) DIV 4 + 1 = 1 LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT EXTRACT(WEEK from `__time`) FROM druid_table WHERE EXTRACT(WEEK from `__time`) >= 1
+AND  EXTRACT(WEEK from `__time`) DIV 4 + 1 = 1 LIMIT 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_table
+          properties:
+            druid.fieldNames vc
+            druid.fieldTypes int
+            druid.query.json {"queryType":"scan","dataSource":"default.druid_table","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"and","fields":[{"type":"bound","dimension":"__time","lower":"1","lowerStrict":false,"ordering":"numeric","extractionFn":{"type":"timeFormat","format":"w","timeZone":"US/Pacific","locale":"en-US"}},{"type":"expression","expression":"((div(timestamp_extract(\"__time\",'WEEK','US/Pacific'),4) + 1) == 1)"}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"timestamp_extract(\"__time\",'WEEK','US/Pacific')","outputType":"LONG"}],"columns":["vc"],"resultFormat":"compactedList","limit":1}
+            druid.query.type scan
+          Select Operator
+            expressions: vc (type: int)
+            outputColumnNames: _col0
+            ListSink
+
+PREHOOK: query: SELECT EXTRACT(WEEK from `__time`) FROM druid_table WHERE EXTRACT(WEEK from `__time`) >= 1
+AND  EXTRACT(WEEK from `__time`) DIV 4 + 1 = 1 LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT EXTRACT(WEEK from `__time`) FROM druid_table WHERE EXTRACT(WEEK from `__time`) >= 1
+AND  EXTRACT(WEEK from `__time`) DIV 4 + 1 = 1 LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1
+1
+PREHOOK: query: EXPLAIN SELECT EXTRACT(MONTH FROM  `__time`) / 4 + 1, EXTRACT(MONTH FROM  `__time`), SUBSTRING(CAST(CAST(`__time` AS DATE) AS STRING), 6, 2) as month_str FROM druid_table
+WHERE EXTRACT(MONTH FROM  `__time`) / 4 + 1 = 4 AND EXTRACT(MONTH FROM  `__time`) BETWEEN 11 AND 12 LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT EXTRACT(MONTH FROM  `__time`) / 4 + 1, EXTRACT(MONTH FROM  `__time`), SUBSTRING(CAST(CAST(`__time` AS DATE) AS STRING), 6, 2) as month_str FROM druid_table
+WHERE EXTRACT(MONTH FROM  `__time`) / 4 + 1 = 4 AND EXTRACT(MONTH FROM  `__time`) BETWEEN 11 AND 12 LIMIT 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_table
+          properties:
+            druid.fieldNames vc,vc0,vc1
+            druid.fieldTypes double,int,string
+            druid.query.json {"queryType":"scan","dataSource":"default.druid_table","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"and","fields":[{"type":"expression","expression":"(((CAST(timestamp_extract(\"__time\",'MONTH','US/Pacific'), 'DOUBLE') / 4) + 1) == 4)"},{"type":"bound","dimension":"__time","lower":"11","lowerStrict":false,"upper":"12","upperStrict":false,"ordering":"numeric","extractionFn":{"type":"timeFormat","format":"M","timeZone":"US/Pacific","locale":"en-US"}}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"((CAST(timestamp_extract(\"__time\",'MONTH','US/Pacific'), 'DOUBLE') / CAST(4, 'DOUBLE')) + CAST(1, 'DOUBLE'))","outputType":"DOUBLE"},{"type":"expression","name":"vc0","expression":"timestamp_extract(\"__time\",'MONTH','US/Pacific')","outputType":"LONG"},{"type":"expression","name":"vc1","expression":"substring(timestamp_format(timestamp_floor(\"__time\",'P1D','','US/Pacific'),'yyyy-MM-dd','US/Pacific'), 5, 2)","outputType":"STRING"}],"columns":["vc","vc0","vc1"],"resultFormat":"compactedList","limit":1}
+            druid.query.type scan
+          Select Operator
+            expressions: vc (type: double), vc0 (type: int), vc1 (type: string)
+            outputColumnNames: _col0, _col1, _col2
+            ListSink
+
+PREHOOK: query: SELECT EXTRACT(MONTH FROM  `__time`) / 4 + 1, EXTRACT(MONTH FROM  `__time`), SUBSTRING(CAST(CAST(`__time` AS DATE) AS STRING), 6, 2) as month_str FROM druid_table
+       WHERE EXTRACT(MONTH FROM  `__time`) / 4 + 1 = 4 AND EXTRACT(MONTH FROM  `__time`) BETWEEN 11 AND 12 LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT EXTRACT(MONTH FROM  `__time`) / 4 + 1, EXTRACT(MONTH FROM  `__time`), SUBSTRING(CAST(CAST(`__time` AS DATE) AS STRING), 6, 2) as month_str FROM druid_table
+       WHERE EXTRACT(MONTH FROM  `__time`) / 4 + 1 = 4 AND EXTRACT(MONTH FROM  `__time`) BETWEEN 11 AND 12 LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+4.0	12	12
+4.0	12	12
+PREHOOK: query: EXPLAIN SELECT EXTRACT(QUARTER from `__time`),  EXTRACT(MONTH FROM  `__time`) / 4 + 1 as q_number FROM druid_table WHERE EXTRACT(QUARTER from `__time`) >= 4
+          AND EXTRACT(MONTH FROM  `__time`) / 4 + 1 = 4 LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT EXTRACT(QUARTER from `__time`),  EXTRACT(MONTH FROM  `__time`) / 4 + 1 as q_number FROM druid_table WHERE EXTRACT(QUARTER from `__time`) >= 4
+          AND EXTRACT(MONTH FROM  `__time`) / 4 + 1 = 4 LIMIT 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_table
+          properties:
+            druid.fieldNames vc,vc0
+            druid.fieldTypes int,double
+            druid.query.json {"queryType":"scan","dataSource":"default.druid_table","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"and","fields":[{"type":"expression","expression":"(timestamp_extract(\"__time\",'QUARTER','US/Pacific') >= 4)"},{"type":"expression","expression":"(((CAST(timestamp_extract(\"__time\",'MONTH','US/Pacific'), 'DOUBLE') / 4) + 1) == 4)"}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"timestamp_extract(\"__time\",'QUARTER','US/Pacific')","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"((CAST(timestamp_extract(\"__time\",'MONTH','US/Pacific'), 'DOUBLE') / CAST(4, 'DOUBLE')) + CAST(1, 'DOUBLE'))","outputType":"DOUBLE"}],"columns":["vc","vc0"],"resultFormat":"compactedList","limit":1}
+            druid.query.type scan
+          Select Operator
+            expressions: vc (type: int), vc0 (type: double)
+            outputColumnNames: _col0, _col1
+            ListSink
+
+PREHOOK: query: SELECT EXTRACT(QUARTER from `__time`), EXTRACT(MONTH FROM  `__time`) / 4 + 1  as q_number FROM druid_table WHERE EXTRACT(QUARTER from `__time`) >= 4
+  AND EXTRACT(MONTH FROM  `__time`) / 4 + 1 = 4 LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT EXTRACT(QUARTER from `__time`), EXTRACT(MONTH FROM  `__time`) / 4 + 1  as q_number FROM druid_table WHERE EXTRACT(QUARTER from `__time`) >= 4
+  AND EXTRACT(MONTH FROM  `__time`) / 4 + 1 = 4 LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+4	4.0
+4	4.0
+PREHOOK: query: EXPLAIN SELECT EXTRACT(YEAR from `__time`), SUBSTRING(CAST(CAST(`__time` AS DATE) AS STRING), 1, 4) AS year_str FROM druid_table WHERE EXTRACT(YEAR from `__time`) >= 1969
+AND CAST(EXTRACT(YEAR from `__time`) as STRING) = '1969' LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN SELECT EXTRACT(YEAR from `__time`), SUBSTRING(CAST(CAST(`__time` AS DATE) AS STRING), 1, 4) AS year_str FROM druid_table WHERE EXTRACT(YEAR from `__time`) >= 1969
+AND CAST(EXTRACT(YEAR from `__time`) as STRING) = '1969' LIMIT 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_table
+          properties:
+            druid.fieldNames vc,vc0
+            druid.fieldTypes int,string
+            druid.query.json {"queryType":"scan","dataSource":"default.druid_table","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"filter":{"type":"and","fields":[{"type":"bound","dimension":"__time","lower":"1969","lowerStrict":false,"ordering":"numeric","extractionFn":{"type":"timeFormat","format":"yyyy","timeZone":"US/Pacific","locale":"en-US"}},{"type":"expression","expression":"(CAST(timestamp_extract(\"__time\",'YEAR','US/Pacific'), 'STRING') == '1969')"}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"timestamp_extract(\"__time\",'YEAR','US/Pacific')","outputType":"LONG"},{"type":"expression","name":"vc0","expression":"substring(timestamp_format(timestamp_floor(\"__time\",'P1D','','US/Pacific'),'yyyy-MM-dd','US/Pacific'), 0, 4)","outputType":"STRING"}],"columns":["vc","vc0"],"resultFormat":"compactedList","limit":1}
+            druid.query.type scan
+          Select Operator
+            expressions: vc (type: int), vc0 (type: string)
+            outputColumnNames: _col0, _col1
+            ListSink
+
+PREHOOK: query: SELECT EXTRACT(YEAR from `__time`), SUBSTRING(CAST(CAST(`__time` AS DATE) AS STRING), 1, 4) as year_str FROM druid_table WHERE EXTRACT(YEAR from `__time`) >= 1969
+AND CAST(EXTRACT(YEAR from `__time`) as STRING) = '1969' LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@druid_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: SELECT EXTRACT(YEAR from `__time`), SUBSTRING(CAST(CAST(`__time` AS DATE) AS STRING), 1, 4) as year_str FROM druid_table WHERE EXTRACT(YEAR from `__time`) >= 1969
+AND CAST(EXTRACT(YEAR from `__time`) as STRING) = '1969' LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@druid_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1969	1969
+1969	1969
+PREHOOK: query: DROP TABLE druid_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@druid_table
+PREHOOK: Output: default@druid_table
+POSTHOOK: query: DROP TABLE druid_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@druid_table
+POSTHOOK: Output: default@druid_table
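
The floorTime golden file above exercises Hive pushing EXTRACT and SUBSTRING over the Druid `__time` column down into the Druid scan as expression virtual columns (timestamp_extract, timestamp_floor, timestamp_format). A minimal sketch of the query shape being covered, assuming only the druid_table created by the CTAS at the top of the file:

  -- Sketch, not part of the generated q.out: assumes druid_table as defined above.
  -- EXTRACT over __time is expected to compile to a timestamp_extract virtual
  -- column inside druid.query.json rather than being evaluated in Hive.
  SELECT EXTRACT(YEAR FROM `__time`)  AS yr,
         EXTRACT(MONTH FROM `__time`) AS mon
  FROM druid_table
  WHERE EXTRACT(YEAR FROM `__time`) >= 1969
  LIMIT 1;

EXPLAIN on such a query should show a single Fetch stage whose TableScan carries the Druid scan JSON, as in the plans above.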

http://git-wip-us.apache.org/repos/asf/hive/blob/696affa2/ql/src/test/results/clientpositive/druid/druidmini_mv.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/druid/druidmini_mv.q.out b/ql/src/test/results/clientpositive/druid/druidmini_mv.q.out
index 3ff7b39..97f6d84 100644
--- a/ql/src/test/results/clientpositive/druid/druidmini_mv.q.out
+++ b/ql/src/test/results/clientpositive/druid/druidmini_mv.q.out
@@ -5,16 +5,17 @@ AS
 SELECT cast(current_timestamp() AS timestamp) AS t,
        cast(a AS int) AS a,
        cast(b AS varchar(256)) AS b,
+       cast(userid AS varchar(256)) AS userid,
        cast(c AS double) AS c,
        cast(d AS int) AS d
 FROM TABLE (
   VALUES
-    (1, 'alfred', 10.30, 2),
-    (2, 'bob', 3.14, 3),
-    (2, 'bonnie', 172342.2, 3),
-    (3, 'calvin', 978.76, 3),
-    (3, 'charlie', 9.8, 1),
-    (3, 'charlie', 15.8, 1)) as q (a, b, c, d)
+    (1, 'alfred', 'alfred', 10.30, 2),
+    (2, 'bob', 'bob', 3.14, 3),
+    (2, 'bonnie', 'bonnie', 172342.2, 3),
+    (3, 'calvin', 'calvin', 978.76, 3),
+    (3, 'charlie', 'charlie_a', 9.8, 1),
+    (3, 'charlie', 'charlie_b', 15.8, 1)) as q (a, b, userid, c, d)
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: _dummy_database@_dummy_table
 PREHOOK: Output: database:default
@@ -26,16 +27,17 @@ AS
 SELECT cast(current_timestamp() AS timestamp) AS t,
        cast(a AS int) AS a,
        cast(b AS varchar(256)) AS b,
+       cast(userid AS varchar(256)) AS userid,
        cast(c AS double) AS c,
        cast(d AS int) AS d
 FROM TABLE (
   VALUES
-    (1, 'alfred', 10.30, 2),
-    (2, 'bob', 3.14, 3),
-    (2, 'bonnie', 172342.2, 3),
-    (3, 'calvin', 978.76, 3),
-    (3, 'charlie', 9.8, 1),
-    (3, 'charlie', 15.8, 1)) as q (a, b, c, d)
+    (1, 'alfred', 'alfred', 10.30, 2),
+    (2, 'bob', 'bob', 3.14, 3),
+    (2, 'bonnie', 'bonnie', 172342.2, 3),
+    (3, 'calvin', 'calvin', 978.76, 3),
+    (3, 'charlie', 'charlie_a', 9.8, 1),
+    (3, 'charlie', 'charlie_b', 15.8, 1)) as q (a, b, userid, c, d)
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: _dummy_database@_dummy_table
 POSTHOOK: Output: database:default
@@ -45,11 +47,12 @@ POSTHOOK: Lineage: cmv_basetable.b SCRIPT []
 POSTHOOK: Lineage: cmv_basetable.c SCRIPT []
 POSTHOOK: Lineage: cmv_basetable.d SCRIPT []
 POSTHOOK: Lineage: cmv_basetable.t SIMPLE []
+POSTHOOK: Lineage: cmv_basetable.userid SCRIPT []
 PREHOOK: query: CREATE MATERIALIZED VIEW cmv_mat_view ENABLE REWRITE
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR")
 AS
-SELECT cast(t AS timestamp with local time zone) as `__time`, a, b, c
+SELECT cast(t AS timestamp with local time zone) as `__time`, a, b, c, userid
 FROM cmv_basetable
 WHERE a = 2
 PREHOOK: type: CREATE_MATERIALIZED_VIEW
@@ -60,7 +63,7 @@ POSTHOOK: query: CREATE MATERIALIZED VIEW cmv_mat_view ENABLE REWRITE
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR")
 AS
-SELECT cast(t AS timestamp with local time zone) as `__time`, a, b, c
+SELECT cast(t AS timestamp with local time zone) as `__time`, a, b, c, userid
 FROM cmv_basetable
 WHERE a = 2
 POSTHOOK: type: CREATE_MATERIALIZED_VIEW
@@ -94,7 +97,7 @@ PREHOOK: query: CREATE MATERIALIZED VIEW IF NOT EXISTS cmv_mat_view2 ENABLE REWR
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR")
 AS
-SELECT cast(t AS timestamp with local time zone) as `__time`, a, b, c
+SELECT cast(t AS timestamp with local time zone) as `__time`, a, b, c, userid
 FROM cmv_basetable
 WHERE a = 3
 PREHOOK: type: CREATE_MATERIALIZED_VIEW
@@ -105,7 +108,7 @@ POSTHOOK: query: CREATE MATERIALIZED VIEW IF NOT EXISTS cmv_mat_view2 ENABLE REW
 STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
 TBLPROPERTIES ("druid.segment.granularity" = "HOUR")
 AS
-SELECT cast(t AS timestamp with local time zone) as `__time`, a, b, c
+SELECT cast(t AS timestamp with local time zone) as `__time`, a, b, c, userid
 FROM cmv_basetable
 WHERE a = 3
 POSTHOOK: type: CREATE_MATERIALIZED_VIEW
@@ -120,8 +123,9 @@ POSTHOOK: query: SELECT a, c FROM cmv_mat_view2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@cmv_mat_view2
 POSTHOOK: Output: hdfs://### HDFS PATH ###
+3	15.800000190734863
+3	9.800000190734863
 3	978.760009765625
-6	25.600000381469727
 PREHOOK: query: SHOW TBLPROPERTIES cmv_mat_view2
 PREHOOK: type: SHOW_TBLPROPERTIES
 POSTHOOK: query: SHOW TBLPROPERTIES cmv_mat_view2
@@ -285,12 +289,12 @@ POSTHOOK: Output: hdfs://### HDFS PATH ###
 3	9.8	3	978.76
 3	978.76	3	978.76
 PREHOOK: query: INSERT INTO cmv_basetable VALUES
- (cast(current_timestamp() AS timestamp), 3, 'charlie', 15.8, 1)
+ (cast(current_timestamp() AS timestamp), 3, 'charlie', 'charlie_c', 15.8, 1)
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
 PREHOOK: Output: default@cmv_basetable
 POSTHOOK: query: INSERT INTO cmv_basetable VALUES
- (cast(current_timestamp() AS timestamp), 3, 'charlie', 15.8, 1)
+ (cast(current_timestamp() AS timestamp), 3, 'charlie', 'charlie_c', 15.8, 1)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 POSTHOOK: Output: default@cmv_basetable
@@ -299,6 +303,7 @@ POSTHOOK: Lineage: cmv_basetable.b SCRIPT []
 POSTHOOK: Lineage: cmv_basetable.c SCRIPT []
 POSTHOOK: Lineage: cmv_basetable.d SCRIPT []
 POSTHOOK: Lineage: cmv_basetable.t SCRIPT []
+POSTHOOK: Lineage: cmv_basetable.userid SCRIPT []
 Warning: Shuffle Join MERGEJOIN[13][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
 PREHOOK: query: EXPLAIN
 SELECT * FROM (
@@ -330,33 +335,33 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: cmv_basetable
-                  Statistics: Num rows: 41 Data size: 480 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 30 Data size: 360 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (a = 3) (type: boolean)
-                    Statistics: Num rows: 5 Data size: 58 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 5 Data size: 60 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: c (type: double)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 5 Data size: 58 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 5 Data size: 60 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         sort order: 
-                        Statistics: Num rows: 5 Data size: 58 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 5 Data size: 60 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: double)
         Map 3 
             Map Operator Tree:
                 TableScan
                   alias: cmv_basetable
-                  Statistics: Num rows: 41 Data size: 640 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 30 Data size: 480 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((a = 3) and (d = 3)) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 15 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: c (type: double)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 15 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         sort order: 
-                        Statistics: Num rows: 1 Data size: 15 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: double)
         Reducer 2 
             Reduce Operator Tree:
@@ -367,14 +372,14 @@ STAGE PLANS:
                   0 
                   1 
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 5 Data size: 138 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 5 Data size: 145 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
                   expressions: 3 (type: int), _col0 (type: double), 3 (type: int), _col1 (type: double)
                   outputColumnNames: _col0, _col1, _col2, _col3
-                  Statistics: Num rows: 5 Data size: 138 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 5 Data size: 145 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
-                    Statistics: Num rows: 5 Data size: 138 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 5 Data size: 145 Basic stats: COMPLETE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                         output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -451,34 +456,34 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: cmv_basetable
-                  Statistics: Num rows: 41 Data size: 15680 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 30 Data size: 21960 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (a = 3) (type: boolean)
-                    Statistics: Num rows: 5 Data size: 1912 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 5 Data size: 3660 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
-                      expressions: CAST( t AS timestamp with local time zone) (type: timestamp with local time zone), 3 (type: int), b (type: varchar(256)), c (type: double)
-                      outputColumnNames: _col0, _col1, _col2, _col3
-                      Statistics: Num rows: 5 Data size: 1912 Basic stats: COMPLETE Column stats: NONE
+                      expressions: CAST( t AS timestamp with local time zone) (type: timestamp with local time zone), 3 (type: int), b (type: varchar(256)), c (type: double), userid (type: varchar(256))
+                      outputColumnNames: _col0, _col1, _col2, _col3, _col4
+                      Statistics: Num rows: 5 Data size: 3660 Basic stats: COMPLETE Column stats: NONE
                       Select Operator
-                        expressions: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: varchar(256)), _col3 (type: double), floor_hour(CAST( GenericUDFEpochMilli(_col0) AS TIMESTAMP)) (type: timestamp)
-                        outputColumnNames: _col0, _col1, _col2, _col3, __time_granularity
-                        Statistics: Num rows: 5 Data size: 1912 Basic stats: COMPLETE Column stats: NONE
+                        expressions: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: varchar(256)), _col3 (type: double), _col4 (type: varchar(256)), floor_hour(CAST( GenericUDFEpochMilli(_col0) AS TIMESTAMP)) (type: timestamp)
+                        outputColumnNames: _col0, _col1, _col2, _col3, _col4, __time_granularity
+                        Statistics: Num rows: 5 Data size: 3660 Basic stats: COMPLETE Column stats: NONE
                         Reduce Output Operator
                           key expressions: __time_granularity (type: timestamp)
                           sort order: +
                           Map-reduce partition columns: __time_granularity (type: timestamp)
-                          Statistics: Num rows: 5 Data size: 1912 Basic stats: COMPLETE Column stats: NONE
-                          value expressions: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: varchar(256)), _col3 (type: double)
+                          Statistics: Num rows: 5 Data size: 3660 Basic stats: COMPLETE Column stats: NONE
+                          value expressions: _col0 (type: timestamp with local time zone), _col1 (type: int), _col2 (type: varchar(256)), _col3 (type: double), _col4 (type: varchar(256))
         Reducer 2 
             Reduce Operator Tree:
               Select Operator
-                expressions: VALUE._col0 (type: timestamp with local time zone), VALUE._col1 (type: int), VALUE._col2 (type: varchar(256)), VALUE._col3 (type: double), KEY.__time_granularity (type: timestamp)
-                outputColumnNames: _col0, _col1, _col2, _col3, __time_granularity
-                Statistics: Num rows: 5 Data size: 1912 Basic stats: COMPLETE Column stats: NONE
+                expressions: VALUE._col0 (type: timestamp with local time zone), VALUE._col1 (type: int), VALUE._col2 (type: varchar(256)), VALUE._col3 (type: double), VALUE._col4 (type: varchar(256)), KEY.__time_granularity (type: timestamp)
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, __time_granularity
+                Statistics: Num rows: 5 Data size: 3660 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
                   Dp Sort State: PARTITION_SORTED
-                  Statistics: Num rows: 5 Data size: 1912 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 5 Data size: 3660 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat
                       output format: org.apache.hadoop.hive.druid.io.DruidOutputFormat
@@ -506,7 +511,7 @@ rawDataSize	0
 storage_handler	org.apache.hadoop.hive.druid.DruidStorageHandler
 totalSize	0
 #### A masked pattern was here ####
-Warning: Shuffle Join MERGEJOIN[13][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+Warning: Shuffle Join MERGEJOIN[8][tables = [cmv_mat_view2, cmv_basetable]] in Stage 'Reducer 2' is a cross product
 PREHOOK: query: EXPLAIN
 SELECT * FROM (
   (SELECT a, c FROM cmv_basetable WHERE a = 3) table1
@@ -536,35 +541,26 @@ STAGE PLANS:
         Map 1 
             Map Operator Tree:
                 TableScan
-                  alias: cmv_basetable
-                  Statistics: Num rows: 41 Data size: 480 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (a = 3) (type: boolean)
-                    Statistics: Num rows: 5 Data size: 58 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: c (type: double)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 5 Data size: 58 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 5 Data size: 58 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: double)
+                  alias: cmv_mat_view2
+                  properties:
+                    druid.fieldNames vc,a,b,c,userid
+                    druid.fieldTypes timestamp with local time zone,int,varchar(256),double,varchar(256)
+                    druid.query.json {"queryType":"scan","dataSource":"default.cmv_mat_view2","intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"columns":["vc","a","b","c","userid"],"resultFormat":"compactedList"}
+                    druid.query.type scan
+                  Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    sort order: 
+                    Statistics: Num rows: 3 Data size: 36 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: a (type: int), c (type: double)
         Map 3 
             Map Operator Tree:
                 TableScan
                   alias: cmv_basetable
-                  Statistics: Num rows: 41 Data size: 640 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: ((a = 3) and (d = 3)) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 15 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: c (type: double)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 15 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        sort order: 
-                        Statistics: Num rows: 1 Data size: 15 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: double)
+                  Statistics: Num rows: 30 Data size: 480 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    sort order: 
+                    Statistics: Num rows: 30 Data size: 480 Basic stats: COMPLETE Column stats: NONE
+                    value expressions: a (type: int), c (type: double), d (type: int)
         Reducer 2 
             Reduce Operator Tree:
               Merge Join Operator
@@ -573,19 +569,22 @@ STAGE PLANS:
                 keys:
                   0 
                   1 
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 5 Data size: 138 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: 3 (type: int), _col0 (type: double), 3 (type: int), _col1 (type: double)
-                  outputColumnNames: _col0, _col1, _col2, _col3
-                  Statistics: Num rows: 5 Data size: 138 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 5 Data size: 138 Basic stats: COMPLETE Column stats: NONE
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                outputColumnNames: _col1, _col3, _col9, _col12, _col13
+                Statistics: Num rows: 90 Data size: 2610 Basic stats: COMPLETE Column stats: NONE
+                Filter Operator
+                  predicate: ((_col13 = 3) and (_col9 = 3)) (type: boolean)
+                  Statistics: Num rows: 22 Data size: 638 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col1 (type: int), _col3 (type: double), _col1 (type: int), _col12 (type: double)
+                    outputColumnNames: _col0, _col1, _col2, _col3
+                    Statistics: Num rows: 22 Data size: 638 Basic stats: COMPLETE Column stats: NONE
+                    File Output Operator
+                      compressed: false
+                      Statistics: Num rows: 22 Data size: 638 Basic stats: COMPLETE Column stats: NONE
+                      table:
+                          input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
     Fetch Operator
@@ -593,7 +592,7 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-Warning: Shuffle Join MERGEJOIN[13][tables = [$hdt$_0, $hdt$_1]] in Stage 'Reducer 2' is a cross product
+Warning: Shuffle Join MERGEJOIN[8][tables = [cmv_mat_view2, cmv_basetable]] in Stage 'Reducer 2' is a cross product
 PREHOOK: query: SELECT * FROM (
   (SELECT a, c FROM cmv_basetable WHERE a = 3) table1
   JOIN
@@ -601,6 +600,7 @@ PREHOOK: query: SELECT * FROM (
   ON table1.a = table2.a)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@cmv_basetable
+PREHOOK: Input: default@cmv_mat_view2
 PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM (
   (SELECT a, c FROM cmv_basetable WHERE a = 3) table1
@@ -609,11 +609,12 @@ POSTHOOK: query: SELECT * FROM (
   ON table1.a = table2.a)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@cmv_basetable
+POSTHOOK: Input: default@cmv_mat_view2
 POSTHOOK: Output: hdfs://### HDFS PATH ###
-3	15.8	3	978.76
-3	15.8	3	978.76
-3	9.8	3	978.76
-3	978.76	3	978.76
+3	15.800000190734863	3	978.76
+3	15.800000190734863	3	978.76
+3	9.800000190734863	3	978.76
+3	978.760009765625	3	978.76
 PREHOOK: query: DROP MATERIALIZED VIEW cmv_mat_view
 PREHOOK: type: DROP_MATERIALIZED_VIEW
 PREHOOK: Input: default@cmv_mat_view