Posted to commits@hive.apache.org by ha...@apache.org on 2018/04/14 00:49:21 UTC

hive git commit: HIVE-19155 : Day time saving cause Druid inserts to fail with org.apache.hive.druid.io.druid.java.util.common.UOE: Cannot add overlapping segments (Slim Bouguerra via Ashutosh Chauhan)

Repository: hive
Updated Branches:
  refs/heads/master b14113be4 -> 24969a8de


HIVE-19155 : Day time saving cause Druid inserts to fail with org.apache.hive.druid.io.druid.java.util.common.UOE: Cannot add overlapping segments (Slim Bouguerra via Ashutosh Chauhan)

Signed-off-by: Ashutosh Chauhan <ha...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/24969a8d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/24969a8d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/24969a8d

Branch: refs/heads/master
Commit: 24969a8de57428a1f88945640a272ec164df66c9
Parents: b14113b
Author: Slim Bouguerra <sl...@gmail.com>
Authored: Fri Apr 13 17:48:38 2018 -0700
Committer: Ashutosh Chauhan <ha...@apache.org>
Committed: Fri Apr 13 17:48:38 2018 -0700

----------------------------------------------------------------------
 .../hadoop/hive/druid/io/DruidRecordWriter.java |  30 +-
 .../clientpositive/druidmini_test_insert.q      |  64 +++
 .../druid/druidmini_test_insert.q.out           | 533 +++++++++++++++++++
 3 files changed, 617 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/24969a8d/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidRecordWriter.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidRecordWriter.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidRecordWriter.java
index 7d2bb91..8ab34a8 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidRecordWriter.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidRecordWriter.java
@@ -29,6 +29,7 @@ import com.google.common.collect.Lists;
 import io.druid.data.input.Committer;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.MapBasedInputRow;
+import io.druid.java.util.common.DateTimes;
 import io.druid.java.util.common.granularity.Granularity;
 import io.druid.segment.indexing.DataSchema;
 import io.druid.segment.indexing.RealtimeTuningConfig;
@@ -110,7 +111,7 @@ public class DruidRecordWriter implements RecordWriter<NullWritable, DruidWritab
                     DruidStorageHandlerUtils.INDEX_IO, DruidStorageHandlerUtils.INDEX_MERGER_V9
             );
     this.maxPartitionSize = maxPartitionSize;
-    appenderator.startJob(); // maybe we need to move this out of the constructor
+    appenderator.startJob();
     this.segmentsDescriptorDir = Preconditions
             .checkNotNull(segmentsDescriptorsDir, "segmentsDescriptorsDir is null");
     this.fileSystem = Preconditions.checkNotNull(fileSystem, "file system is null");
@@ -129,10 +130,12 @@ public class DruidRecordWriter implements RecordWriter<NullWritable, DruidWritab
    * @return segmentIdentifier with of the truncatedTime and maybe push the current open segment.
    */
   private SegmentIdentifier getSegmentIdentifierAndMaybePush(long truncatedTime) {
-    final Interval interval = new Interval(
-            new DateTime(truncatedTime),
-            segmentGranularity.increment(new DateTime(truncatedTime))
-    );
+
+    DateTime truncatedDateTime = segmentGranularity.bucketStart(DateTimes.utc(truncatedTime));
+    final Interval interval = new Interval(
+        truncatedDateTime,
+        segmentGranularity.increment(truncatedDateTime)
+    );
 
     SegmentIdentifier retVal;
     if (currentOpenSegment == null) {
@@ -239,8 +242,6 @@ public class DruidRecordWriter implements RecordWriter<NullWritable, DruidWritab
     DruidWritable record = (DruidWritable) w;
     final long timestamp =
         (long) record.getValue().get(DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN);
-    final long truncatedTime =
-        (long) record.getValue().get(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME);
     final int partitionNumber = Math.toIntExact(
         (long) record.getValue().getOrDefault(Constants.DRUID_SHARD_KEY_COL_NAME, -1l));
     final InputRow inputRow = new MapBasedInputRow(timestamp,
@@ -249,9 +250,17 @@ public class DruidRecordWriter implements RecordWriter<NullWritable, DruidWritab
     );
 
     try {
+
       if (partitionNumber != -1 && maxPartitionSize == -1) {
-        final Interval interval = new Interval(new DateTime(truncatedTime),
-            segmentGranularity.increment(new DateTime(truncatedTime))
+        /*
+        Case: rows are sorted by time plus an extra hashing dimension (see DRUID_SHARD_KEY_COL_NAME).
+        Use DRUID_SHARD_KEY_COL_NAME as the segment partition key in addition to the time dimension,
+        so rows with the same DRUID_SHARD_KEY_COL_NAME and time interval end up in the same segment.
+        */
+        DateTime truncatedDateTime = segmentGranularity.bucketStart(DateTimes.utc(timestamp));
+        final Interval interval = new Interval(
+            truncatedDateTime,
+            segmentGranularity.increment(truncatedDateTime)
         );
 
         if (currentOpenSegment != null) {
@@ -273,8 +282,9 @@ public class DruidRecordWriter implements RecordWriter<NullWritable, DruidWritab
         appenderator.add(currentOpenSegment, inputRow, committerSupplier);
 
       } else if (partitionNumber == -1 && maxPartitionSize != -1) {
+        /* Case: segments are partitioned by time and capped at maxPartitionSize rows per segment. */
         appenderator
-            .add(getSegmentIdentifierAndMaybePush(truncatedTime), inputRow, committerSupplier);
+            .add(getSegmentIdentifierAndMaybePush(timestamp), inputRow, committerSupplier);
       } else {
         throw new IllegalArgumentException(String.format(
             "partitionNumber and  maxPartitionSize should be mutually exclusive got partitionNum [%s] and maxPartitionSize [%s]",

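For context on the change above: with DAY segment granularity the writer now derives the segment interval by bucketing the raw epoch timestamp in UTC (segmentGranularity.bucketStart(DateTimes.utc(timestamp))) and incrementing that bucket start, instead of building the interval from a DateTime in the JVM default time zone. Below is a minimal standalone sketch of that UTC bucketing, assuming Joda-Time; UtcDayBucketSketch and utcDayStart are illustrative names, not part of the patch.

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;

// Sketch of the UTC DAY bucketing the patch switches to; the real code uses
// Druid's segmentGranularity.bucketStart(DateTimes.utc(ts)). Names here are
// illustrative only.
public class UtcDayBucketSketch {

  // Floor an epoch-millis instant to the start of its UTC day,
  // analogous to DAY-granularity bucketStart on a UTC DateTime.
  static DateTime utcDayStart(long timestampMillis) {
    return new DateTime(timestampMillis, DateTimeZone.UTC).withTimeAtStartOfDay();
  }

  public static void main(String[] args) {
    DateTimeZone pacific = DateTimeZone.forID("US/Pacific");
    // 2015-03-08 is the spring-forward date in US/Pacific, so these two local
    // times straddle a local midnight but fall in the same UTC day.
    long t1 = new DateTime(2015, 3, 8, 23, 59, 59, pacific).getMillis(); // 2015-03-09T06:59:59Z
    long t2 = new DateTime(2015, 3, 9, 0, 0, 0, pacific).getMillis();    // 2015-03-09T07:00:00Z

    DateTime b1 = utcDayStart(t1);
    DateTime b2 = utcDayStart(t2);
    Interval seg1 = new Interval(b1, b1.plusDays(1));
    Interval seg2 = new Interval(b2, b2.plusDays(1));

    // Buckets computed this way are either identical or disjoint, so the
    // appenderator never sees two partially overlapping segment intervals.
    System.out.println(seg1); // 2015-03-09T00:00:00.000Z/2015-03-10T00:00:00.000Z
    System.out.println(seg2); // same interval
    System.out.println(seg1.equals(seg2) || !seg1.overlaps(seg2)); // true
  }
}

Because every bucket start is a UTC day boundary, two timestamps on either side of a local daylight-saving transition either share a bucket or land in adjacent, non-overlapping buckets.
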
http://git-wip-us.apache.org/repos/asf/hive/blob/24969a8d/ql/src/test/queries/clientpositive/druidmini_test_insert.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/druidmini_test_insert.q b/ql/src/test/queries/clientpositive/druidmini_test_insert.q
index 558e246..47199b9 100644
--- a/ql/src/test/queries/clientpositive/druidmini_test_insert.q
+++ b/ql/src/test/queries/clientpositive/druidmini_test_insert.q
@@ -51,3 +51,67 @@ SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`,
 SELECT COUNT(*) FROM druid_alltypesorc;
 
 DROP TABLE druid_alltypesorc;
+
+
+-- Daylight saving time insert test
+
+create database druid_test_dst;
+use druid_test_dst;
+
+create table test_base_table(`timecolumn` timestamp, `userid` string, `num_l` float);
+insert into test_base_table values ('2015-03-08 00:00:00', 'i1-start', 4);
+insert into test_base_table values ('2015-03-08 23:59:59', 'i1-end', 1);
+insert into test_base_table values ('2015-03-09 00:00:00', 'i2-start', 4);
+insert into test_base_table values ('2015-03-09 23:59:59', 'i2-end', 1);
+insert into test_base_table values ('2015-03-10 00:00:00', 'i3-start', 2);
+insert into test_base_table values ('2015-03-10 23:59:59', 'i3-end', 2);
+
+CREATE TABLE druid_test_table
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES ("druid.segment.granularity" = "DAY")
+AS
+select cast(`timecolumn` as timestamp with local time zone) as `__time`, `userid`, `num_l` FROM test_base_table;
+
+select * FROM druid_test_table;
+
+select * from druid_test_table where `__time` = cast('2015-03-08 00:00:00' as timestamp with local time zone);
+select * from druid_test_table where `__time` = cast('2015-03-08 23:59:59' as timestamp with local time zone);
+
+select * from druid_test_table where `__time` = cast('2015-03-09 00:00:00' as timestamp with local time zone);
+select * from druid_test_table where `__time` = cast('2015-03-09 23:59:59' as timestamp with local time zone);
+
+select * from druid_test_table where `__time` = cast('2015-03-10 00:00:00' as timestamp with local time zone);
+select * from druid_test_table where `__time` = cast('2015-03-10 23:59:59' as timestamp with local time zone);
+
+
+explain select * from druid_test_table where `__time` = cast('2015-03-08 00:00:00' as timestamp with local time zone);
+explain select * from druid_test_table where `__time` = cast('2015-03-08 23:59:59' as timestamp with local time zone);
+
+explain select * from druid_test_table where `__time` = cast('2015-03-09 00:00:00' as timestamp with local time zone);
+explain select * from druid_test_table where `__time` = cast('2015-03-09 23:59:59' as timestamp with local time zone);
+
+explain select * from druid_test_table where `__time` = cast('2015-03-10 00:00:00' as timestamp with local time zone);
+explain select * from druid_test_table where `__time` = cast('2015-03-10 23:59:59' as timestamp with local time zone);
+
+
+select * from druid_test_table where `__time` = cast('2015-03-08 00:00:00' as timestamp );
+select * from druid_test_table where `__time` = cast('2015-03-08 23:59:59' as timestamp );
+
+select * from druid_test_table where `__time` = cast('2015-03-09 00:00:00' as timestamp );
+select * from druid_test_table where `__time` = cast('2015-03-09 23:59:59' as timestamp );
+
+select * from druid_test_table where `__time` = cast('2015-03-10 00:00:00' as timestamp );
+select * from druid_test_table where `__time` = cast('2015-03-10 23:59:59' as timestamp );
+
+
+EXPLAIN select * from druid_test_table where `__time` = cast('2015-03-08 00:00:00' as timestamp );
+EXPLAIN select * from druid_test_table where `__time` = cast('2015-03-08 23:59:59' as timestamp );
+
+EXPLAIN select * from druid_test_table where `__time` = cast('2015-03-09 00:00:00' as timestamp );
+EXPLAIN select * from druid_test_table where `__time` = cast('2015-03-09 23:59:59' as timestamp );
+
+EXPLAIN select * from druid_test_table where `__time` = cast('2015-03-10 00:00:00' as timestamp );
+EXPLAIN select * from druid_test_table where `__time` = cast('2015-03-10 23:59:59' as timestamp );
+
+DROP TABLE test_base_table;
+DROP TABLE druid_test_table;

http://git-wip-us.apache.org/repos/asf/hive/blob/24969a8d/ql/src/test/results/clientpositive/druid/druidmini_test_insert.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/druid/druidmini_test_insert.q.out b/ql/src/test/results/clientpositive/druid/druidmini_test_insert.q.out
index 8b79f6a..482554b 100644
--- a/ql/src/test/results/clientpositive/druid/druidmini_test_insert.q.out
+++ b/ql/src/test/results/clientpositive/druid/druidmini_test_insert.q.out
@@ -148,3 +148,536 @@ POSTHOOK: query: DROP TABLE druid_alltypesorc
 POSTHOOK: type: DROPTABLE
 POSTHOOK: Input: default@druid_alltypesorc
 POSTHOOK: Output: default@druid_alltypesorc
+PREHOOK: query: create database druid_test_dst
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:druid_test_dst
+POSTHOOK: query: create database druid_test_dst
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:druid_test_dst
+PREHOOK: query: use druid_test_dst
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:druid_test_dst
+POSTHOOK: query: use druid_test_dst
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:druid_test_dst
+PREHOOK: query: create table test_base_table(`timecolumn` timestamp, `userid` string, `num_l` float)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:druid_test_dst
+PREHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: query: create table test_base_table(`timecolumn` timestamp, `userid` string, `num_l` float)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:druid_test_dst
+POSTHOOK: Output: druid_test_dst@test_base_table
+PREHOOK: query: insert into test_base_table values ('2015-03-08 00:00:00', 'i1-start', 4)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: query: insert into test_base_table values ('2015-03-08 00:00:00', 'i1-start', 4)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
+POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
+POSTHOOK: Lineage: test_base_table.userid SCRIPT []
+PREHOOK: query: insert into test_base_table values ('2015-03-08 23:59:59', 'i1-end', 1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: query: insert into test_base_table values ('2015-03-08 23:59:59', 'i1-end', 1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
+POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
+POSTHOOK: Lineage: test_base_table.userid SCRIPT []
+PREHOOK: query: insert into test_base_table values ('2015-03-09 00:00:00', 'i2-start', 4)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: query: insert into test_base_table values ('2015-03-09 00:00:00', 'i2-start', 4)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
+POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
+POSTHOOK: Lineage: test_base_table.userid SCRIPT []
+PREHOOK: query: insert into test_base_table values ('2015-03-09 23:59:59', 'i2-end', 1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: query: insert into test_base_table values ('2015-03-09 23:59:59', 'i2-end', 1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
+POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
+POSTHOOK: Lineage: test_base_table.userid SCRIPT []
+PREHOOK: query: insert into test_base_table values ('2015-03-10 00:00:00', 'i3-start', 2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: query: insert into test_base_table values ('2015-03-10 00:00:00', 'i3-start', 2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
+POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
+POSTHOOK: Lineage: test_base_table.userid SCRIPT []
+PREHOOK: query: insert into test_base_table values ('2015-03-10 23:59:59', 'i3-end', 2)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: query: insert into test_base_table values ('2015-03-10 23:59:59', 'i3-end', 2)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: Lineage: test_base_table.num_l SCRIPT []
+POSTHOOK: Lineage: test_base_table.timecolumn SCRIPT []
+POSTHOOK: Lineage: test_base_table.userid SCRIPT []
+PREHOOK: query: CREATE TABLE druid_test_table
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES ("druid.segment.granularity" = "DAY")
+AS
+select cast(`timecolumn` as timestamp with local time zone) as `__time`, `userid`, `num_l` FROM test_base_table
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: druid_test_dst@test_base_table
+PREHOOK: Output: database:druid_test_dst
+PREHOOK: Output: druid_test_dst@druid_test_table
+POSTHOOK: query: CREATE TABLE druid_test_table
+STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
+TBLPROPERTIES ("druid.segment.granularity" = "DAY")
+AS
+select cast(`timecolumn` as timestamp with local time zone) as `__time`, `userid`, `num_l` FROM test_base_table
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: druid_test_dst@test_base_table
+POSTHOOK: Output: database:druid_test_dst
+POSTHOOK: Output: druid_test_dst@druid_test_table
+POSTHOOK: Lineage: druid_test_table.__time EXPRESSION [(test_base_table)test_base_table.FieldSchema(name:timecolumn, type:timestamp, comment:null), ]
+POSTHOOK: Lineage: druid_test_table.num_l SIMPLE [(test_base_table)test_base_table.FieldSchema(name:num_l, type:float, comment:null), ]
+POSTHOOK: Lineage: druid_test_table.userid SIMPLE [(test_base_table)test_base_table.FieldSchema(name:userid, type:string, comment:null), ]
+PREHOOK: query: select * FROM druid_test_table
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_test_dst@druid_test_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * FROM druid_test_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_test_dst@druid_test_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2015-03-08 00:00:00.0 US/Pacific	i1-start	4.0
+2015-03-08 23:59:59.0 US/Pacific	i1-end	1.0
+2015-03-09 00:00:00.0 US/Pacific	i2-start	4.0
+2015-03-09 23:59:59.0 US/Pacific	i2-end	1.0
+2015-03-10 00:00:00.0 US/Pacific	i3-start	2.0
+2015-03-10 23:59:59.0 US/Pacific	i3-end	2.0
+PREHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-08 00:00:00' as timestamp with local time zone)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_test_dst@druid_test_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-08 00:00:00' as timestamp with local time zone)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_test_dst@druid_test_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2015-03-08 00:00:00.0 US/Pacific	i1-start	4.0
+PREHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-08 23:59:59' as timestamp with local time zone)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_test_dst@druid_test_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-08 23:59:59' as timestamp with local time zone)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_test_dst@druid_test_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2015-03-08 23:59:59.0 US/Pacific	i1-end	1.0
+PREHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-09 00:00:00' as timestamp with local time zone)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_test_dst@druid_test_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-09 00:00:00' as timestamp with local time zone)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_test_dst@druid_test_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2015-03-09 00:00:00.0 US/Pacific	i2-start	4.0
+PREHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-09 23:59:59' as timestamp with local time zone)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_test_dst@druid_test_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-09 23:59:59' as timestamp with local time zone)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_test_dst@druid_test_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2015-03-09 23:59:59.0 US/Pacific	i2-end	1.0
+PREHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-10 00:00:00' as timestamp with local time zone)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_test_dst@druid_test_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-10 00:00:00' as timestamp with local time zone)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_test_dst@druid_test_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2015-03-10 00:00:00.0 US/Pacific	i3-start	2.0
+PREHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-10 23:59:59' as timestamp with local time zone)
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_test_dst@druid_test_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-10 23:59:59' as timestamp with local time zone)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_test_dst@druid_test_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2015-03-10 23:59:59.0 US/Pacific	i3-end	2.0
+PREHOOK: query: explain select * from druid_test_table where `__time` = cast('2015-03-08 00:00:00' as timestamp with local time zone)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select * from druid_test_table where `__time` = cast('2015-03-08 00:00:00' as timestamp with local time zone)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_test_table
+          properties:
+            druid.fieldNames userid,num_l
+            druid.fieldTypes string,float
+            druid.query.json {"queryType":"scan","dataSource":"druid_test_dst.druid_test_table","intervals":["2015-03-08T08:00:00.000Z/2015-03-08T08:00:00.001Z"],"columns":["userid","num_l"],"resultFormat":"compactedList"}
+            druid.query.type scan
+          Select Operator
+            expressions: TIMESTAMPLOCALTZ'2015-03-08 00:00:00.0 US/Pacific' (type: timestamp with local time zone), userid (type: string), num_l (type: float)
+            outputColumnNames: _col0, _col1, _col2
+            ListSink
+
+PREHOOK: query: explain select * from druid_test_table where `__time` = cast('2015-03-08 23:59:59' as timestamp with local time zone)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select * from druid_test_table where `__time` = cast('2015-03-08 23:59:59' as timestamp with local time zone)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_test_table
+          properties:
+            druid.fieldNames userid,num_l
+            druid.fieldTypes string,float
+            druid.query.json {"queryType":"scan","dataSource":"druid_test_dst.druid_test_table","intervals":["2015-03-09T06:59:59.000Z/2015-03-09T06:59:59.001Z"],"columns":["userid","num_l"],"resultFormat":"compactedList"}
+            druid.query.type scan
+          Select Operator
+            expressions: TIMESTAMPLOCALTZ'2015-03-08 23:59:59.0 US/Pacific' (type: timestamp with local time zone), userid (type: string), num_l (type: float)
+            outputColumnNames: _col0, _col1, _col2
+            ListSink
+
+PREHOOK: query: explain select * from druid_test_table where `__time` = cast('2015-03-09 00:00:00' as timestamp with local time zone)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select * from druid_test_table where `__time` = cast('2015-03-09 00:00:00' as timestamp with local time zone)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_test_table
+          properties:
+            druid.fieldNames userid,num_l
+            druid.fieldTypes string,float
+            druid.query.json {"queryType":"scan","dataSource":"druid_test_dst.druid_test_table","intervals":["2015-03-09T07:00:00.000Z/2015-03-09T07:00:00.001Z"],"columns":["userid","num_l"],"resultFormat":"compactedList"}
+            druid.query.type scan
+          Select Operator
+            expressions: TIMESTAMPLOCALTZ'2015-03-09 00:00:00.0 US/Pacific' (type: timestamp with local time zone), userid (type: string), num_l (type: float)
+            outputColumnNames: _col0, _col1, _col2
+            ListSink
+
+PREHOOK: query: explain select * from druid_test_table where `__time` = cast('2015-03-09 23:59:59' as timestamp with local time zone)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select * from druid_test_table where `__time` = cast('2015-03-09 23:59:59' as timestamp with local time zone)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_test_table
+          properties:
+            druid.fieldNames userid,num_l
+            druid.fieldTypes string,float
+            druid.query.json {"queryType":"scan","dataSource":"druid_test_dst.druid_test_table","intervals":["2015-03-10T06:59:59.000Z/2015-03-10T06:59:59.001Z"],"columns":["userid","num_l"],"resultFormat":"compactedList"}
+            druid.query.type scan
+          Select Operator
+            expressions: TIMESTAMPLOCALTZ'2015-03-09 23:59:59.0 US/Pacific' (type: timestamp with local time zone), userid (type: string), num_l (type: float)
+            outputColumnNames: _col0, _col1, _col2
+            ListSink
+
+PREHOOK: query: explain select * from druid_test_table where `__time` = cast('2015-03-10 00:00:00' as timestamp with local time zone)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select * from druid_test_table where `__time` = cast('2015-03-10 00:00:00' as timestamp with local time zone)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_test_table
+          properties:
+            druid.fieldNames userid,num_l
+            druid.fieldTypes string,float
+            druid.query.json {"queryType":"scan","dataSource":"druid_test_dst.druid_test_table","intervals":["2015-03-10T07:00:00.000Z/2015-03-10T07:00:00.001Z"],"columns":["userid","num_l"],"resultFormat":"compactedList"}
+            druid.query.type scan
+          Select Operator
+            expressions: TIMESTAMPLOCALTZ'2015-03-10 00:00:00.0 US/Pacific' (type: timestamp with local time zone), userid (type: string), num_l (type: float)
+            outputColumnNames: _col0, _col1, _col2
+            ListSink
+
+PREHOOK: query: explain select * from druid_test_table where `__time` = cast('2015-03-10 23:59:59' as timestamp with local time zone)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select * from druid_test_table where `__time` = cast('2015-03-10 23:59:59' as timestamp with local time zone)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_test_table
+          properties:
+            druid.fieldNames userid,num_l
+            druid.fieldTypes string,float
+            druid.query.json {"queryType":"scan","dataSource":"druid_test_dst.druid_test_table","intervals":["2015-03-11T06:59:59.000Z/2015-03-11T06:59:59.001Z"],"columns":["userid","num_l"],"resultFormat":"compactedList"}
+            druid.query.type scan
+          Select Operator
+            expressions: TIMESTAMPLOCALTZ'2015-03-10 23:59:59.0 US/Pacific' (type: timestamp with local time zone), userid (type: string), num_l (type: float)
+            outputColumnNames: _col0, _col1, _col2
+            ListSink
+
+PREHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-08 00:00:00' as timestamp )
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_test_dst@druid_test_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-08 00:00:00' as timestamp )
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_test_dst@druid_test_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2015-03-08 00:00:00.0 US/Pacific	i1-start	4.0
+PREHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-08 23:59:59' as timestamp )
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_test_dst@druid_test_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-08 23:59:59' as timestamp )
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_test_dst@druid_test_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2015-03-08 23:59:59.0 US/Pacific	i1-end	1.0
+PREHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-09 00:00:00' as timestamp )
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_test_dst@druid_test_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-09 00:00:00' as timestamp )
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_test_dst@druid_test_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2015-03-09 00:00:00.0 US/Pacific	i2-start	4.0
+PREHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-09 23:59:59' as timestamp )
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_test_dst@druid_test_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-09 23:59:59' as timestamp )
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_test_dst@druid_test_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2015-03-09 23:59:59.0 US/Pacific	i2-end	1.0
+PREHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-10 00:00:00' as timestamp )
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_test_dst@druid_test_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-10 00:00:00' as timestamp )
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_test_dst@druid_test_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2015-03-10 00:00:00.0 US/Pacific	i3-start	2.0
+PREHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-10 23:59:59' as timestamp )
+PREHOOK: type: QUERY
+PREHOOK: Input: druid_test_dst@druid_test_table
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from druid_test_table where `__time` = cast('2015-03-10 23:59:59' as timestamp )
+POSTHOOK: type: QUERY
+POSTHOOK: Input: druid_test_dst@druid_test_table
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+2015-03-10 23:59:59.0 US/Pacific	i3-end	2.0
+PREHOOK: query: EXPLAIN select * from druid_test_table where `__time` = cast('2015-03-08 00:00:00' as timestamp )
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN select * from druid_test_table where `__time` = cast('2015-03-08 00:00:00' as timestamp )
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_test_table
+          properties:
+            druid.fieldNames userid,num_l
+            druid.fieldTypes string,float
+            druid.query.json {"queryType":"scan","dataSource":"druid_test_dst.druid_test_table","intervals":["2015-03-08T08:00:00.000Z/2015-03-08T08:00:00.001Z"],"columns":["userid","num_l"],"resultFormat":"compactedList"}
+            druid.query.type scan
+          Select Operator
+            expressions: TIMESTAMPLOCALTZ'2015-03-08 00:00:00.0 US/Pacific' (type: timestamp with local time zone), userid (type: string), num_l (type: float)
+            outputColumnNames: _col0, _col1, _col2
+            ListSink
+
+PREHOOK: query: EXPLAIN select * from druid_test_table where `__time` = cast('2015-03-08 23:59:59' as timestamp )
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN select * from druid_test_table where `__time` = cast('2015-03-08 23:59:59' as timestamp )
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_test_table
+          properties:
+            druid.fieldNames userid,num_l
+            druid.fieldTypes string,float
+            druid.query.json {"queryType":"scan","dataSource":"druid_test_dst.druid_test_table","intervals":["2015-03-09T06:59:59.000Z/2015-03-09T06:59:59.001Z"],"columns":["userid","num_l"],"resultFormat":"compactedList"}
+            druid.query.type scan
+          Select Operator
+            expressions: TIMESTAMPLOCALTZ'2015-03-08 23:59:59.0 US/Pacific' (type: timestamp with local time zone), userid (type: string), num_l (type: float)
+            outputColumnNames: _col0, _col1, _col2
+            ListSink
+
+PREHOOK: query: EXPLAIN select * from druid_test_table where `__time` = cast('2015-03-09 00:00:00' as timestamp )
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN select * from druid_test_table where `__time` = cast('2015-03-09 00:00:00' as timestamp )
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_test_table
+          properties:
+            druid.fieldNames userid,num_l
+            druid.fieldTypes string,float
+            druid.query.json {"queryType":"scan","dataSource":"druid_test_dst.druid_test_table","intervals":["2015-03-09T07:00:00.000Z/2015-03-09T07:00:00.001Z"],"columns":["userid","num_l"],"resultFormat":"compactedList"}
+            druid.query.type scan
+          Select Operator
+            expressions: TIMESTAMPLOCALTZ'2015-03-09 00:00:00.0 US/Pacific' (type: timestamp with local time zone), userid (type: string), num_l (type: float)
+            outputColumnNames: _col0, _col1, _col2
+            ListSink
+
+PREHOOK: query: EXPLAIN select * from druid_test_table where `__time` = cast('2015-03-09 23:59:59' as timestamp )
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN select * from druid_test_table where `__time` = cast('2015-03-09 23:59:59' as timestamp )
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_test_table
+          properties:
+            druid.fieldNames userid,num_l
+            druid.fieldTypes string,float
+            druid.query.json {"queryType":"scan","dataSource":"druid_test_dst.druid_test_table","intervals":["2015-03-10T06:59:59.000Z/2015-03-10T06:59:59.001Z"],"columns":["userid","num_l"],"resultFormat":"compactedList"}
+            druid.query.type scan
+          Select Operator
+            expressions: TIMESTAMPLOCALTZ'2015-03-09 23:59:59.0 US/Pacific' (type: timestamp with local time zone), userid (type: string), num_l (type: float)
+            outputColumnNames: _col0, _col1, _col2
+            ListSink
+
+PREHOOK: query: EXPLAIN select * from druid_test_table where `__time` = cast('2015-03-10 00:00:00' as timestamp )
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN select * from druid_test_table where `__time` = cast('2015-03-10 00:00:00' as timestamp )
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_test_table
+          properties:
+            druid.fieldNames userid,num_l
+            druid.fieldTypes string,float
+            druid.query.json {"queryType":"scan","dataSource":"druid_test_dst.druid_test_table","intervals":["2015-03-10T07:00:00.000Z/2015-03-10T07:00:00.001Z"],"columns":["userid","num_l"],"resultFormat":"compactedList"}
+            druid.query.type scan
+          Select Operator
+            expressions: TIMESTAMPLOCALTZ'2015-03-10 00:00:00.0 US/Pacific' (type: timestamp with local time zone), userid (type: string), num_l (type: float)
+            outputColumnNames: _col0, _col1, _col2
+            ListSink
+
+PREHOOK: query: EXPLAIN select * from druid_test_table where `__time` = cast('2015-03-10 23:59:59' as timestamp )
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN select * from druid_test_table where `__time` = cast('2015-03-10 23:59:59' as timestamp )
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_test_table
+          properties:
+            druid.fieldNames userid,num_l
+            druid.fieldTypes string,float
+            druid.query.json {"queryType":"scan","dataSource":"druid_test_dst.druid_test_table","intervals":["2015-03-11T06:59:59.000Z/2015-03-11T06:59:59.001Z"],"columns":["userid","num_l"],"resultFormat":"compactedList"}
+            druid.query.type scan
+          Select Operator
+            expressions: TIMESTAMPLOCALTZ'2015-03-10 23:59:59.0 US/Pacific' (type: timestamp with local time zone), userid (type: string), num_l (type: float)
+            outputColumnNames: _col0, _col1, _col2
+            ListSink
+
+PREHOOK: query: DROP TABLE test_base_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: druid_test_dst@test_base_table
+PREHOOK: Output: druid_test_dst@test_base_table
+POSTHOOK: query: DROP TABLE test_base_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: druid_test_dst@test_base_table
+POSTHOOK: Output: druid_test_dst@test_base_table
+PREHOOK: query: DROP TABLE druid_test_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: druid_test_dst@druid_test_table
+PREHOOK: Output: druid_test_dst@druid_test_table
+POSTHOOK: query: DROP TABLE druid_test_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: druid_test_dst@druid_test_table
+POSTHOOK: Output: druid_test_dst@druid_test_table
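
The druid.query.json intervals in the plans above reflect the US/Pacific offset changing from -08:00 to -07:00 across the 2015-03-08 spring-forward transition: '2015-03-08 00:00:00' maps to 2015-03-08T08:00:00.000Z, while '2015-03-08 23:59:59' maps to 2015-03-09T06:59:59.000Z. The following small Joda-Time sketch reproduces those conversions; DstOffsetSketch is an illustrative name, not part of the patch.

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

// Standalone sketch (not part of the patch) reproducing the UTC instants that
// appear in the druid.query.json intervals above.
public class DstOffsetSketch {
  public static void main(String[] args) {
    DateTimeZone pacific = DateTimeZone.forID("US/Pacific");
    DateTimeFormatter f =
        DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").withZone(pacific);

    String[] locals = {"2015-03-08 00:00:00", "2015-03-08 23:59:59",
                       "2015-03-09 00:00:00", "2015-03-10 23:59:59"};
    for (String s : locals) {
      // Parse in US/Pacific, then print the equivalent UTC instant.
      DateTime utc = f.parseDateTime(s).withZone(DateTimeZone.UTC);
      System.out.println(s + " US/Pacific -> " + utc);
      // 2015-03-08 00:00:00 -> 2015-03-08T08:00:00.000Z  (PST, -08:00)
      // 2015-03-08 23:59:59 -> 2015-03-09T06:59:59.000Z  (PDT, -07:00)
      // 2015-03-09 00:00:00 -> 2015-03-09T07:00:00.000Z
      // 2015-03-10 23:59:59 -> 2015-03-11T06:59:59.000Z
    }
  }
}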