Posted to commits@hive.apache.org by jc...@apache.org on 2016/12/15 20:51:36 UTC

[2/4] hive git commit: HIVE-15277: Teach Hive how to create/delete Druid segments (Slim Bouguerra, reviewed by Jesus Camacho Rodriguez)

http://git-wip-us.apache.org/repos/asf/hive/blob/590687bc/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidSerDe.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidSerDe.java b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidSerDe.java
index 1343939..a495165 100644
--- a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidSerDe.java
+++ b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidSerDe.java
@@ -74,374 +74,408 @@ public class TestDruidSerDe {
   // Timeseries query
   private static final String TIMESERIES_QUERY =
           "{  \"queryType\": \"timeseries\", "
-          + " \"dataSource\": \"sample_datasource\", "
-          + " \"granularity\": \"day\", "
-          + " \"descending\": \"true\", "
-          + " \"filter\": {  "
-          + "  \"type\": \"and\",  "
-          + "  \"fields\": [   "
-          + "   { \"type\": \"selector\", \"dimension\": \"sample_dimension1\", \"value\": \"sample_value1\" },   "
-          + "   { \"type\": \"or\",    "
-          + "    \"fields\": [     "
-          + "     { \"type\": \"selector\", \"dimension\": \"sample_dimension2\", \"value\": \"sample_value2\" },     "
-          + "     { \"type\": \"selector\", \"dimension\": \"sample_dimension3\", \"value\": \"sample_value3\" }    "
-          + "    ]   "
-          + "   }  "
-          + "  ] "
-          + " }, "
-          + " \"aggregations\": [  "
-          + "  { \"type\": \"longSum\", \"name\": \"sample_name1\", \"fieldName\": \"sample_fieldName1\" },  "
-          + "  { \"type\": \"doubleSum\", \"name\": \"sample_name2\", \"fieldName\": \"sample_fieldName2\" } "
-          + " ], "
-          + " \"postAggregations\": [  "
-          + "  { \"type\": \"arithmetic\",  "
-          + "    \"name\": \"sample_divide\",  "
-          + "    \"fn\": \"/\",  "
-          + "    \"fields\": [   "
-          + "     { \"type\": \"fieldAccess\", \"name\": \"postAgg__sample_name1\", \"fieldName\": \"sample_name1\" },   "
-          + "     { \"type\": \"fieldAccess\", \"name\": \"postAgg__sample_name2\", \"fieldName\": \"sample_name2\" }  "
-          + "    ]  "
-          + "  } "
-          + " ], "
-          + " \"intervals\": [ \"2012-01-01T00:00:00.000/2012-01-03T00:00:00.000\" ]}";
+                  + " \"dataSource\": \"sample_datasource\", "
+                  + " \"granularity\": \"day\", "
+                  + " \"descending\": \"true\", "
+                  + " \"filter\": {  "
+                  + "  \"type\": \"and\",  "
+                  + "  \"fields\": [   "
+                  + "   { \"type\": \"selector\", \"dimension\": \"sample_dimension1\", \"value\": \"sample_value1\" },   "
+                  + "   { \"type\": \"or\",    "
+                  + "    \"fields\": [     "
+                  + "     { \"type\": \"selector\", \"dimension\": \"sample_dimension2\", \"value\": \"sample_value2\" },     "
+                  + "     { \"type\": \"selector\", \"dimension\": \"sample_dimension3\", \"value\": \"sample_value3\" }    "
+                  + "    ]   "
+                  + "   }  "
+                  + "  ] "
+                  + " }, "
+                  + " \"aggregations\": [  "
+                  + "  { \"type\": \"longSum\", \"name\": \"sample_name1\", \"fieldName\": \"sample_fieldName1\" },  "
+                  + "  { \"type\": \"doubleSum\", \"name\": \"sample_name2\", \"fieldName\": \"sample_fieldName2\" } "
+                  + " ], "
+                  + " \"postAggregations\": [  "
+                  + "  { \"type\": \"arithmetic\",  "
+                  + "    \"name\": \"sample_divide\",  "
+                  + "    \"fn\": \"/\",  "
+                  + "    \"fields\": [   "
+                  + "     { \"type\": \"fieldAccess\", \"name\": \"postAgg__sample_name1\", \"fieldName\": \"sample_name1\" },   "
+                  + "     { \"type\": \"fieldAccess\", \"name\": \"postAgg__sample_name2\", \"fieldName\": \"sample_name2\" }  "
+                  + "    ]  "
+                  + "  } "
+                  + " ], "
+                  + " \"intervals\": [ \"2012-01-01T00:00:00.000/2012-01-03T00:00:00.000\" ]}";
+
   // Timeseries query results
   private static final String TIMESERIES_QUERY_RESULTS =
           "[  "
-          + "{   "
-          + " \"timestamp\": \"2012-01-01T00:00:00.000Z\",   "
-          + " \"result\": { \"sample_name1\": 0, \"sample_name2\": 1.0, \"sample_divide\": 2.2222 }   "
-          + "},  "
-          + "{   "
-          + " \"timestamp\": \"2012-01-02T00:00:00.000Z\",   "
-          + " \"result\": { \"sample_name1\": 2, \"sample_name2\": 3.32, \"sample_divide\": 4 }  "
-          + "}]";
+                  + "{   "
+                  + " \"timestamp\": \"2012-01-01T00:00:00.000Z\",   "
+                  + " \"result\": { \"sample_name1\": 0, \"sample_name2\": 1.0, \"sample_divide\": 2.2222 }   "
+                  + "},  "
+                  + "{   "
+                  + " \"timestamp\": \"2012-01-02T00:00:00.000Z\",   "
+                  + " \"result\": { \"sample_name1\": 2, \"sample_name2\": 3.32, \"sample_divide\": 4 }  "
+                  + "}]";
+
   // Timeseries query results as records
   private static final Object[][] TIMESERIES_QUERY_RESULTS_RECORDS = new Object[][] {
-    new Object[] { new TimestampWritable(new Timestamp(1325376000000L)), new LongWritable(0), new FloatWritable(1.0F), new FloatWritable(2.2222F) } ,
-    new Object[] { new TimestampWritable(new Timestamp(1325462400000L)), new LongWritable(2), new FloatWritable(3.32F), new FloatWritable(4F)}
+          new Object[] { new TimestampWritable(new Timestamp(1325376000000L)), new LongWritable(0),
+                  new FloatWritable(1.0F), new FloatWritable(2.2222F) },
+          new Object[] { new TimestampWritable(new Timestamp(1325462400000L)), new LongWritable(2),
+                  new FloatWritable(3.32F), new FloatWritable(4F) }
   };
 
   // TopN query
   private static final String TOPN_QUERY =
           "{  \"queryType\": \"topN\", "
-          + " \"dataSource\": \"sample_data\", "
-          + " \"dimension\": \"sample_dim\", "
-          + " \"threshold\": 5, "
-          + " \"metric\": \"count\", "
-          + " \"granularity\": \"all\", "
-          + " \"filter\": {  "
-          + "  \"type\": \"and\",  "
-          + "  \"fields\": [   "
-          + "   {    "
-          + "    \"type\": \"selector\",    "
-          + "    \"dimension\": \"dim1\",    "
-          + "    \"value\": \"some_value\"   "
-          + "   },   "
-          + "   {    "
-          + "    \"type\": \"selector\",    "
-          + "    \"dimension\": \"dim2\",    "
-          + "    \"value\": \"some_other_val\"   "
-          + "   }  "
-          + "  ] "
-          + " }, "
-          + " \"aggregations\": [  "
-          + "  {   "
-          + "   \"type\": \"longSum\",   "
-          + "   \"name\": \"count\",   "
-          + "   \"fieldName\": \"count\"  "
-          + "  },  "
-          + "  {   "
-          + "   \"type\": \"doubleSum\",   "
-          + "   \"name\": \"some_metric\",   "
-          + "   \"fieldName\": \"some_metric\"  "
-          + "  } "
-          + " ], "
-          + " \"postAggregations\": [  "
-          + "  {   "
-          + "   \"type\": \"arithmetic\",   "
-          + "   \"name\": \"sample_divide\",   "
-          + "   \"fn\": \"/\",   "
-          + "   \"fields\": [    "
-          + "    {     "
-          + "     \"type\": \"fieldAccess\",     "
-          + "     \"name\": \"some_metric\",     "
-          + "     \"fieldName\": \"some_metric\"    "
-          + "    },    "
-          + "    {     "
-          + "     \"type\": \"fieldAccess\",     "
-          + "     \"name\": \"count\",     "
-          + "     \"fieldName\": \"count\"    "
-          + "    }   "
-          + "   ]  "
-          + "  } "
-          + " ], "
-          + " \"intervals\": [  "
-          + "  \"2013-08-31T00:00:00.000/2013-09-03T00:00:00.000\" "
-          + " ]}";
+                  + " \"dataSource\": \"sample_data\", "
+                  + " \"dimension\": \"sample_dim\", "
+                  + " \"threshold\": 5, "
+                  + " \"metric\": \"count\", "
+                  + " \"granularity\": \"all\", "
+                  + " \"filter\": {  "
+                  + "  \"type\": \"and\",  "
+                  + "  \"fields\": [   "
+                  + "   {    "
+                  + "    \"type\": \"selector\",    "
+                  + "    \"dimension\": \"dim1\",    "
+                  + "    \"value\": \"some_value\"   "
+                  + "   },   "
+                  + "   {    "
+                  + "    \"type\": \"selector\",    "
+                  + "    \"dimension\": \"dim2\",    "
+                  + "    \"value\": \"some_other_val\"   "
+                  + "   }  "
+                  + "  ] "
+                  + " }, "
+                  + " \"aggregations\": [  "
+                  + "  {   "
+                  + "   \"type\": \"longSum\",   "
+                  + "   \"name\": \"count\",   "
+                  + "   \"fieldName\": \"count\"  "
+                  + "  },  "
+                  + "  {   "
+                  + "   \"type\": \"doubleSum\",   "
+                  + "   \"name\": \"some_metric\",   "
+                  + "   \"fieldName\": \"some_metric\"  "
+                  + "  } "
+                  + " ], "
+                  + " \"postAggregations\": [  "
+                  + "  {   "
+                  + "   \"type\": \"arithmetic\",   "
+                  + "   \"name\": \"sample_divide\",   "
+                  + "   \"fn\": \"/\",   "
+                  + "   \"fields\": [    "
+                  + "    {     "
+                  + "     \"type\": \"fieldAccess\",     "
+                  + "     \"name\": \"some_metric\",     "
+                  + "     \"fieldName\": \"some_metric\"    "
+                  + "    },    "
+                  + "    {     "
+                  + "     \"type\": \"fieldAccess\",     "
+                  + "     \"name\": \"count\",     "
+                  + "     \"fieldName\": \"count\"    "
+                  + "    }   "
+                  + "   ]  "
+                  + "  } "
+                  + " ], "
+                  + " \"intervals\": [  "
+                  + "  \"2013-08-31T00:00:00.000/2013-09-03T00:00:00.000\" "
+                  + " ]}";
+
   // TopN query results
   private static final String TOPN_QUERY_RESULTS =
           "[ "
-          + " {  "
-          + "  \"timestamp\": \"2013-08-31T00:00:00.000Z\",  "
-          + "  \"result\": [   "
-          + "   {   "
-          + "     \"sample_dim\": \"dim1_val\",   "
-          + "     \"count\": 111,   "
-          + "     \"some_metric\": 10669,   "
-          + "     \"sample_divide\": 96.11711711711712   "
-          + "   },   "
-          + "   {   "
-          + "     \"sample_dim\": \"another_dim1_val\",   "
-          + "     \"count\": 88,   "
-          + "     \"some_metric\": 28344,   "
-          + "     \"sample_divide\": 322.09090909090907   "
-          + "   },   "
-          + "   {   "
-          + "     \"sample_dim\": \"dim1_val3\",   "
-          + "     \"count\": 70,   "
-          + "     \"some_metric\": 871,   "
-          + "     \"sample_divide\": 12.442857142857143   "
-          + "   },   "
-          + "   {   "
-          + "     \"sample_dim\": \"dim1_val4\",   "
-          + "     \"count\": 62,   "
-          + "     \"some_metric\": 815,   "
-          + "     \"sample_divide\": 13.14516129032258   "
-          + "   },   "
-          + "   {   "
-          + "     \"sample_dim\": \"dim1_val5\",   "
-          + "     \"count\": 60,   "
-          + "     \"some_metric\": 2787,   "
-          + "     \"sample_divide\": 46.45   "
-          + "   }  "
-          + "  ] "
-          + " }]";
+                  + " {  "
+                  + "  \"timestamp\": \"2013-08-31T00:00:00.000Z\",  "
+                  + "  \"result\": [   "
+                  + "   {   "
+                  + "     \"sample_dim\": \"dim1_val\",   "
+                  + "     \"count\": 111,   "
+                  + "     \"some_metric\": 10669,   "
+                  + "     \"sample_divide\": 96.11711711711712   "
+                  + "   },   "
+                  + "   {   "
+                  + "     \"sample_dim\": \"another_dim1_val\",   "
+                  + "     \"count\": 88,   "
+                  + "     \"some_metric\": 28344,   "
+                  + "     \"sample_divide\": 322.09090909090907   "
+                  + "   },   "
+                  + "   {   "
+                  + "     \"sample_dim\": \"dim1_val3\",   "
+                  + "     \"count\": 70,   "
+                  + "     \"some_metric\": 871,   "
+                  + "     \"sample_divide\": 12.442857142857143   "
+                  + "   },   "
+                  + "   {   "
+                  + "     \"sample_dim\": \"dim1_val4\",   "
+                  + "     \"count\": 62,   "
+                  + "     \"some_metric\": 815,   "
+                  + "     \"sample_divide\": 13.14516129032258   "
+                  + "   },   "
+                  + "   {   "
+                  + "     \"sample_dim\": \"dim1_val5\",   "
+                  + "     \"count\": 60,   "
+                  + "     \"some_metric\": 2787,   "
+                  + "     \"sample_divide\": 46.45   "
+                  + "   }  "
+                  + "  ] "
+                  + " }]";
+
   // TopN query results as records
   private static final Object[][] TOPN_QUERY_RESULTS_RECORDS = new Object[][] {
-    new Object[] { new TimestampWritable(new Timestamp(1377907200000L)), new Text("dim1_val"), new LongWritable(111), new FloatWritable(10669F), new FloatWritable(96.11711711711712F) } ,
-    new Object[] { new TimestampWritable(new Timestamp(1377907200000L)), new Text("another_dim1_val"), new LongWritable(88), new FloatWritable(28344F), new FloatWritable(322.09090909090907F) } ,
-    new Object[] { new TimestampWritable(new Timestamp(1377907200000L)), new Text("dim1_val3"), new LongWritable(70), new FloatWritable(871F), new FloatWritable(12.442857142857143F) } ,
-    new Object[] { new TimestampWritable(new Timestamp(1377907200000L)), new Text("dim1_val4"), new LongWritable(62), new FloatWritable(815F), new FloatWritable(13.14516129032258F) } ,
-    new Object[] { new TimestampWritable(new Timestamp(1377907200000L)), new Text("dim1_val5"), new LongWritable(60), new FloatWritable(2787F), new FloatWritable(46.45F) }
+          new Object[] { new TimestampWritable(new Timestamp(1377907200000L)), new Text("dim1_val"),
+                  new LongWritable(111), new FloatWritable(10669F),
+                  new FloatWritable(96.11711711711712F) },
+          new Object[] { new TimestampWritable(new Timestamp(1377907200000L)),
+                  new Text("another_dim1_val"), new LongWritable(88), new FloatWritable(28344F),
+                  new FloatWritable(322.09090909090907F) },
+          new Object[] { new TimestampWritable(new Timestamp(1377907200000L)),
+                  new Text("dim1_val3"), new LongWritable(70), new FloatWritable(871F),
+                  new FloatWritable(12.442857142857143F) },
+          new Object[] { new TimestampWritable(new Timestamp(1377907200000L)),
+                  new Text("dim1_val4"), new LongWritable(62), new FloatWritable(815F),
+                  new FloatWritable(13.14516129032258F) },
+          new Object[] { new TimestampWritable(new Timestamp(1377907200000L)),
+                  new Text("dim1_val5"), new LongWritable(60), new FloatWritable(2787F),
+                  new FloatWritable(46.45F) }
   };
 
   // GroupBy query
   private static final String GROUP_BY_QUERY =
           "{ "
-          + " \"queryType\": \"groupBy\", "
-          + " \"dataSource\": \"sample_datasource\", "
-          + " \"granularity\": \"day\", "
-          + " \"dimensions\": [\"country\", \"device\"], "
-          + " \"limitSpec\": {"
-          + " \"type\": \"default\","
-          + " \"limit\": 5000,"
-          + " \"columns\": [\"country\", \"data_transfer\"] }, "
-          + " \"filter\": {  "
-          + "  \"type\": \"and\",  "
-          + "  \"fields\": [   "
-          + "   { \"type\": \"selector\", \"dimension\": \"carrier\", \"value\": \"AT&T\" },   "
-          + "   { \"type\": \"or\",     "
-          + "    \"fields\": [     "
-          + "     { \"type\": \"selector\", \"dimension\": \"make\", \"value\": \"Apple\" },     "
-          + "     { \"type\": \"selector\", \"dimension\": \"make\", \"value\": \"Samsung\" }    "
-          + "    ]   "
-          + "   }  "
-          + "  ] "
-          + " }, "
-          + " \"aggregations\": [  "
-          + "  { \"type\": \"longSum\", \"name\": \"total_usage\", \"fieldName\": \"user_count\" },  "
-          + "  { \"type\": \"doubleSum\", \"name\": \"data_transfer\", \"fieldName\": \"data_transfer\" } "
-          + " ], "
-          + " \"postAggregations\": [  "
-          + "  { \"type\": \"arithmetic\",  "
-          + "    \"name\": \"avg_usage\",  "
-          + "    \"fn\": \"/\",  "
-          + "    \"fields\": [   "
-          + "     { \"type\": \"fieldAccess\", \"fieldName\": \"data_transfer\" },   "
-          + "     { \"type\": \"fieldAccess\", \"fieldName\": \"total_usage\" }  "
-          + "    ]  "
-          + "  } "
-          + " ], "
-          + " \"intervals\": [ \"2012-01-01T00:00:00.000/2012-01-03T00:00:00.000\" ], "
-          + " \"having\": {  "
-          + "  \"type\": \"greaterThan\",  "
-          + "  \"aggregation\": \"total_usage\",  "
-          + "  \"value\": 100 "
-          + " }}";
+                  + " \"queryType\": \"groupBy\", "
+                  + " \"dataSource\": \"sample_datasource\", "
+                  + " \"granularity\": \"day\", "
+                  + " \"dimensions\": [\"country\", \"device\"], "
+                  + " \"limitSpec\": {"
+                  + " \"type\": \"default\","
+                  + " \"limit\": 5000,"
+                  + " \"columns\": [\"country\", \"data_transfer\"] }, "
+                  + " \"filter\": {  "
+                  + "  \"type\": \"and\",  "
+                  + "  \"fields\": [   "
+                  + "   { \"type\": \"selector\", \"dimension\": \"carrier\", \"value\": \"AT&T\" },   "
+                  + "   { \"type\": \"or\",     "
+                  + "    \"fields\": [     "
+                  + "     { \"type\": \"selector\", \"dimension\": \"make\", \"value\": \"Apple\" },     "
+                  + "     { \"type\": \"selector\", \"dimension\": \"make\", \"value\": \"Samsung\" }    "
+                  + "    ]   "
+                  + "   }  "
+                  + "  ] "
+                  + " }, "
+                  + " \"aggregations\": [  "
+                  + "  { \"type\": \"longSum\", \"name\": \"total_usage\", \"fieldName\": \"user_count\" },  "
+                  + "  { \"type\": \"doubleSum\", \"name\": \"data_transfer\", \"fieldName\": \"data_transfer\" } "
+                  + " ], "
+                  + " \"postAggregations\": [  "
+                  + "  { \"type\": \"arithmetic\",  "
+                  + "    \"name\": \"avg_usage\",  "
+                  + "    \"fn\": \"/\",  "
+                  + "    \"fields\": [   "
+                  + "     { \"type\": \"fieldAccess\", \"fieldName\": \"data_transfer\" },   "
+                  + "     { \"type\": \"fieldAccess\", \"fieldName\": \"total_usage\" }  "
+                  + "    ]  "
+                  + "  } "
+                  + " ], "
+                  + " \"intervals\": [ \"2012-01-01T00:00:00.000/2012-01-03T00:00:00.000\" ], "
+                  + " \"having\": {  "
+                  + "  \"type\": \"greaterThan\",  "
+                  + "  \"aggregation\": \"total_usage\",  "
+                  + "  \"value\": 100 "
+                  + " }}";
+
   // GroupBy query results
   private static final String GROUP_BY_QUERY_RESULTS =
           "[  "
-          + " {  "
-          + "  \"version\" : \"v1\",  "
-          + "  \"timestamp\" : \"2012-01-01T00:00:00.000Z\",  "
-          + "  \"event\" : {   "
-          + "   \"country\" : \"India\",   "
-          + "   \"device\" : \"phone\",   "
-          + "   \"total_usage\" : 88,   "
-          + "   \"data_transfer\" : 29.91233453,   "
-          + "   \"avg_usage\" : 60.32  "
-          + "  } "
-          + " },  "
-          + " {  "
-          + "  \"version\" : \"v1\",  "
-          + "  \"timestamp\" : \"2012-01-01T00:00:12.000Z\",  "
-          + "  \"event\" : {   "
-          + "   \"country\" : \"Spain\",   "
-          + "   \"device\" : \"pc\",   "
-          + "   \"total_usage\" : 16,   "
-          + "   \"data_transfer\" : 172.93494959,   "
-          + "   \"avg_usage\" : 6.333333  "
-          + "  } "
-          + " }]";
+                  + " {  "
+                  + "  \"version\" : \"v1\",  "
+                  + "  \"timestamp\" : \"2012-01-01T00:00:00.000Z\",  "
+                  + "  \"event\" : {   "
+                  + "   \"country\" : \"India\",   "
+                  + "   \"device\" : \"phone\",   "
+                  + "   \"total_usage\" : 88,   "
+                  + "   \"data_transfer\" : 29.91233453,   "
+                  + "   \"avg_usage\" : 60.32  "
+                  + "  } "
+                  + " },  "
+                  + " {  "
+                  + "  \"version\" : \"v1\",  "
+                  + "  \"timestamp\" : \"2012-01-01T00:00:12.000Z\",  "
+                  + "  \"event\" : {   "
+                  + "   \"country\" : \"Spain\",   "
+                  + "   \"device\" : \"pc\",   "
+                  + "   \"total_usage\" : 16,   "
+                  + "   \"data_transfer\" : 172.93494959,   "
+                  + "   \"avg_usage\" : 6.333333  "
+                  + "  } "
+                  + " }]";
+
   // GroupBy query results as records
   private static final Object[][] GROUP_BY_QUERY_RESULTS_RECORDS = new Object[][] {
-    new Object[] { new TimestampWritable(new Timestamp(1325376000000L)), new Text("India"), new Text("phone"), new LongWritable(88), new FloatWritable(29.91233453F), new FloatWritable(60.32F) } ,
-    new Object[] { new TimestampWritable(new Timestamp(1325376012000L)), new Text("Spain"), new Text("pc"), new LongWritable(16), new FloatWritable(172.93494959F), new FloatWritable(6.333333F) }
+          new Object[] { new TimestampWritable(new Timestamp(1325376000000L)), new Text("India"),
+                  new Text("phone"), new LongWritable(88), new FloatWritable(29.91233453F),
+                  new FloatWritable(60.32F) },
+          new Object[] { new TimestampWritable(new Timestamp(1325376012000L)), new Text("Spain"),
+                  new Text("pc"), new LongWritable(16), new FloatWritable(172.93494959F),
+                  new FloatWritable(6.333333F) }
   };
 
   // Select query
   private static final String SELECT_QUERY =
           "{   \"queryType\": \"select\",  "
-          + " \"dataSource\": \"wikipedia\",   \"descending\": \"false\",  "
-          + " \"dimensions\":[\"robot\",\"namespace\",\"anonymous\",\"unpatrolled\",\"page\",\"language\",\"newpage\",\"user\"],  "
-          + " \"metrics\":[\"count\",\"added\",\"delta\",\"variation\",\"deleted\"],  "
-          + " \"granularity\": \"all\",  "
-          + " \"intervals\": [     \"2013-01-01/2013-01-02\"   ],  "
-          + " \"pagingSpec\":{\"pagingIdentifiers\": {}, \"threshold\":5} }";
+                  + " \"dataSource\": \"wikipedia\",   \"descending\": \"false\",  "
+                  + " \"dimensions\":[\"robot\",\"namespace\",\"anonymous\",\"unpatrolled\",\"page\",\"language\",\"newpage\",\"user\"],  "
+                  + " \"metrics\":[\"count\",\"added\",\"delta\",\"variation\",\"deleted\"],  "
+                  + " \"granularity\": \"all\",  "
+                  + " \"intervals\": [     \"2013-01-01/2013-01-02\"   ],  "
+                  + " \"pagingSpec\":{\"pagingIdentifiers\": {}, \"threshold\":5} }";
+
   // Select query results
   private static final String SELECT_QUERY_RESULTS =
           "[{ "
-          + " \"timestamp\" : \"2013-01-01T00:00:00.000Z\", "
-          + " \"result\" : {  "
-          + "  \"pagingIdentifiers\" : {   "
-          + "   \"wikipedia_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\" : 4    }, "
-          + "   \"events\" : [ {  "
-          + "    \"segmentId\" : \"wikipedia_editstream_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\",  "
-          + "    \"offset\" : 0,  "
-          + "    \"event\" : {   "
-          + "     \"timestamp\" : \"2013-01-01T00:00:00.000Z\",   "
-          + "     \"robot\" : \"1\",   "
-          + "     \"namespace\" : \"article\",   "
-          + "     \"anonymous\" : \"0\",   "
-          + "     \"unpatrolled\" : \"0\",   "
-          + "     \"page\" : \"11._korpus_(NOVJ)\",   "
-          + "     \"language\" : \"sl\",   "
-          + "     \"newpage\" : \"0\",   "
-          + "     \"user\" : \"EmausBot\",   "
-          + "     \"count\" : 1.0,   "
-          + "     \"added\" : 39.0,   "
-          + "     \"delta\" : 39.0,   "
-          + "     \"variation\" : 39.0,   "
-          + "     \"deleted\" : 0.0  "
-          + "    } "
-          + "   }, {  "
-          + "    \"segmentId\" : \"wikipedia_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\",  "
-          + "    \"offset\" : 1,  "
-          + "    \"event\" : {   "
-          + "     \"timestamp\" : \"2013-01-01T00:00:00.000Z\",   "
-          + "     \"robot\" : \"0\",   "
-          + "     \"namespace\" : \"article\",   "
-          + "     \"anonymous\" : \"0\",   "
-          + "     \"unpatrolled\" : \"0\",   "
-          + "     \"page\" : \"112_U.S._580\",   "
-          + "     \"language\" : \"en\",   "
-          + "     \"newpage\" : \"1\",   "
-          + "     \"user\" : \"MZMcBride\",   "
-          + "     \"count\" : 1.0,   "
-          + "     \"added\" : 70.0,   "
-          + "     \"delta\" : 70.0,   "
-          + "     \"variation\" : 70.0,   "
-          + "     \"deleted\" : 0.0  "
-          + "    } "
-          + "   }, {  "
-          + "    \"segmentId\" : \"wikipedia_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\",  "
-          + "    \"offset\" : 2,  "
-          + "    \"event\" : {   "
-          + "     \"timestamp\" : \"2013-01-01T00:00:12.000Z\",   "
-          + "     \"robot\" : \"0\",   "
-          + "     \"namespace\" : \"article\",   "
-          + "     \"anonymous\" : \"0\",   "
-          + "     \"unpatrolled\" : \"0\",   "
-          + "     \"page\" : \"113_U.S._243\",   "
-          + "     \"language\" : \"en\",   "
-          + "     \"newpage\" : \"1\",   "
-          + "     \"user\" : \"MZMcBride\",   "
-          + "     \"count\" : 1.0,   "
-          + "     \"added\" : 77.0,   "
-          + "     \"delta\" : 77.0,   "
-          + "     \"variation\" : 77.0,   "
-          + "     \"deleted\" : 0.0  "
-          + "    } "
-          + "   }, {  "
-          + "    \"segmentId\" : \"wikipedia_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\",  "
-          + "    \"offset\" : 3,  "
-          + "    \"event\" : {   "
-          + "     \"timestamp\" : \"2013-01-01T00:00:12.000Z\",   "
-          + "     \"robot\" : \"0\",   "
-          + "     \"namespace\" : \"article\",   "
-          + "     \"anonymous\" : \"0\",   "
-          + "     \"unpatrolled\" : \"0\",   "
-          + "     \"page\" : \"113_U.S._73\",   "
-          + "     \"language\" : \"en\",   "
-          + "     \"newpage\" : \"1\",   "
-          + "     \"user\" : \"MZMcBride\",   "
-          + "     \"count\" : 1.0,   "
-          + "     \"added\" : 70.0,   "
-          + "     \"delta\" : 70.0,   "
-          + "     \"variation\" : 70.0,   "
-          + "     \"deleted\" : 0.0  "
-          + "    } "
-          + "   }, {  "
-          + "    \"segmentId\" : \"wikipedia_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\",  "
-          + "    \"offset\" : 4,  "
-          + "    \"event\" : {   "
-          + "     \"timestamp\" : \"2013-01-01T00:00:12.000Z\",   "
-          + "     \"robot\" : \"0\",   "
-          + "     \"namespace\" : \"article\",   "
-          + "     \"anonymous\" : \"0\",   "
-          + "     \"unpatrolled\" : \"0\",   "
-          + "     \"page\" : \"113_U.S._756\",   "
-          + "     \"language\" : \"en\",   "
-          + "     \"newpage\" : \"1\",   "
-          + "     \"user\" : \"MZMcBride\",   "
-          + "     \"count\" : 1.0,   "
-          + "     \"added\" : 68.0,   "
-          + "     \"delta\" : 68.0,   "
-          + "     \"variation\" : 68.0,   "
-          + "     \"deleted\" : 0.0  "
-          + "    } "
-          + "   } ]  }} ]";
+                  + " \"timestamp\" : \"2013-01-01T00:00:00.000Z\", "
+                  + " \"result\" : {  "
+                  + "  \"pagingIdentifiers\" : {   "
+                  + "   \"wikipedia_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\" : 4    }, "
+                  + "   \"events\" : [ {  "
+                  + "    \"segmentId\" : \"wikipedia_editstream_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\",  "
+                  + "    \"offset\" : 0,  "
+                  + "    \"event\" : {   "
+                  + "     \"timestamp\" : \"2013-01-01T00:00:00.000Z\",   "
+                  + "     \"robot\" : \"1\",   "
+                  + "     \"namespace\" : \"article\",   "
+                  + "     \"anonymous\" : \"0\",   "
+                  + "     \"unpatrolled\" : \"0\",   "
+                  + "     \"page\" : \"11._korpus_(NOVJ)\",   "
+                  + "     \"language\" : \"sl\",   "
+                  + "     \"newpage\" : \"0\",   "
+                  + "     \"user\" : \"EmausBot\",   "
+                  + "     \"count\" : 1.0,   "
+                  + "     \"added\" : 39.0,   "
+                  + "     \"delta\" : 39.0,   "
+                  + "     \"variation\" : 39.0,   "
+                  + "     \"deleted\" : 0.0  "
+                  + "    } "
+                  + "   }, {  "
+                  + "    \"segmentId\" : \"wikipedia_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\",  "
+                  + "    \"offset\" : 1,  "
+                  + "    \"event\" : {   "
+                  + "     \"timestamp\" : \"2013-01-01T00:00:00.000Z\",   "
+                  + "     \"robot\" : \"0\",   "
+                  + "     \"namespace\" : \"article\",   "
+                  + "     \"anonymous\" : \"0\",   "
+                  + "     \"unpatrolled\" : \"0\",   "
+                  + "     \"page\" : \"112_U.S._580\",   "
+                  + "     \"language\" : \"en\",   "
+                  + "     \"newpage\" : \"1\",   "
+                  + "     \"user\" : \"MZMcBride\",   "
+                  + "     \"count\" : 1.0,   "
+                  + "     \"added\" : 70.0,   "
+                  + "     \"delta\" : 70.0,   "
+                  + "     \"variation\" : 70.0,   "
+                  + "     \"deleted\" : 0.0  "
+                  + "    } "
+                  + "   }, {  "
+                  + "    \"segmentId\" : \"wikipedia_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\",  "
+                  + "    \"offset\" : 2,  "
+                  + "    \"event\" : {   "
+                  + "     \"timestamp\" : \"2013-01-01T00:00:12.000Z\",   "
+                  + "     \"robot\" : \"0\",   "
+                  + "     \"namespace\" : \"article\",   "
+                  + "     \"anonymous\" : \"0\",   "
+                  + "     \"unpatrolled\" : \"0\",   "
+                  + "     \"page\" : \"113_U.S._243\",   "
+                  + "     \"language\" : \"en\",   "
+                  + "     \"newpage\" : \"1\",   "
+                  + "     \"user\" : \"MZMcBride\",   "
+                  + "     \"count\" : 1.0,   "
+                  + "     \"added\" : 77.0,   "
+                  + "     \"delta\" : 77.0,   "
+                  + "     \"variation\" : 77.0,   "
+                  + "     \"deleted\" : 0.0  "
+                  + "    } "
+                  + "   }, {  "
+                  + "    \"segmentId\" : \"wikipedia_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\",  "
+                  + "    \"offset\" : 3,  "
+                  + "    \"event\" : {   "
+                  + "     \"timestamp\" : \"2013-01-01T00:00:12.000Z\",   "
+                  + "     \"robot\" : \"0\",   "
+                  + "     \"namespace\" : \"article\",   "
+                  + "     \"anonymous\" : \"0\",   "
+                  + "     \"unpatrolled\" : \"0\",   "
+                  + "     \"page\" : \"113_U.S._73\",   "
+                  + "     \"language\" : \"en\",   "
+                  + "     \"newpage\" : \"1\",   "
+                  + "     \"user\" : \"MZMcBride\",   "
+                  + "     \"count\" : 1.0,   "
+                  + "     \"added\" : 70.0,   "
+                  + "     \"delta\" : 70.0,   "
+                  + "     \"variation\" : 70.0,   "
+                  + "     \"deleted\" : 0.0  "
+                  + "    } "
+                  + "   }, {  "
+                  + "    \"segmentId\" : \"wikipedia_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\",  "
+                  + "    \"offset\" : 4,  "
+                  + "    \"event\" : {   "
+                  + "     \"timestamp\" : \"2013-01-01T00:00:12.000Z\",   "
+                  + "     \"robot\" : \"0\",   "
+                  + "     \"namespace\" : \"article\",   "
+                  + "     \"anonymous\" : \"0\",   "
+                  + "     \"unpatrolled\" : \"0\",   "
+                  + "     \"page\" : \"113_U.S._756\",   "
+                  + "     \"language\" : \"en\",   "
+                  + "     \"newpage\" : \"1\",   "
+                  + "     \"user\" : \"MZMcBride\",   "
+                  + "     \"count\" : 1.0,   "
+                  + "     \"added\" : 68.0,   "
+                  + "     \"delta\" : 68.0,   "
+                  + "     \"variation\" : 68.0,   "
+                  + "     \"deleted\" : 0.0  "
+                  + "    } "
+                  + "   } ]  }} ]";
+
   // Select query results as records
   private static final Object[][] SELECT_QUERY_RESULTS_RECORDS = new Object[][] {
-    new Object[] { new TimestampWritable(new Timestamp(1356998400000L)), new Text("1"), new Text("article"), new Text("0"), new Text("0"),
-        new Text("11._korpus_(NOVJ)"), new Text("sl"), new Text("0"), new Text("EmausBot"),
-        new FloatWritable(1.0F), new FloatWritable(39.0F), new FloatWritable(39.0F), new FloatWritable(39.0F), new FloatWritable(0.0F) } ,
-    new Object[] { new TimestampWritable(new Timestamp(1356998400000L)), new Text("0"), new Text("article"), new Text("0"), new Text("0"),
-        new Text("112_U.S._580"), new Text("en"), new Text("1"), new Text("MZMcBride"),
-        new FloatWritable(1.0F), new FloatWritable(70.0F), new FloatWritable(70.0F), new FloatWritable(70.0F), new FloatWritable(0.0F) } ,
-    new Object[] { new TimestampWritable(new Timestamp(1356998412000L)), new Text("0"), new Text("article"), new Text("0"), new Text("0"),
-        new Text("113_U.S._243"), new Text("en"), new Text("1"), new Text("MZMcBride"),
-        new FloatWritable(1.0F), new FloatWritable(77.0F), new FloatWritable(77.0F), new FloatWritable(77.0F), new FloatWritable(0.0F) } ,
-    new Object[] { new TimestampWritable(new Timestamp(1356998412000L)), new Text("0"), new Text("article"), new Text("0"), new Text("0"),
-        new Text("113_U.S._73"), new Text("en"), new Text("1"), new Text("MZMcBride"),
-        new FloatWritable(1.0F), new FloatWritable(70.0F), new FloatWritable(70.0F), new FloatWritable(70.0F), new FloatWritable(0.0F) } ,
-    new Object[] { new TimestampWritable(new Timestamp(1356998412000L)), new Text("0"), new Text("article"), new Text("0"), new Text("0"),
-        new Text("113_U.S._756"), new Text("en"), new Text("1"), new Text("MZMcBride"),
-        new FloatWritable(1.0F), new FloatWritable(68.0F), new FloatWritable(68.0F), new FloatWritable(68.0F), new FloatWritable(0.0F) }
+          new Object[] { new TimestampWritable(new Timestamp(1356998400000L)), new Text("1"),
+                  new Text("article"), new Text("0"), new Text("0"),
+                  new Text("11._korpus_(NOVJ)"), new Text("sl"), new Text("0"),
+                  new Text("EmausBot"),
+                  new FloatWritable(1.0F), new FloatWritable(39.0F), new FloatWritable(39.0F),
+                  new FloatWritable(39.0F), new FloatWritable(0.0F) },
+          new Object[] { new TimestampWritable(new Timestamp(1356998400000L)), new Text("0"),
+                  new Text("article"), new Text("0"), new Text("0"),
+                  new Text("112_U.S._580"), new Text("en"), new Text("1"), new Text("MZMcBride"),
+                  new FloatWritable(1.0F), new FloatWritable(70.0F), new FloatWritable(70.0F),
+                  new FloatWritable(70.0F), new FloatWritable(0.0F) },
+          new Object[] { new TimestampWritable(new Timestamp(1356998412000L)), new Text("0"),
+                  new Text("article"), new Text("0"), new Text("0"),
+                  new Text("113_U.S._243"), new Text("en"), new Text("1"), new Text("MZMcBride"),
+                  new FloatWritable(1.0F), new FloatWritable(77.0F), new FloatWritable(77.0F),
+                  new FloatWritable(77.0F), new FloatWritable(0.0F) },
+          new Object[] { new TimestampWritable(new Timestamp(1356998412000L)), new Text("0"),
+                  new Text("article"), new Text("0"), new Text("0"),
+                  new Text("113_U.S._73"), new Text("en"), new Text("1"), new Text("MZMcBride"),
+                  new FloatWritable(1.0F), new FloatWritable(70.0F), new FloatWritable(70.0F),
+                  new FloatWritable(70.0F), new FloatWritable(0.0F) },
+          new Object[] { new TimestampWritable(new Timestamp(1356998412000L)), new Text("0"),
+                  new Text("article"), new Text("0"), new Text("0"),
+                  new Text("113_U.S._756"), new Text("en"), new Text("1"), new Text("MZMcBride"),
+                  new FloatWritable(1.0F), new FloatWritable(68.0F), new FloatWritable(68.0F),
+                  new FloatWritable(68.0F), new FloatWritable(0.0F) }
   };
 
-
   /**
    * Test the default behavior of the objects and object inspectors.
-   * @throws IOException 
-   * @throws IllegalAccessException 
-   * @throws IllegalArgumentException 
-   * @throws SecurityException 
-   * @throws NoSuchFieldException 
-   * @throws JsonMappingException 
-   * @throws JsonParseException 
-   * @throws InvocationTargetException 
-   * @throws NoSuchMethodException 
+   * @throws IOException
+   * @throws IllegalAccessException
+   * @throws IllegalArgumentException
+   * @throws SecurityException
+   * @throws NoSuchFieldException
+   * @throws JsonMappingException
+   * @throws JsonParseException
+   * @throws InvocationTargetException
+   * @throws NoSuchMethodException
    */
   @Test
   public void testDruidSerDe()
@@ -457,25 +491,31 @@ public class TestDruidSerDe {
     tbl = createPropertiesQuery("sample_datasource", Query.TIMESERIES, TIMESERIES_QUERY);
     SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
     deserializeQueryResults(serDe, Query.TIMESERIES, TIMESERIES_QUERY,
-            TIMESERIES_QUERY_RESULTS, TIMESERIES_QUERY_RESULTS_RECORDS);
+            TIMESERIES_QUERY_RESULTS, TIMESERIES_QUERY_RESULTS_RECORDS
+    );
     // TopN query
     tbl = createPropertiesQuery("sample_data", Query.TOPN, TOPN_QUERY);
     SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
     deserializeQueryResults(serDe, Query.TOPN, TOPN_QUERY,
-            TOPN_QUERY_RESULTS, TOPN_QUERY_RESULTS_RECORDS);
+            TOPN_QUERY_RESULTS, TOPN_QUERY_RESULTS_RECORDS
+    );
     // GroupBy query
     tbl = createPropertiesQuery("sample_datasource", Query.GROUP_BY, GROUP_BY_QUERY);
     SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
     deserializeQueryResults(serDe, Query.GROUP_BY, GROUP_BY_QUERY,
-            GROUP_BY_QUERY_RESULTS, GROUP_BY_QUERY_RESULTS_RECORDS);
+            GROUP_BY_QUERY_RESULTS, GROUP_BY_QUERY_RESULTS_RECORDS
+    );
     // Select query
     tbl = createPropertiesQuery("wikipedia", Query.SELECT, SELECT_QUERY);
     SerDeUtils.initializeSerDe(serDe, conf, tbl, null);
     deserializeQueryResults(serDe, Query.SELECT, SELECT_QUERY,
-            SELECT_QUERY_RESULTS, SELECT_QUERY_RESULTS_RECORDS);
+            SELECT_QUERY_RESULTS, SELECT_QUERY_RESULTS_RECORDS
+    );
   }
 
-  private static Properties createPropertiesQuery(String dataSource, String queryType, String jsonQuery) {
+  private static Properties createPropertiesQuery(String dataSource, String queryType,
+          String jsonQuery
+  ) {
     Properties tbl = new Properties();
 
     // Set the configuration parameters
@@ -486,14 +526,15 @@ public class TestDruidSerDe {
   }
 
   private static void deserializeQueryResults(DruidSerDe serDe, String queryType, String jsonQuery,
-          String resultString, Object[][] records) throws SerDeException, JsonParseException,
+          String resultString, Object[][] records
+  ) throws SerDeException, JsonParseException,
           JsonMappingException, IOException, NoSuchFieldException, SecurityException,
           IllegalArgumentException, IllegalAccessException, InterruptedException,
           NoSuchMethodException, InvocationTargetException {
 
     // Initialize
     Query<?> query = null;
-    DruidQueryRecordReader<?,?> reader = null;
+    DruidQueryRecordReader<?, ?> reader = null;
     List<?> resultsList = null;
     ObjectMapper mapper = new DefaultObjectMapper();
     switch (queryType) {
@@ -501,25 +542,33 @@ public class TestDruidSerDe {
         query = mapper.readValue(jsonQuery, TimeseriesQuery.class);
         reader = new DruidTimeseriesQueryRecordReader();
         resultsList = mapper.readValue(resultString,
-                new TypeReference<List<Result<TimeseriesResultValue>>>() {});
+                new TypeReference<List<Result<TimeseriesResultValue>>>() {
+                }
+        );
         break;
       case Query.TOPN:
         query = mapper.readValue(jsonQuery, TopNQuery.class);
         reader = new DruidTopNQueryRecordReader();
         resultsList = mapper.readValue(resultString,
-                new TypeReference<List<Result<TopNResultValue>>>() {});
+                new TypeReference<List<Result<TopNResultValue>>>() {
+                }
+        );
         break;
       case Query.GROUP_BY:
         query = mapper.readValue(jsonQuery, GroupByQuery.class);
         reader = new DruidGroupByQueryRecordReader();
         resultsList = mapper.readValue(resultString,
-                new TypeReference<List<Row>>() {});
+                new TypeReference<List<Row>>() {
+                }
+        );
         break;
       case Query.SELECT:
         query = mapper.readValue(jsonQuery, SelectQuery.class);
         reader = new DruidSelectQueryRecordReader();
         resultsList = mapper.readValue(resultString,
-                new TypeReference<List<Result<SelectResultValue>>>() {});
+                new TypeReference<List<Result<SelectResultValue>>>() {
+                }
+        );
         break;
     }
 
@@ -534,7 +583,7 @@ public class TestDruidSerDe {
     }
     Field field2 = DruidQueryRecordReader.class.getDeclaredField("results");
     field2.setAccessible(true);
-    
+
     // Get the row structure
     StructObjectInspector oi = (StructObjectInspector) serDe.getObjectInspector();
     List<? extends StructField> fieldRefs = oi.getAllStructFieldRefs();
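
For context on the deserialization path the test above exercises: DruidSerDe parses the Druid response JSON with Jackson and hands each result row to Hive as writables. Below is a minimal, standalone sketch of that first step, assuming Druid 0.9.x class locations (io.druid.jackson.DefaultObjectMapper, io.druid.query.Result, io.druid.query.timeseries.TimeseriesResultValue, all visible in the diff's usage); it is an illustration only, not part of the patch.

    import java.util.List;

    import com.fasterxml.jackson.core.type.TypeReference;
    import com.fasterxml.jackson.databind.ObjectMapper;

    import io.druid.jackson.DefaultObjectMapper;
    import io.druid.query.Result;
    import io.druid.query.timeseries.TimeseriesResultValue;

    public class TimeseriesResultSketch {
      public static void main(String[] args) throws Exception {
        // One bucket of a timeseries response, in the same shape as
        // TIMESERIES_QUERY_RESULTS above.
        String json = "[{\"timestamp\":\"2012-01-01T00:00:00.000Z\","
                + "\"result\":{\"sample_name1\":0,\"sample_name2\":1.0,\"sample_divide\":2.2222}}]";
        ObjectMapper mapper = new DefaultObjectMapper();
        List<Result<TimeseriesResultValue>> results = mapper.readValue(json,
                new TypeReference<List<Result<TimeseriesResultValue>>>() {});
        // Each Result pairs the bucket timestamp with its aggregated metrics;
        // the SerDe maps these to TimestampWritable, LongWritable, FloatWritable, ...
        Result<TimeseriesResultValue> first = results.get(0);
        System.out.println(first.getTimestamp() + " -> "
                + first.getValue().getMetric("sample_name1"));
      }
    }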

http://git-wip-us.apache.org/repos/asf/hive/blob/590687bc/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java
index 9ccd48e..4fde3eb 100644
--- a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java
+++ b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java
@@ -22,6 +22,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
+import org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat;
 import org.joda.time.Interval;
 import org.joda.time.chrono.ISOChronology;
 import org.junit.Test;
@@ -29,14 +30,15 @@ import org.junit.Test;
 import junit.framework.TestCase;
 
 public class TestHiveDruidQueryBasedInputFormat extends TestCase {
-  
+
   @SuppressWarnings("unchecked")
   @Test
   public void testCreateSplitsIntervals() throws Exception {
-    HiveDruidQueryBasedInputFormat input = new HiveDruidQueryBasedInputFormat();
+    DruidQueryBasedInputFormat input = new DruidQueryBasedInputFormat();
 
-    Method method1 = HiveDruidQueryBasedInputFormat.class.getDeclaredMethod("createSplitsIntervals",
-            List.class, int.class);
+    Method method1 = DruidQueryBasedInputFormat.class.getDeclaredMethod("createSplitsIntervals",
+            List.class, int.class
+    );
     method1.setAccessible(true);
 
     List<Interval> intervals;
@@ -48,10 +50,14 @@ public class TestHiveDruidQueryBasedInputFormat extends TestCase {
     intervals.add(new Interval(1262304000000L, 1293840000000L, ISOChronology.getInstanceUTC()));
     resultList = (List<List<Interval>>) method1.invoke(input, intervals, 4);
     expectedResultList = new ArrayList<>();
-    expectedResultList.add(Arrays.asList(new Interval(1262304000000L, 1270188000000L, ISOChronology.getInstanceUTC())));
-    expectedResultList.add(Arrays.asList(new Interval(1270188000000L, 1278072000000L, ISOChronology.getInstanceUTC())));
-    expectedResultList.add(Arrays.asList(new Interval(1278072000000L, 1285956000000L, ISOChronology.getInstanceUTC())));
-    expectedResultList.add(Arrays.asList(new Interval(1285956000000L, 1293840000000L, ISOChronology.getInstanceUTC())));
+    expectedResultList.add(Arrays
+            .asList(new Interval(1262304000000L, 1270188000000L, ISOChronology.getInstanceUTC())));
+    expectedResultList.add(Arrays
+            .asList(new Interval(1270188000000L, 1278072000000L, ISOChronology.getInstanceUTC())));
+    expectedResultList.add(Arrays
+            .asList(new Interval(1278072000000L, 1285956000000L, ISOChronology.getInstanceUTC())));
+    expectedResultList.add(Arrays
+            .asList(new Interval(1285956000000L, 1293840000000L, ISOChronology.getInstanceUTC())));
     assertEquals(expectedResultList, resultList);
 
     // Test 2 : two splits, create 4
@@ -60,11 +66,16 @@ public class TestHiveDruidQueryBasedInputFormat extends TestCase {
     intervals.add(new Interval(1325376000000L, 1356998400000L, ISOChronology.getInstanceUTC()));
     resultList = (List<List<Interval>>) method1.invoke(input, intervals, 4);
     expectedResultList = new ArrayList<>();
-    expectedResultList.add(Arrays.asList(new Interval(1262304000000L, 1278093600000L, ISOChronology.getInstanceUTC())));
-    expectedResultList.add(Arrays.asList(new Interval(1278093600000L, 1293840000000L, ISOChronology.getInstanceUTC()),
-            new Interval(1325376000000L, 1325419200000L, ISOChronology.getInstanceUTC())));
-    expectedResultList.add(Arrays.asList(new Interval(1325419200000L, 1341208800000L, ISOChronology.getInstanceUTC())));
-    expectedResultList.add(Arrays.asList(new Interval(1341208800000L, 1356998400000L, ISOChronology.getInstanceUTC())));
+    expectedResultList.add(Arrays
+            .asList(new Interval(1262304000000L, 1278093600000L, ISOChronology.getInstanceUTC())));
+    expectedResultList.add(Arrays
+            .asList(new Interval(1278093600000L, 1293840000000L, ISOChronology.getInstanceUTC()),
+                    new Interval(1325376000000L, 1325419200000L, ISOChronology.getInstanceUTC())
+            ));
+    expectedResultList.add(Arrays
+            .asList(new Interval(1325419200000L, 1341208800000L, ISOChronology.getInstanceUTC())));
+    expectedResultList.add(Arrays
+            .asList(new Interval(1341208800000L, 1356998400000L, ISOChronology.getInstanceUTC())));
     assertEquals(expectedResultList, resultList);
 
     // Test 3 : two splits, create 5
@@ -73,29 +84,49 @@ public class TestHiveDruidQueryBasedInputFormat extends TestCase {
     intervals.add(new Interval(1325376000000L, 1356998400000L, ISOChronology.getInstanceUTC()));
     resultList = (List<List<Interval>>) method1.invoke(input, intervals, 5);
     expectedResultList = new ArrayList<>();
-    expectedResultList.add(Arrays.asList(new Interval(1262304000000L, 1274935680000L, ISOChronology.getInstanceUTC())));
-    expectedResultList.add(Arrays.asList(new Interval(1274935680000L, 1287567360000L, ISOChronology.getInstanceUTC())));
-    expectedResultList.add(Arrays.asList(new Interval(1287567360000L, 1293840000000L, ISOChronology.getInstanceUTC()),
-            new Interval(1325376000000L, 1331735040000L, ISOChronology.getInstanceUTC())));
-    expectedResultList.add(Arrays.asList(new Interval(1331735040000L, 1344366720000L, ISOChronology.getInstanceUTC())));
-    expectedResultList.add(Arrays.asList(new Interval(1344366720000L, 1356998400000L, ISOChronology.getInstanceUTC())));
+    expectedResultList.add(Arrays
+            .asList(new Interval(1262304000000L, 1274935680000L, ISOChronology.getInstanceUTC())));
+    expectedResultList.add(Arrays
+            .asList(new Interval(1274935680000L, 1287567360000L, ISOChronology.getInstanceUTC())));
+    expectedResultList.add(Arrays
+            .asList(new Interval(1287567360000L, 1293840000000L, ISOChronology.getInstanceUTC()),
+                    new Interval(1325376000000L, 1331735040000L, ISOChronology.getInstanceUTC())
+            ));
+    expectedResultList.add(Arrays
+            .asList(new Interval(1331735040000L, 1344366720000L, ISOChronology.getInstanceUTC())));
+    expectedResultList.add(Arrays
+            .asList(new Interval(1344366720000L, 1356998400000L, ISOChronology.getInstanceUTC())));
     assertEquals(expectedResultList, resultList);
 
     // Test 4 : three splits, different ranges, create 6
     intervals = new ArrayList<>();
-    intervals.add(new Interval(1199145600000L, 1201824000000L, ISOChronology.getInstanceUTC())); // one month
-    intervals.add(new Interval(1325376000000L, 1356998400000L, ISOChronology.getInstanceUTC())); // one year
-    intervals.add(new Interval(1407283200000L, 1407888000000L, ISOChronology.getInstanceUTC())); // 7 days
+    intervals.add(new Interval(1199145600000L, 1201824000000L,
+            ISOChronology.getInstanceUTC()
+    )); // one month
+    intervals.add(new Interval(1325376000000L, 1356998400000L,
+            ISOChronology.getInstanceUTC()
+    )); // one year
+    intervals.add(new Interval(1407283200000L, 1407888000000L,
+            ISOChronology.getInstanceUTC()
+    )); // 7 days
     resultList = (List<List<Interval>>) method1.invoke(input, intervals, 6);
     expectedResultList = new ArrayList<>();
-    expectedResultList.add(Arrays.asList(new Interval(1199145600000L, 1201824000000L, ISOChronology.getInstanceUTC()),
-            new Interval(1325376000000L, 1328515200000L, ISOChronology.getInstanceUTC())));
-    expectedResultList.add(Arrays.asList(new Interval(1328515200000L, 1334332800000L, ISOChronology.getInstanceUTC())));
-    expectedResultList.add(Arrays.asList(new Interval(1334332800000L, 1340150400000L, ISOChronology.getInstanceUTC())));
-    expectedResultList.add(Arrays.asList(new Interval(1340150400000L, 1345968000000L, ISOChronology.getInstanceUTC())));
-    expectedResultList.add(Arrays.asList(new Interval(1345968000000L, 1351785600000L, ISOChronology.getInstanceUTC())));
-    expectedResultList.add(Arrays.asList(new Interval(1351785600000L, 1356998400000L, ISOChronology.getInstanceUTC()),
-            new Interval(1407283200000L, 1407888000000L, ISOChronology.getInstanceUTC())));
+    expectedResultList.add(Arrays
+            .asList(new Interval(1199145600000L, 1201824000000L, ISOChronology.getInstanceUTC()),
+                    new Interval(1325376000000L, 1328515200000L, ISOChronology.getInstanceUTC())
+            ));
+    expectedResultList.add(Arrays
+            .asList(new Interval(1328515200000L, 1334332800000L, ISOChronology.getInstanceUTC())));
+    expectedResultList.add(Arrays
+            .asList(new Interval(1334332800000L, 1340150400000L, ISOChronology.getInstanceUTC())));
+    expectedResultList.add(Arrays
+            .asList(new Interval(1340150400000L, 1345968000000L, ISOChronology.getInstanceUTC())));
+    expectedResultList.add(Arrays
+            .asList(new Interval(1345968000000L, 1351785600000L, ISOChronology.getInstanceUTC())));
+    expectedResultList.add(Arrays
+            .asList(new Interval(1351785600000L, 1356998400000L, ISOChronology.getInstanceUTC()),
+                    new Interval(1407283200000L, 1407888000000L, ISOChronology.getInstanceUTC())
+            ));
     assertEquals(expectedResultList, resultList);
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/590687bc/druid-handler/src/test/org/apache/hadoop/hive/ql/io/DruidRecordWriterTest.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/test/org/apache/hadoop/hive/ql/io/DruidRecordWriterTest.java b/druid-handler/src/test/org/apache/hadoop/hive/ql/io/DruidRecordWriterTest.java
new file mode 100644
index 0000000..a4272ee
--- /dev/null
+++ b/druid-handler/src/test/org/apache/hadoop/hive/ql/io/DruidRecordWriterTest.java
@@ -0,0 +1,221 @@
+package org.apache.hadoop.hive.ql.io;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Function;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
+import com.metamx.common.Granularity;
+import io.druid.data.input.Firehose;
+import io.druid.data.input.InputRow;
+import io.druid.data.input.impl.DimensionSchema;
+import io.druid.data.input.impl.DimensionsSpec;
+import io.druid.data.input.impl.InputRowParser;
+import io.druid.data.input.impl.MapInputRowParser;
+import io.druid.data.input.impl.StringDimensionSchema;
+import io.druid.data.input.impl.TimeAndDimsParseSpec;
+import io.druid.data.input.impl.TimestampSpec;
+import io.druid.granularity.QueryGranularities;
+import io.druid.query.aggregation.AggregatorFactory;
+import io.druid.query.aggregation.LongSumAggregatorFactory;
+import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
+import io.druid.segment.QueryableIndex;
+import io.druid.segment.QueryableIndexStorageAdapter;
+import io.druid.segment.indexing.DataSchema;
+import io.druid.segment.indexing.RealtimeTuningConfig;
+import io.druid.segment.indexing.granularity.UniformGranularitySpec;
+import io.druid.segment.loading.DataSegmentPusher;
+import io.druid.segment.loading.LocalDataSegmentPuller;
+import io.druid.segment.loading.LocalDataSegmentPusher;
+import io.druid.segment.loading.LocalDataSegmentPusherConfig;
+import io.druid.segment.loading.SegmentLoadingException;
+import io.druid.segment.realtime.firehose.IngestSegmentFirehose;
+import io.druid.segment.realtime.firehose.WindowedStorageAdapter;
+import io.druid.timeline.DataSegment;
+import org.apache.calcite.adapter.druid.DruidTable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.Constants;
+import org.apache.hadoop.hive.druid.DruidStorageHandler;
+import org.apache.hadoop.hive.druid.DruidStorageHandlerUtils;
+import org.apache.hadoop.hive.druid.io.DruidRecordWriter;
+import org.apache.hadoop.hive.druid.serde.DruidWritable;
+import org.joda.time.DateTime;
+import org.joda.time.Interval;
+import org.junit.Assert;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import javax.annotation.Nullable;
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+public class DruidRecordWriterTest {
+  private ObjectMapper objectMapper = DruidStorageHandlerUtils.JSON_MAPPER;
+
+  private static final Interval INTERVAL_FULL = new Interval("2014-10-22T00:00:00Z/P1D");
+
+  @Rule
+  public TemporaryFolder temporaryFolder = new TemporaryFolder();
+
+  private DruidRecordWriter druidRecordWriter;
+
+  final List<ImmutableMap<String, Object>> expectedRows = ImmutableList.of(
+          ImmutableMap.<String, Object>of(
+                  DruidTable.DEFAULT_TIMESTAMP_COLUMN,
+                  DateTime.parse("2014-10-22T00:00:00.000Z").getMillis(),
+                  "host", ImmutableList.of("a.example.com"),
+                  "visited_sum", 190L,
+                  "unique_hosts", 1.0d
+          ),
+          ImmutableMap.<String, Object>of(
+                  DruidTable.DEFAULT_TIMESTAMP_COLUMN,
+                  DateTime.parse("2014-10-22T01:00:00.000Z").getMillis(),
+                  "host", ImmutableList.of("b.example.com"),
+                  "visited_sum", 175L,
+                  "unique_hosts", 1.0d
+          ),
+          ImmutableMap.<String, Object>of(
+                  DruidTable.DEFAULT_TIMESTAMP_COLUMN,
+                  DateTime.parse("2014-10-22T02:00:00.000Z").getMillis(),
+                  "host", ImmutableList.of("c.example.com"),
+                  "visited_sum", 270L,
+                  "unique_hosts", 1.0d
+          )
+  );
+
+  // This test needs this patch: https://github.com/druid-io/druid/pull/3483
+  @Ignore
+  @Test
+  public void testWrite() throws IOException, SegmentLoadingException {
+
+    final String dataSourceName = "testDataSource";
+    final File segmentOutputDir = temporaryFolder.newFolder();
+    final File workingDir = temporaryFolder.newFolder();
+    Configuration config = new Configuration();
+
+    final InputRowParser inputRowParser = new MapInputRowParser(new TimeAndDimsParseSpec(
+            new TimestampSpec(DruidTable.DEFAULT_TIMESTAMP_COLUMN, "auto", null),
+            new DimensionsSpec(ImmutableList.<DimensionSchema>of(new StringDimensionSchema("host")),
+                    null, null
+            )
+    ));
+    final Map<String, Object> parserMap = objectMapper.convertValue(inputRowParser, Map.class);
+
+    DataSchema dataSchema = new DataSchema(
+            dataSourceName,
+            parserMap,
+            new AggregatorFactory[] {
+                    new LongSumAggregatorFactory("visited_sum", "visited_sum"),
+                    new HyperUniquesAggregatorFactory("unique_hosts", "unique_hosts")
+            },
+            new UniformGranularitySpec(
+                    Granularity.DAY, QueryGranularities.NONE, ImmutableList.of(INTERVAL_FULL)
+            ),
+            objectMapper
+    );
+
+    RealtimeTuningConfig tuningConfig = RealtimeTuningConfig
+            .makeDefaultTuningConfig(temporaryFolder.newFolder());
+    LocalFileSystem localFileSystem = FileSystem.getLocal(config);
+    DataSegmentPusher dataSegmentPusher = new LocalDataSegmentPusher(
+            new LocalDataSegmentPusherConfig() {
+              @Override
+              public File getStorageDirectory() {return segmentOutputDir;}
+            }, objectMapper);
+
+    Path segmentDescriptorPath = new Path(workingDir.getAbsolutePath(),
+            DruidStorageHandler.SEGMENTS_DESCRIPTOR_DIR_NAME
+    );
+    druidRecordWriter = new DruidRecordWriter(dataSchema, tuningConfig, dataSegmentPusher, 20,
+            segmentDescriptorPath, localFileSystem
+    );
+
+    List<DruidWritable> druidWritables = Lists.transform(expectedRows,
+            new Function<ImmutableMap<String, Object>, DruidWritable>() {
+              @Nullable
+              @Override
+              public DruidWritable apply(@Nullable ImmutableMap<String, Object> input
+              ) {
+                return new DruidWritable(ImmutableMap.<String, Object>builder().putAll(input)
+                        .put(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME,
+                                Granularity.DAY.truncate(
+                                        new DateTime((long) input
+                                                .get(DruidTable.DEFAULT_TIMESTAMP_COLUMN)))
+                                        .getMillis()
+                        ).build());
+              }
+            }
+    );
+    for (DruidWritable druidWritable : druidWritables) {
+      druidRecordWriter.write(druidWritable);
+    }
+    druidRecordWriter.close(false);
+    List<DataSegment> dataSegmentList = DruidStorageHandlerUtils
+            .getPublishedSegments(segmentDescriptorPath, config);
+    Assert.assertEquals(1, dataSegmentList.size());
+    File tmpUnzippedSegmentDir = temporaryFolder.newFolder();
+    new LocalDataSegmentPuller().getSegmentFiles(dataSegmentList.get(0), tmpUnzippedSegmentDir);
+    final QueryableIndex queryableIndex = DruidStorageHandlerUtils.INDEX_IO
+            .loadIndex(tmpUnzippedSegmentDir);
+
+    QueryableIndexStorageAdapter adapter = new QueryableIndexStorageAdapter(queryableIndex);
+
+    Firehose firehose = new IngestSegmentFirehose(
+            ImmutableList.of(new WindowedStorageAdapter(adapter, adapter.getInterval())),
+            ImmutableList.of("host"),
+            ImmutableList.of("visited_sum", "unique_hosts"),
+            null,
+            QueryGranularities.NONE
+    );
+
+    List<InputRow> rows = Lists.newArrayList();
+    while (firehose.hasMore()) {
+      rows.add(firehose.nextRow());
+    }
+
+    verifyRows(expectedRows, rows);
+
+  }
+
+  private void verifyRows(List<ImmutableMap<String, Object>> expectedRows,
+          List<InputRow> actualRows
+  ) {
+    System.out.println("actualRows = " + actualRows);
+    Assert.assertEquals(expectedRows.size(), actualRows.size());
+
+    for (int i = 0; i < expectedRows.size(); i++) {
+      Map<String, Object> expected = expectedRows.get(i);
+      InputRow actual = actualRows.get(i);
+
+      Assert.assertEquals(ImmutableList.of("host"), actual.getDimensions());
+
+      Assert.assertEquals(expected.get(DruidTable.DEFAULT_TIMESTAMP_COLUMN),
+              actual.getTimestamp().getMillis()
+      );
+      Assert.assertEquals(expected.get("host"), actual.getDimension("host"));
+      Assert.assertEquals(expected.get("visited_sum"), actual.getLongMetric("visited_sum"));
+      Assert.assertEquals(
+              (Double) expected.get("unique_hosts"),
+              (Double) HyperUniquesAggregatorFactory
+                      .estimateCardinality(actual.getRaw("unique_hosts")),
+              0.001
+      );
+    }
+  }
+
+  @Test
+  public void testSerDeser() throws IOException {
+    String segment = "{\"dataSource\":\"datasource2015\",\"interval\":\"2015-06-01T00:00:00.000-04:00/2015-06-02T00:00:00.000-04:00\",\"version\":\"2016-11-04T19:24:01.732-04:00\",\"loadSpec\":{\"type\":\"hdfs\",\"path\":\"hdfs://cn105-10.l42scl.hortonworks.com:8020/apps/hive/warehouse/druid.db/.hive-staging_hive_2016-11-04_19-23-50_168_1550339856804207572-1/_task_tmp.-ext-10002/_tmp.000000_0/datasource2015/20150601T000000.000-0400_20150602T000000.000-0400/2016-11-04T19_24_01.732-04_00/0/index.zip\"},\"dimensions\":\"dimension1\",\"metrics\":\"bigint\",\"shardSpec\":{\"type\":\"linear\",\"partitionNum\":0},\"binaryVersion\":9,\"size\":1765,\"identifier\":\"datasource2015_2015-06-01T00:00:00.000-04:00_2015-06-02T00:00:00.000-04:00_2016-11-04T19:24:01.732-04:00\"}";
+    DataSegment dataSegment = objectMapper.readerFor(DataSegment.class)
+            .readValue(segment);
+    Assert.assertEquals("datasource2015", dataSegment.getDataSource());
+  }
+
+}
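
One detail of testWrite worth calling out: the writer's input is simply the raw row map plus a pre-truncated granularity timestamp stored under Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME, which the writer uses to bucket rows into segments. A minimal sketch of building one such DruidWritable, mirroring the transform in the test (the wrapper class DruidWritableSketch is hypothetical; the constants, types, and values are taken from the test above):

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.metamx.common.Granularity;
import org.apache.calcite.adapter.druid.DruidTable;
import org.apache.hadoop.hive.conf.Constants;
import org.apache.hadoop.hive.druid.serde.DruidWritable;
import org.joda.time.DateTime;

public class DruidWritableSketch {
  // Builds one writable the way testWrite does: the raw row fields plus the
  // day-truncated timestamp used for segment bucketing.
  static DruidWritable exampleRow() {
    DateTime eventTime = DateTime.parse("2014-10-22T01:00:00.000Z");
    return new DruidWritable(ImmutableMap.<String, Object>builder()
            .put(DruidTable.DEFAULT_TIMESTAMP_COLUMN, eventTime.getMillis())
            .put("host", ImmutableList.of("b.example.com"))
            .put("visited_sum", 175L)
            .put("unique_hosts", 1.0d)
            .put(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME,
                    Granularity.DAY.truncate(eventTime).getMillis())
            .build());
  }
}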

http://git-wip-us.apache.org/repos/asf/hive/blob/590687bc/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
index 87bd5c8..d8689ba 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
@@ -17,19 +17,12 @@
  */
 package org.apache.hadoop.hive.llap.daemon.impl;
 
-import java.net.InetSocketAddress;
-import java.nio.ByteBuffer;
-import java.security.PrivilegedExceptionAction;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Stack;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicLong;
-
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Stopwatch;
+import com.google.common.collect.HashMultimap;
+import com.google.common.collect.Multimap;
+import com.google.common.util.concurrent.FutureCallback;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.llap.daemon.FragmentCompletionHandler;
@@ -50,7 +43,6 @@ import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.log4j.MDC;
 import org.apache.log4j.NDC;
 import org.apache.tez.common.CallableWithNdc;
@@ -58,10 +50,7 @@ import org.apache.tez.common.TezCommonUtils;
 import org.apache.tez.common.security.JobTokenIdentifier;
 import org.apache.tez.common.security.TokenCache;
 import org.apache.tez.dag.api.TezConstants;
-import org.apache.tez.dag.records.TezDAGID;
 import org.apache.tez.dag.records.TezTaskAttemptID;
-import org.apache.tez.dag.records.TezTaskID;
-import org.apache.tez.dag.records.TezVertexID;
 import org.apache.tez.hadoop.shim.HadoopShim;
 import org.apache.tez.runtime.api.ExecutionContext;
 import org.apache.tez.runtime.api.impl.TaskSpec;
@@ -75,15 +64,17 @@ import org.apache.tez.runtime.task.TezTaskRunner2;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Joiner;
-import com.google.common.base.Stopwatch;
-import com.google.common.collect.HashMultimap;
-import com.google.common.collect.Multimap;
-import com.google.common.util.concurrent.FutureCallback;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import java.net.InetSocketAddress;
+import java.nio.ByteBuffer;
+import java.security.PrivilegedExceptionAction;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Stack;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
 
 /**
  *
@@ -265,7 +256,7 @@ public class TaskRunnerCallable extends CallableWithNdc<TaskRunner2Result> {
         } finally {
           FileSystem.closeAllForUGI(taskUgi);
           LOG.info("ExecutionTime for Container: " + request.getContainerIdString() + "=" +
-              runtimeWatch.stop().elapsedMillis());
+                  runtimeWatch.stop().elapsedMillis());
           if (LOG.isDebugEnabled()) {
             LOG.debug(
                 "canFinish post completion: " + taskSpec.getTaskAttemptID() + ": " + canFinish());

http://git-wip-us.apache.org/repos/asf/hive/blob/590687bc/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 3fc35bc..376197e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -136,7 +136,7 @@
     <derby.version>10.10.2.0</derby.version>
     <dropwizard.version>3.1.0</dropwizard.version>
     <dropwizard-metrics-hadoop-metrics2-reporter.version>0.1.2</dropwizard-metrics-hadoop-metrics2-reporter.version>
-    <druid.version>0.9.1.1</druid.version>
+    <druid.version>0.9.2</druid.version>
     <guava.version>14.0.1</guava.version>
     <groovy.version>2.4.4</groovy.version>
     <hadoop.version>2.7.2</hadoop.version>

http://git-wip-us.apache.org/repos/asf/hive/blob/590687bc/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
index 54d619c..5ef901f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
@@ -18,20 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
-import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_TEMPORARY_TABLE_STORAGE;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.Serializable;
-import java.io.StringWriter;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-
+import com.google.common.collect.Lists;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -83,7 +70,19 @@ import org.apache.hive.common.util.HiveStringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Lists;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.Serializable;
+import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_TEMPORARY_TABLE_STORAGE;
 
 /**
  * File Sink operator implementation.

http://git-wip-us.apache.org/repos/asf/hive/blob/590687bc/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 8db833e..37e4b9b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -18,63 +18,10 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
+import com.esotericsoftware.kryo.Kryo;
+import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
-import java.beans.DefaultPersistenceDelegate;
-import java.beans.Encoder;
-import java.beans.Expression;
-import java.beans.Statement;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.DataInput;
-import java.io.EOFException;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.Serializable;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.net.URLDecoder;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-import java.sql.SQLFeatureNotSupportedException;
-import java.sql.SQLTransientException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Calendar;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Random;
-import java.util.Set;
-import java.util.UUID;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Future;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-import java.util.zip.Deflater;
-import java.util.zip.DeflaterOutputStream;
-import java.util.zip.InflaterInputStream;
-
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang.WordUtils;
@@ -198,8 +145,58 @@ import org.apache.hive.common.util.ReflectionUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.esotericsoftware.kryo.Kryo;
-import com.google.common.base.Preconditions;
+import java.beans.DefaultPersistenceDelegate;
+import java.beans.Encoder;
+import java.beans.Expression;
+import java.beans.Statement;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInput;
+import java.io.EOFException;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.Serializable;
+import java.net.URI;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.net.URLDecoder;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
+import java.sql.SQLTransientException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Calendar;
+import java.util.Collection;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Random;
+import java.util.Set;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.zip.Deflater;
+import java.util.zip.DeflaterOutputStream;
+import java.util.zip.InflaterInputStream;
 
 /**
  * Utilities.
@@ -3095,7 +3092,7 @@ public final class Utilities {
 
     TableDesc tableDesc = work.getAliasToPartnInfo().get(alias).getTableDesc();
     if (tableDesc.isNonNative()) {
-      // if this isn't a hive table we can't create an empty file for it.
+      // if the table does not use native storage, we can't create an empty file for it.
       return null;
     }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/590687bc/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
index 178a2de..c1f6883 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
@@ -18,21 +18,13 @@
 
 package org.apache.hadoop.hive.ql.hooks;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
+import com.google.common.collect.Lists;
+import com.google.common.hash.Hasher;
+import com.google.common.hash.Hashing;
+import com.google.gson.stream.JsonWriter;
 import org.apache.commons.collections.SetUtils;
 import org.apache.commons.io.output.StringBuilderWriter;
 import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -50,11 +42,18 @@ import org.apache.hadoop.hive.ql.optimizer.lineage.LineageCtx.Index;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Lists;
-import com.google.common.hash.Hasher;
-import com.google.common.hash.Hashing;
-import com.google.gson.stream.JsonWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 /**
  * Implementation of a post execute hook that logs lineage info to a log file.

http://git-wip-us.apache.org/repos/asf/hive/blob/590687bc/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
index eaf0abc..d9c2ff4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
@@ -122,6 +122,8 @@ public class Optimizer {
       transformations.add(new SortedDynPartitionOptimizer());
     }
 
+    transformations.add(new SortedDynPartitionTimeGranularityOptimizer());
+
     if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTPPD)) {
       transformations.add(new PartitionPruner());
       transformations.add(new PartitionConditionRemover());