Posted to commits@calcite.apache.org by jc...@apache.org on 2017/04/14 07:27:02 UTC

calcite git commit: [CALCITE-1707] Push extraction filter on Year/Month/Day to Druid [Forced Update!]

Repository: calcite
Updated Branches:
  refs/heads/master 851919b2d -> 7c2f5a83c (forced update)


[CALCITE-1707] Push extraction filter on Year/Month/Day to Druid

Close apache/calcite#426


Project: http://git-wip-us.apache.org/repos/asf/calcite/repo
Commit: http://git-wip-us.apache.org/repos/asf/calcite/commit/7c2f5a83
Tree: http://git-wip-us.apache.org/repos/asf/calcite/tree/7c2f5a83
Diff: http://git-wip-us.apache.org/repos/asf/calcite/diff/7c2f5a83

Branch: refs/heads/master
Commit: 7c2f5a83c5c4416a6ec91bb8aa043cd60962ed37
Parents: 059142f
Author: Slim Bouguerra <sl...@gmail.com>
Authored: Thu Apr 13 16:00:18 2017 +0100
Committer: Jesus Camacho Rodriguez <jc...@apache.org>
Committed: Fri Apr 14 08:25:36 2017 +0100

----------------------------------------------------------------------
 .../calcite/rel/rules/DateRangeRules.java       |   7 +-
 .../adapter/druid/DruidDateTimeUtils.java       |   6 +-
 .../calcite/adapter/druid/DruidQuery.java       |  71 ++++--
 .../calcite/adapter/druid/DruidRules.java       |  12 +-
 .../adapter/druid/ExtractionFunctionUtil.java   |  68 ++++++
 .../adapter/druid/TimeExtractionFunction.java   |  11 +-
 .../org/apache/calcite/test/DruidAdapterIT.java | 221 ++++++++++++++-----
 7 files changed, 313 insertions(+), 83 deletions(-)
----------------------------------------------------------------------
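
For readers skimming the diff: the net effect is that a predicate such as
EXTRACT(YEAR FROM "timestamp") = 1997 is no longer evaluated by a Bindable
filter on top of the Druid scan; it is translated into a Druid filter on the
__time dimension that carries a timeFormat extraction function. A minimal
example of the mapping, taken from the updated DruidAdapterIT expectations
(the 'en-US' locale assumes the JVM default locale is en-US):

  SQL predicate:
    extract(year from "timestamp") = 1997

  Resulting Druid filter fragment:
    {'type':'selector','dimension':'__time','value':'1997',
     'extractionFn':{'type':'timeFormat','format':'yyyy',
                     'timeZone':'UTC','locale':'en-US'}}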


http://git-wip-us.apache.org/repos/asf/calcite/blob/7c2f5a83/core/src/main/java/org/apache/calcite/rel/rules/DateRangeRules.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/calcite/rel/rules/DateRangeRules.java b/core/src/main/java/org/apache/calcite/rel/rules/DateRangeRules.java
index 7b30571..1bfb813 100644
--- a/core/src/main/java/org/apache/calcite/rel/rules/DateRangeRules.java
+++ b/core/src/main/java/org/apache/calcite/rel/rules/DateRangeRules.java
@@ -145,7 +145,12 @@ public abstract class DateRangeRules {
       final RexBuilder rexBuilder = filter.getCluster().getRexBuilder();
       RexNode condition = filter.getCondition();
       final Map<String, RangeSet<Calendar>> operandRanges = new HashMap<>();
-      for (TimeUnitRange timeUnit : extractTimeUnits(condition)) {
+      Set<TimeUnitRange> timeUnitRangeSet = extractTimeUnits(condition);
+      if (!timeUnitRangeSet.contains(TimeUnitRange.YEAR)) {
+        // Bail out if there is no YEAR extract.
+        return;
+      }
+      for (TimeUnitRange timeUnit : timeUnitRangeSet) {
         condition = condition.accept(
             new ExtractShuttle(rexBuilder, timeUnit, operandRanges));
       }

http://git-wip-us.apache.org/repos/asf/calcite/blob/7c2f5a83/druid/src/main/java/org/apache/calcite/adapter/druid/DruidDateTimeUtils.java
----------------------------------------------------------------------
diff --git a/druid/src/main/java/org/apache/calcite/adapter/druid/DruidDateTimeUtils.java b/druid/src/main/java/org/apache/calcite/adapter/druid/DruidDateTimeUtils.java
index 89c68b3..1312783 100644
--- a/druid/src/main/java/org/apache/calcite/adapter/druid/DruidDateTimeUtils.java
+++ b/druid/src/main/java/org/apache/calcite/adapter/druid/DruidDateTimeUtils.java
@@ -244,7 +244,11 @@ public class DruidDateTimeUtils {
 
   private static Calendar literalValue(RexNode node) {
     if (node instanceof RexLiteral) {
-      return (Calendar) ((RexLiteral) node).getValue();
+      Object value = ((RexLiteral) node).getValue();
+      if (value instanceof Calendar) {
+        return (Calendar) value;
+      }
+      return null;
     }
     return null;
   }

http://git-wip-us.apache.org/repos/asf/calcite/blob/7c2f5a83/druid/src/main/java/org/apache/calcite/adapter/druid/DruidQuery.java
----------------------------------------------------------------------
diff --git a/druid/src/main/java/org/apache/calcite/adapter/druid/DruidQuery.java b/druid/src/main/java/org/apache/calcite/adapter/druid/DruidQuery.java
index 1b81f67..05567cc 100644
--- a/druid/src/main/java/org/apache/calcite/adapter/druid/DruidQuery.java
+++ b/druid/src/main/java/org/apache/calcite/adapter/druid/DruidQuery.java
@@ -18,6 +18,7 @@ package org.apache.calcite.adapter.druid;
 
 import org.apache.calcite.DataContext;
 import org.apache.calcite.avatica.ColumnMetaData;
+import org.apache.calcite.avatica.util.TimeUnitRange;
 import org.apache.calcite.config.CalciteConnectionConfig;
 import org.apache.calcite.config.CalciteConnectionProperty;
 import org.apache.calcite.interpreter.BindableRel;
@@ -86,6 +87,10 @@ import static org.apache.calcite.sql.SqlKind.INPUT_REF;
  */
 public class DruidQuery extends AbstractRelNode implements BindableRel {
 
+  private static final List<TimeUnitRange> LIST_OF_VALID_TIME_EXTRACT = ImmutableList.of(
+      TimeUnitRange.YEAR,
+      TimeUnitRange.MONTH,
+      TimeUnitRange.DAY);
   protected QuerySpec querySpec;
 
   final RelOptTable table;
@@ -220,11 +225,23 @@ public class DruidQuery extends AbstractRelNode implements BindableRel {
       return areValidFilters(((RexCall) e).getOperands(), true);
     case CAST:
       return isValidCast((RexCall) e, boundedComparator);
+    case EXTRACT:
+      return isValidExtract((RexCall) e);
     default:
       return false;
     }
   }
 
+  private boolean isValidExtract(RexCall call) {
+    assert call.isA(SqlKind.EXTRACT);
+    final RexLiteral flag = (RexLiteral) call.operands.get(0);
+    final TimeUnitRange timeUnit = (TimeUnitRange) flag.getValue();
+    if (timeUnit != null && LIST_OF_VALID_TIME_EXTRACT.contains(timeUnit)) {
+      return true;
+    }
+    return false;
+  }
+
   private boolean areValidFilters(List<RexNode> es, boolean boundedComparator) {
     for (RexNode e : es) {
       if (!isValidFilter(e, boundedComparator)) {
@@ -980,35 +997,44 @@ public class DruidQuery extends AbstractRelNode implements BindableRel {
         final boolean numeric =
             call.getOperands().get(posRef).getType().getFamily()
                 == SqlTypeFamily.NUMERIC;
+
+        final ExtractionFunction extractionFunction = ExtractionFunctionUtil.buildExtraction(call
+            .getOperands().get(posRef));
+        String dimName = tr(e, posRef);
+        if (dimName.equals(DruidConnectionImpl.DEFAULT_RESPONSE_TIMESTAMP_COLUMN)) {
+          // We need to use Druid default column name to refer to the time dimension in a filter
+          dimName = DruidTable.DEFAULT_TIMESTAMP_COLUMN;
+        }
+
         switch (e.getKind()) {
         case EQUALS:
-          return new JsonSelector("selector", tr(e, posRef), tr(e, posConstant));
+          return new JsonSelector("selector", dimName, tr(e, posConstant), extractionFunction);
         case NOT_EQUALS:
           return new JsonCompositeFilter("not",
-              new JsonSelector("selector", tr(e, posRef), tr(e, posConstant)));
+              new JsonSelector("selector", dimName, tr(e, posConstant), extractionFunction));
         case GREATER_THAN:
-          return new JsonBound("bound", tr(e, posRef), tr(e, posConstant),
-              true, null, false, numeric);
+          return new JsonBound("bound", dimName, tr(e, posConstant),
+              true, null, false, numeric, extractionFunction);
         case GREATER_THAN_OR_EQUAL:
-          return new JsonBound("bound", tr(e, posRef), tr(e, posConstant),
-              false, null, false, numeric);
+          return new JsonBound("bound", dimName, tr(e, posConstant),
+              false, null, false, numeric, extractionFunction);
         case LESS_THAN:
-          return new JsonBound("bound", tr(e, posRef), null, false,
-              tr(e, posConstant), true, numeric);
+          return new JsonBound("bound", dimName, null, false,
+              tr(e, posConstant), true, numeric, extractionFunction);
         case LESS_THAN_OR_EQUAL:
-          return new JsonBound("bound", tr(e, posRef), null, false,
-              tr(e, posConstant), false, numeric);
+          return new JsonBound("bound", dimName, null, false,
+              tr(e, posConstant), false, numeric, extractionFunction);
         case IN:
           ImmutableList.Builder<String> listBuilder = ImmutableList.builder();
           for (RexNode rexNode: call.getOperands()) {
             if (rexNode.getKind() == SqlKind.LITERAL) {
-              listBuilder.add(((RexLiteral) rexNode).getValue2().toString());
+              listBuilder.add(((RexLiteral) rexNode).getValue3().toString());
             }
           }
-          return new JsonInFilter("in", tr(e, posRef), listBuilder.build());
+          return new JsonInFilter("in", dimName, listBuilder.build(), extractionFunction);
         case BETWEEN:
-          return new JsonBound("bound", tr(e, posRef), tr(e, 2), false,
-              tr(e, 3), false, numeric);
+          return new JsonBound("bound", dimName, tr(e, 2), false,
+              tr(e, 3), false, numeric, extractionFunction);
         default:
           throw new AssertionError();
         }
@@ -1201,11 +1227,14 @@ public class DruidQuery extends AbstractRelNode implements BindableRel {
   private static class JsonSelector extends JsonFilter {
     private final String dimension;
     private final String value;
+    private final ExtractionFunction extractionFunction;
 
-    private JsonSelector(String type, String dimension, String value) {
+    private JsonSelector(String type, String dimension, String value,
+        ExtractionFunction extractionFunction) {
       super(type);
       this.dimension = dimension;
       this.value = value;
+      this.extractionFunction = extractionFunction;
     }
 
     public void write(JsonGenerator generator) throws IOException {
@@ -1213,6 +1242,7 @@ public class DruidQuery extends AbstractRelNode implements BindableRel {
       generator.writeStringField("type", type);
       generator.writeStringField("dimension", dimension);
       generator.writeStringField("value", value);
+      writeFieldIf(generator, "extractionFn", extractionFunction);
       generator.writeEndObject();
     }
   }
@@ -1226,10 +1256,11 @@ public class DruidQuery extends AbstractRelNode implements BindableRel {
     private final String upper;
     private final boolean upperStrict;
     private final boolean alphaNumeric;
+    private final ExtractionFunction extractionFunction;
 
     private JsonBound(String type, String dimension, String lower,
         boolean lowerStrict, String upper, boolean upperStrict,
-        boolean alphaNumeric) {
+        boolean alphaNumeric, ExtractionFunction extractionFunction) {
       super(type);
       this.dimension = dimension;
       this.lower = lower;
@@ -1237,6 +1268,7 @@ public class DruidQuery extends AbstractRelNode implements BindableRel {
       this.upper = upper;
       this.upperStrict = upperStrict;
       this.alphaNumeric = alphaNumeric;
+      this.extractionFunction = extractionFunction;
     }
 
     public void write(JsonGenerator generator) throws IOException {
@@ -1256,6 +1288,7 @@ public class DruidQuery extends AbstractRelNode implements BindableRel {
       } else {
         generator.writeStringField("ordering", "lexicographic");
       }
+      writeFieldIf(generator, "extractionFn", extractionFunction);
       generator.writeEndObject();
     }
   }
@@ -1292,11 +1325,14 @@ public class DruidQuery extends AbstractRelNode implements BindableRel {
   protected static class JsonInFilter extends JsonFilter {
     private final String dimension;
     private final List<String> values;
+    private final ExtractionFunction extractionFunction;
 
-    private JsonInFilter(String type, String dimension, List<String> values) {
+    private JsonInFilter(String type, String dimension, List<String> values,
+        ExtractionFunction extractionFunction) {
       super(type);
       this.dimension = dimension;
       this.values = values;
+      this.extractionFunction = extractionFunction;
     }
 
     public void write(JsonGenerator generator) throws IOException {
@@ -1304,6 +1340,7 @@ public class DruidQuery extends AbstractRelNode implements BindableRel {
       generator.writeStringField("type", type);
       generator.writeStringField("dimension", dimension);
       writeField(generator, "values", values);
+      writeFieldIf(generator, "extractionFn", extractionFunction);
       generator.writeEndObject();
     }
   }

http://git-wip-us.apache.org/repos/asf/calcite/blob/7c2f5a83/druid/src/main/java/org/apache/calcite/adapter/druid/DruidRules.java
----------------------------------------------------------------------
diff --git a/druid/src/main/java/org/apache/calcite/adapter/druid/DruidRules.java b/druid/src/main/java/org/apache/calcite/adapter/druid/DruidRules.java
index 36e5b40..309d1f2 100644
--- a/druid/src/main/java/org/apache/calcite/adapter/druid/DruidRules.java
+++ b/druid/src/main/java/org/apache/calcite/adapter/druid/DruidRules.java
@@ -205,22 +205,18 @@ public class DruidRules {
         intervals = DruidDateTimeUtils.createInterval(
             query.getRowType().getFieldList().get(timestampFieldIdx).getType(),
             RexUtil.composeConjunction(rexBuilder, triple.getLeft(), false));
-        if (intervals == null) {
-          // We can't push anything useful to Druid.
-          residualPreds.addAll(triple.getLeft());
+        if (intervals == null || intervals.isEmpty()) {
+          // Case where we have a filter with EXTRACT that cannot be pushed down as an interval
+          triple.getMiddle().addAll(triple.getLeft());
         }
       }
-      if (intervals == null && triple.getMiddle().isEmpty()) {
-        // We can't push anything useful to Druid.
-        return;
-      }
       RelNode newDruidQuery = query;
       if (!triple.getMiddle().isEmpty()) {
         final RelNode newFilter = filter.copy(filter.getTraitSet(), Util.last(query.rels),
             RexUtil.composeConjunction(rexBuilder, triple.getMiddle(), false));
         newDruidQuery = DruidQuery.extendQuery(query, newFilter);
       }
-      if (intervals != null) {
+      if (intervals != null && !intervals.isEmpty()) {
         newDruidQuery = DruidQuery.extendQuery((DruidQuery) newDruidQuery, intervals);
       }
       if (!residualPreds.isEmpty()) {

http://git-wip-us.apache.org/repos/asf/calcite/blob/7c2f5a83/druid/src/main/java/org/apache/calcite/adapter/druid/ExtractionFunctionUtil.java
----------------------------------------------------------------------
diff --git a/druid/src/main/java/org/apache/calcite/adapter/druid/ExtractionFunctionUtil.java b/druid/src/main/java/org/apache/calcite/adapter/druid/ExtractionFunctionUtil.java
new file mode 100644
index 0000000..f3c71f3
--- /dev/null
+++ b/druid/src/main/java/org/apache/calcite/adapter/druid/ExtractionFunctionUtil.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.adapter.druid;
+
+
+import org.apache.calcite.avatica.util.TimeUnitRange;
+import org.apache.calcite.rex.RexCall;
+import org.apache.calcite.rex.RexLiteral;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.sql.SqlKind;
+
+
+/**
+ * Utility class for extraction function mapping between SQL and Druid.
+ */
+public final class ExtractionFunctionUtil {
+
+  private ExtractionFunctionUtil() {
+  }
+
+  //~ Methods ----------------------------------------------------------------
+
+  /**
+   * Builds a Druid extraction function from a SQL EXTRACT RexNode.
+   *
+   * @param rexNode node that might contain an extraction function on time
+   * @return the corresponding Druid extraction function, or null if it is not recognized
+   */
+  public static ExtractionFunction buildExtraction(RexNode rexNode) {
+    if (rexNode instanceof RexCall) {
+      RexCall call = (RexCall) rexNode;
+      if (call.getKind().equals(SqlKind.EXTRACT)) {
+        final RexLiteral flag = (RexLiteral) call.operands.get(0);
+        final TimeUnitRange timeUnit = (TimeUnitRange) flag.getValue();
+        if (timeUnit == null) {
+          return null;
+        }
+        switch (timeUnit) {
+        case YEAR:
+          return TimeExtractionFunction.createFromGranularity(Granularity.YEAR);
+        case MONTH:
+          return TimeExtractionFunction.createFromGranularity(Granularity.MONTH);
+        case DAY:
+          return TimeExtractionFunction.createFromGranularity(Granularity.DAY);
+        default:
+          return null;
+        }
+      }
+    }
+    return null;
+  }
+}
+
+// End ExtractionFunctionUtil.java
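
A minimal, hypothetical sketch (not part of this commit) of exercising
buildExtraction on a hand-built EXTRACT call; it assumes the RexBuilder and
JavaTypeFactoryImpl APIs of this Calcite version and that the druid adapter
types used below are publicly visible:

import org.apache.calcite.adapter.druid.ExtractionFunction;
import org.apache.calcite.adapter.druid.ExtractionFunctionUtil;
import org.apache.calcite.avatica.util.TimeUnitRange;
import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.type.SqlTypeName;

/** Hypothetical demo class, not part of the Calcite code base. */
public class BuildExtractionSketch {
  public static void main(String[] args) {
    final RexBuilder rexBuilder =
        new RexBuilder(new JavaTypeFactoryImpl(RelDataTypeSystem.DEFAULT));
    // $0 stands for the "timestamp" column.
    final RexNode timestampRef = rexBuilder.makeInputRef(
        rexBuilder.getTypeFactory().createSqlType(SqlTypeName.TIMESTAMP), 0);
    // EXTRACT(YEAR FROM $0); operand 0 is the time-unit flag, which is what
    // buildExtraction inspects.
    final RexNode extractYear = rexBuilder.makeCall(SqlStdOperatorTable.EXTRACT,
        rexBuilder.makeFlag(TimeUnitRange.YEAR), timestampRef);
    // Non-null for YEAR/MONTH/DAY extracts...
    final ExtractionFunction fn =
        ExtractionFunctionUtil.buildExtraction(extractYear);
    // ...and null for anything buildExtraction does not recognize.
    final ExtractionFunction none =
        ExtractionFunctionUtil.buildExtraction(timestampRef);
    System.out.println(fn != null && none == null); // expect: true
  }
}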

http://git-wip-us.apache.org/repos/asf/calcite/blob/7c2f5a83/druid/src/main/java/org/apache/calcite/adapter/druid/TimeExtractionFunction.java
----------------------------------------------------------------------
diff --git a/druid/src/main/java/org/apache/calcite/adapter/druid/TimeExtractionFunction.java b/druid/src/main/java/org/apache/calcite/adapter/druid/TimeExtractionFunction.java
index 4cd1f12..d3fbb28 100644
--- a/druid/src/main/java/org/apache/calcite/adapter/druid/TimeExtractionFunction.java
+++ b/druid/src/main/java/org/apache/calcite/adapter/druid/TimeExtractionFunction.java
@@ -19,6 +19,7 @@ package org.apache.calcite.adapter.druid;
 import com.fasterxml.jackson.core.JsonGenerator;
 
 import java.io.IOException;
+import java.util.Locale;
 
 import static org.apache.calcite.adapter.druid.DruidQuery.writeFieldIf;
 
@@ -51,7 +52,7 @@ public class TimeExtractionFunction implements ExtractionFunction {
     writeFieldIf(generator, "format", format);
     writeFieldIf(generator, "granularity", granularity);
     writeFieldIf(generator, "timeZone", timeZone);
-    writeFieldIf(generator, "local", local);
+    writeFieldIf(generator, "locale", local);
     generator.writeEndObject();
   }
 
@@ -74,13 +75,11 @@ public class TimeExtractionFunction implements ExtractionFunction {
   public static TimeExtractionFunction createFromGranularity(Granularity granularity) {
     switch (granularity) {
     case DAY:
-      return new TimeExtractionFunction("dd", null, "UTC", null);
+      return new TimeExtractionFunction("d", null, "UTC", Locale.getDefault().toLanguageTag());
     case MONTH:
-      return new TimeExtractionFunction("MM", null, "UTC", null);
+      return new TimeExtractionFunction("M", null, "UTC", Locale.getDefault().toLanguageTag());
     case YEAR:
-      return new TimeExtractionFunction("yyyy", null, "UTC", null);
-    case HOUR:
-      return new TimeExtractionFunction("hh", null, "UTC", null);
+      return new TimeExtractionFunction("yyyy", null, "UTC", Locale.getDefault().toLanguageTag());
     default:
       throw new AssertionError("Extraction " + granularity.value + " is not valid");
     }
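
A small, hypothetical sketch (not part of this commit) showing what
createFromGranularity now produces when serialized; note that the locale
comes from Locale.getDefault().toLanguageTag(), so the 'en-US' expected by
the tests assumes an en-US default JVM locale:

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

import org.apache.calcite.adapter.druid.Granularity;
import org.apache.calcite.adapter.druid.TimeExtractionFunction;

import java.io.StringWriter;

/** Hypothetical demo class, not part of the Calcite code base. */
public class TimeExtractionFunctionSketch {
  public static void main(String[] args) throws Exception {
    final TimeExtractionFunction year =
        TimeExtractionFunction.createFromGranularity(Granularity.YEAR);
    final StringWriter writer = new StringWriter();
    final JsonGenerator generator = new JsonFactory().createGenerator(writer);
    year.write(generator); // writes the extractionFn JSON object
    generator.flush();
    // On an en-US JVM this prints something like:
    // {"type":"timeFormat","format":"yyyy","timeZone":"UTC","locale":"en-US"}
    System.out.println(writer);
  }
}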

http://git-wip-us.apache.org/repos/asf/calcite/blob/7c2f5a83/druid/src/test/java/org/apache/calcite/test/DruidAdapterIT.java
----------------------------------------------------------------------
diff --git a/druid/src/test/java/org/apache/calcite/test/DruidAdapterIT.java b/druid/src/test/java/org/apache/calcite/test/DruidAdapterIT.java
index 60952a7..185e3c6 100644
--- a/druid/src/test/java/org/apache/calcite/test/DruidAdapterIT.java
+++ b/druid/src/test/java/org/apache/calcite/test/DruidAdapterIT.java
@@ -1416,10 +1416,12 @@ public class DruidAdapterIT {
         + "from \"foodmart\"\n"
         + "where extract(year from \"timestamp\") = 1997\n"
         + "and extract(month from \"timestamp\") in (4, 6)\n";
-    final String explain = "EnumerableInterpreter\n"
-        + "  BindableAggregate(group=[{}], C=[COUNT()])\n"
-        + "    BindableFilter(condition=[AND(>=(/INT(Reinterpret($0), 86400000), 1997-01-01), <(/INT(Reinterpret($0), 86400000), 1998-01-01), OR(AND(>=(/INT(Reinterpret($0), 86400000), 1997-04-01), <(/INT(Reinterpret($0), 86400000), 1997-05-01)), AND(>=(/INT(Reinterpret($0), 86400000), 1997-06-01), <(/INT(Reinterpret($0), 86400000), 1997-07-01))))])\n"
-        + "      DruidQuery(table=[[foodmart, foodmart]], intervals=[[1900-01-09T00:00:00.000/2992-01-10T00:00:00.000]], projects=[[$0]])";
+    final String explain = "PLAN=EnumerableInterpreter\n"
+        + "  DruidQuery(table=[[foodmart, foodmart]], "
+        + "intervals=[[1900-01-09T00:00:00.000/2992-01-10T00:00:00.000]], filter=[AND(="
+        + "(EXTRACT_DATE(FLAG(YEAR), /INT(Reinterpret($0), 86400000)), 1997), OR(=(EXTRACT_DATE"
+        + "(FLAG(MONTH), /INT(Reinterpret($0), 86400000)), 4), =(EXTRACT_DATE(FLAG(MONTH), /INT"
+        + "(Reinterpret($0), 86400000)), 6)))], groups=[{}], aggs=[[COUNT()]])";
     sql(sql)
         .explainContains(explain)
         .returnsUnordered("C=13500");
@@ -1533,7 +1535,7 @@ public class DruidAdapterIT {
                 "{'type':'extraction',"
                     + "'dimension':'__time','outputName':'extract_0',"
                     + "'extractionFn':{'type':'timeFormat','format':'yyyy',"
-                    + "'timeZone':'UTC'}}"))
+                    + "'timeZone':'UTC','locale':'en-US'}}"))
         .returnsUnordered("year=1997; product_id=1016");
   }
 
@@ -1549,10 +1551,10 @@ public class DruidAdapterIT {
                 ",'granularity':'all'",
                 "{'type':'extraction',"
                     + "'dimension':'__time','outputName':'extract_0',"
-                    + "'extractionFn':{'type':'timeFormat','format':'MM',"
-                    + "'timeZone':'UTC'}}"))
-        .returnsUnordered("month=01; product_id=1016", "month=02; product_id=1016",
-            "month=03; product_id=1016", "month=04; product_id=1016", "month=05; product_id=1016");
+                    + "'extractionFn':{'type':'timeFormat','format':'M',"
+                    + "'timeZone':'UTC','locale':'en-US'}}"))
+        .returnsUnordered("month=1; product_id=1016", "month=2; product_id=1016",
+            "month=3; product_id=1016", "month=4; product_id=1016", "month=5; product_id=1016");
   }
 
   @Test public void testPushAggregateOnTimeWithExtractDay() {
@@ -1568,9 +1570,9 @@ public class DruidAdapterIT {
                 ",'granularity':'all'",
                 "{'type':'extraction',"
                     + "'dimension':'__time','outputName':'extract_0',"
-                    + "'extractionFn':{'type':'timeFormat','format':'dd',"
-                    + "'timeZone':'UTC'}}"))
-        .returnsUnordered("day=02; product_id=1016", "day=10; product_id=1016",
+                    + "'extractionFn':{'type':'timeFormat','format':'d',"
+                    + "'timeZone':'UTC','locale':'en-US'}}"))
+        .returnsUnordered("day=2; product_id=1016", "day=10; product_id=1016",
             "day=13; product_id=1016", "day=16; product_id=1016");
   }
 
@@ -1612,24 +1614,24 @@ public class DruidAdapterIT {
                 ",'granularity':'all'",
                 "{'type':'extraction',"
                     + "'dimension':'__time','outputName':'extract_0',"
-                    + "'extractionFn':{'type':'timeFormat','format':'dd',"
-                    + "'timeZone':'UTC'}}", "{'type':'extraction',"
+                    + "'extractionFn':{'type':'timeFormat','format':'d',"
+                    + "'timeZone':'UTC','locale':'en-US'}}", "{'type':'extraction',"
                     + "'dimension':'__time','outputName':'extract_1',"
-                    + "'extractionFn':{'type':'timeFormat','format':'MM',"
-                    + "'timeZone':'UTC'}}", "{'type':'extraction',"
+                    + "'extractionFn':{'type':'timeFormat','format':'M',"
+                    + "'timeZone':'UTC','locale':'en-US'}}", "{'type':'extraction',"
                     + "'dimension':'__time','outputName':'extract_2',"
                     + "'extractionFn':{'type':'timeFormat','format':'yyyy',"
-                    + "'timeZone':'UTC'}}"))
+                    + "'timeZone':'UTC','locale':'en-US'}}"))
         .explainContains("PLAN=EnumerableInterpreter\n"
             + "  DruidQuery(table=[[foodmart, foodmart]], "
             + "intervals=[[1997-01-01T00:00:00.001/1997-01-20T00:00:00.000]], filter=[=($1, 1016)"
             + "], projects=[[EXTRACT_DATE(FLAG(DAY), /INT(Reinterpret($0), 86400000)), "
             + "EXTRACT_DATE(FLAG(MONTH), /INT(Reinterpret($0), 86400000)), EXTRACT_DATE(FLAG"
             + "(YEAR), /INT(Reinterpret($0), 86400000)), $1]], groups=[{0, 1, 2, 3}], aggs=[[]])\n")
-        .returnsUnordered("day=02; month=01; year=1997; product_id=1016",
-            "day=10; month=01; year=1997; product_id=1016",
-            "day=13; month=01; year=1997; product_id=1016",
-            "day=16; month=01; year=1997; product_id=1016");
+        .returnsUnordered("day=2; month=1; year=1997; product_id=1016",
+            "day=10; month=1; year=1997; product_id=1016",
+            "day=13; month=1; year=1997; product_id=1016",
+            "day=16; month=1; year=1997; product_id=1016");
   }
 
   @Test public void testPushAggregateOnTimeWithExtractYearMonthDayWithOutRenaming() {
@@ -1646,24 +1648,24 @@ public class DruidAdapterIT {
             druidChecker(
                 ",'granularity':'all'", "{'type':'extraction',"
                     + "'dimension':'__time','outputName':'extract_0',"
-                    + "'extractionFn':{'type':'timeFormat','format':'dd',"
-                    + "'timeZone':'UTC'}}", "{'type':'extraction',"
+                    + "'extractionFn':{'type':'timeFormat','format':'d',"
+                    + "'timeZone':'UTC','locale':'en-US'}}", "{'type':'extraction',"
                     + "'dimension':'__time','outputName':'extract_1',"
-                    + "'extractionFn':{'type':'timeFormat','format':'MM',"
-                    + "'timeZone':'UTC'}}", "{'type':'extraction',"
+                    + "'extractionFn':{'type':'timeFormat','format':'M',"
+                    + "'timeZone':'UTC','locale':'en-US'}}", "{'type':'extraction',"
                     + "'dimension':'__time','outputName':'extract_2',"
                     + "'extractionFn':{'type':'timeFormat','format':'yyyy',"
-                    + "'timeZone':'UTC'}}"))
+                    + "'timeZone':'UTC','locale':'en-US'}}"))
         .explainContains("PLAN=EnumerableInterpreter\n"
             + "  DruidQuery(table=[[foodmart, foodmart]], "
             + "intervals=[[1997-01-01T00:00:00.001/1997-01-20T00:00:00.000]], filter=[=($1, 1016)"
             + "], projects=[[EXTRACT_DATE(FLAG(DAY), /INT(Reinterpret($0), 86400000)), "
             + "EXTRACT_DATE(FLAG(MONTH), /INT(Reinterpret($0), 86400000)), EXTRACT_DATE(FLAG"
             + "(YEAR), /INT(Reinterpret($0), 86400000)), $1]], groups=[{0, 1, 2, 3}], aggs=[[]])\n")
-        .returnsUnordered("EXPR$0=02; EXPR$1=01; EXPR$2=1997; product_id=1016",
-            "EXPR$0=10; EXPR$1=01; EXPR$2=1997; product_id=1016",
-            "EXPR$0=13; EXPR$1=01; EXPR$2=1997; product_id=1016",
-            "EXPR$0=16; EXPR$1=01; EXPR$2=1997; product_id=1016");
+        .returnsUnordered("EXPR$0=2; EXPR$1=1; EXPR$2=1997; product_id=1016",
+            "EXPR$0=10; EXPR$1=1; EXPR$2=1997; product_id=1016",
+            "EXPR$0=13; EXPR$1=1; EXPR$2=1997; product_id=1016",
+            "EXPR$0=16; EXPR$1=1; EXPR$2=1997; product_id=1016");
   }
 
   @Test public void testPushAggregateOnTimeWithExtractWithOutRenaming() {
@@ -1679,14 +1681,14 @@ public class DruidAdapterIT {
             druidChecker(
                 ",'granularity':'all'", "{'type':'extraction',"
                     + "'dimension':'__time','outputName':'extract_0',"
-                    + "'extractionFn':{'type':'timeFormat','format':'dd',"
-                    + "'timeZone':'UTC'}}"))
+                    + "'extractionFn':{'type':'timeFormat','format':'d',"
+                    + "'timeZone':'UTC','locale':'en-US'}}"))
         .explainContains("PLAN=EnumerableInterpreter\n"
             + "  DruidQuery(table=[[foodmart, foodmart]], "
             + "intervals=[[1997-01-01T00:00:00.001/1997-01-20T00:00:00.000]], filter=[=($1, 1016)], "
             + "projects=[[EXTRACT_DATE(FLAG(DAY), /INT(Reinterpret($0), 86400000)), $1]], "
             + "groups=[{0, 1}], aggs=[[]])\n")
-        .returnsUnordered("EXPR$0=02; dayOfMonth=1016", "EXPR$0=10; dayOfMonth=1016",
+        .returnsUnordered("EXPR$0=2; dayOfMonth=1016", "EXPR$0=10; dayOfMonth=1016",
             "EXPR$0=13; dayOfMonth=1016", "EXPR$0=16; dayOfMonth=1016");
   }
 
@@ -1695,28 +1697,147 @@ public class DruidAdapterIT {
         + "where EXTRACT( year from \"timestamp\") = 1997 and "
         + "\"cases_per_pallet\" >= 8 and \"cases_per_pallet\" <= 10 and "
         + "\"units_per_case\" < 15 ";
-    String druidQuery = "{'queryType':'select','dataSource':'foodmart','descending':false,"
-        + "'intervals':['1900-01-09T00:00:00.000/2992-01-10T00:00:00.000'],"
-        + "'filter':{'type':'and',"
+    String druidQuery = "{'queryType':'timeseries','dataSource':'foodmart',"
+        + "'descending':false,'granularity':'all','filter':{'type':'and',"
         + "'fields':[{'type':'bound','dimension':'cases_per_pallet','lower':'8',"
-        + "'lowerStrict':false,'ordering':'numeric'},"
-        + "{'type':'bound','dimension':'cases_per_pallet','upper':'10','upperStrict':false,"
-        + "'ordering':'numeric'},{'type':'bound','dimension':'units_per_case','upper':'15',"
-        + "'upperStrict':true,'ordering':'numeric'}]},'dimensions':[],'metrics':['store_sales'],"
-        + "'granularity':'all','pagingSpec':{'threshold':16384,'fromNext':true},"
-        + "'context':{'druid.query.fetch':false}}";
+        + "'lowerStrict':false,'ordering':'numeric'},{'type':'bound',"
+        + "'dimension':'cases_per_pallet','upper':'10','upperStrict':false,"
+        + "'ordering':'numeric'},{'type':'bound','dimension':'units_per_case',"
+        + "'upper':'15','upperStrict':true,'ordering':'numeric'},"
+        + "{'type':'selector','dimension':'__time','value':'1997',"
+        + "'extractionFn':{'type':'timeFormat','format':'yyyy','timeZone':'UTC',"
+        + "'locale':'en-US'}}]},'aggregations':[{'type':'doubleSum',"
+        + "'name':'EXPR$0','fieldName':'store_sales'}],"
+        + "'intervals':['1900-01-09T00:00:00.000/2992-01-10T00:00:00.000'],"
+        + "'context':{'skipEmptyBuckets':true}}";
     sql(sql)
-        .queryContains(druidChecker(druidQuery))
         .explainContains("PLAN=EnumerableInterpreter\n"
-             + "  BindableAggregate(group=[{}], EXPR$0=[SUM($1)])\n"
-             + "    BindableFilter(condition=[AND(>=(/INT(Reinterpret($0), 86400000), 1997-01-01), "
-             + "<(/INT(Reinterpret($0), 86400000), 1998-01-01))])\n"
-             + "      DruidQuery(table=[[foodmart, foodmart]], "
-             + "intervals=[[1900-01-09T00:00:00.000/2992-01-10T00:00:00.000]], "
-             + "filter=[AND(>=(CAST($11):BIGINT, 8), <=(CAST($11):BIGINT, 10), "
-             + "<(CAST($10):BIGINT, 15))], projects=[[$0, $90]])\n")
+            + "  DruidQuery(table=[[foodmart, foodmart]], "
+            + "intervals=[[1900-01-09T00:00:00.000/2992-01-10T00:00:00.000]], filter=[AND(>=(CAST"
+            + "($11):BIGINT, 8), <=(CAST($11):BIGINT, 10), <(CAST($10):BIGINT, 15), =(EXTRACT_DATE"
+            + "(FLAG(YEAR), /INT(Reinterpret($0), 86400000)), 1997))], groups=[{}], "
+            + "aggs=[[SUM($90)]])")
+        .queryContains(druidChecker(druidQuery))
         .returnsUnordered("EXPR$0=75364.09998679161");
   }
+
+  @Test public void testPushOfFilterExtractionOnDayAndMonth() {
+    String sql = "SELECT \"product_id\" , EXTRACT(day from \"timestamp\"), EXTRACT(month from "
+        + "\"timestamp\") from \"foodmart\" WHERE  EXTRACT(day from \"timestamp\") >= 30 AND "
+        + "EXTRACT(month from \"timestamp\") = 11 "
+        + "AND  \"product_id\" >= 1549 group by \"product_id\", EXTRACT(day from "
+        + "\"timestamp\"), EXTRACT(month from \"timestamp\")";
+    sql(sql)
+        .queryContains(
+            druidChecker("{'queryType':'groupBy','dataSource':'foodmart',"
+                + "'granularity':'all','dimensions':[{'type':'default',"
+                + "'dimension':'product_id'},{'type':'extraction','dimension':'__time',"
+                + "'outputName':'extract_0','extractionFn':{'type':'timeFormat',"
+                + "'format':'d','timeZone':'UTC','locale':'en-US'}},{'type':'extraction',"
+                + "'dimension':'__time','outputName':'extract_1',"
+                + "'extractionFn':{'type':'timeFormat','format':'M','timeZone':'UTC',"
+                + "'locale':'en-US'}}],'limitSpec':{'type':'default'},"
+                + "'filter':{'type':'and','fields':[{'type':'bound',"
+                + "'dimension':'product_id','lower':'1549','lowerStrict':false,"
+                + "'ordering':'numeric'},{'type':'bound','dimension':'__time',"
+                + "'lower':'30','lowerStrict':false,'ordering':'numeric',"
+                + "'extractionFn':{'type':'timeFormat','format':'d','timeZone':'UTC',"
+                + "'locale':'en-US'}},{'type':'selector','dimension':'__time',"
+                + "'value':'11','extractionFn':{'type':'timeFormat','format':'M',"
+                + "'timeZone':'UTC','locale':'en-US'}}]},'aggregations':[{'type':'longSum',"
+                + "'name':'dummy_agg','fieldName':'dummy_agg'}],"
+                + "'intervals':['1900-01-09T00:00:00.000/2992-01-10T00:00:00.000']}"))
+        .returnsUnordered("product_id=1549; EXPR$1=30; EXPR$2=11",
+            "product_id=1553; EXPR$1=30; EXPR$2=11");
+  }
+
+  @Test public void testPushOfFilterExtractionOnDayAndMonthAndYear() {
+    String sql = "SELECT \"product_id\" , EXTRACT(day from \"timestamp\"), EXTRACT(month from "
+        + "\"timestamp\") , EXTRACT(year from \"timestamp\") from \"foodmart\" "
+        + "WHERE  EXTRACT(day from \"timestamp\") >= 30 AND EXTRACT(month from \"timestamp\") = 11 "
+        + "AND  \"product_id\" >= 1549 AND EXTRACT(year from \"timestamp\") = 1997"
+        + "group by \"product_id\", EXTRACT(day from \"timestamp\"), "
+        + "EXTRACT(month from \"timestamp\"), EXTRACT(year from \"timestamp\")";
+    sql(sql)
+        .queryContains(
+            druidChecker("{'queryType':'groupBy','dataSource':'foodmart',"
+                + "'granularity':'all','dimensions':[{'type':'default',"
+                + "'dimension':'product_id'},{'type':'extraction','dimension':'__time',"
+                + "'outputName':'extract_0','extractionFn':{'type':'timeFormat',"
+                + "'format':'d','timeZone':'UTC','locale':'en-US'}},{'type':'extraction',"
+                + "'dimension':'__time','outputName':'extract_1',"
+                + "'extractionFn':{'type':'timeFormat','format':'M','timeZone':'UTC',"
+                + "'locale':'en-US'}},{'type':'extraction','dimension':'__time',"
+                + "'outputName':'extract_2','extractionFn':{'type':'timeFormat',"
+                + "'format':'yyyy','timeZone':'UTC','locale':'en-US'}}],"
+                + "'limitSpec':{'type':'default'},'filter':{'type':'and',"
+                + "'fields':[{'type':'bound','dimension':'product_id','lower':'1549',"
+                + "'lowerStrict':false,'ordering':'numeric'},{'type':'bound',"
+                + "'dimension':'__time','lower':'30','lowerStrict':false,"
+                + "'ordering':'numeric','extractionFn':{'type':'timeFormat','format':'d',"
+                + "'timeZone':'UTC','locale':'en-US'}},{'type':'selector',"
+                + "'dimension':'__time','value':'11','extractionFn':{'type':'timeFormat',"
+                + "'format':'M','timeZone':'UTC','locale':'en-US'}},{'type':'selector',"
+                + "'dimension':'__time','value':'1997','extractionFn':{'type':'timeFormat',"
+                + "'format':'yyyy','timeZone':'UTC','locale':'en-US'}}]},"
+                + "'aggregations':[{'type':'longSum','name':'dummy_agg',"
+                + "'fieldName':'dummy_agg'}],"
+                + "'intervals':['1900-01-09T00:00:00.000/2992-01-10T00:00:00.000']}"))
+        .returnsUnordered("product_id=1549; EXPR$1=30; EXPR$2=11; EXPR$3=1997",
+            "product_id=1553; EXPR$1=30; EXPR$2=11; EXPR$3=1997");
+  }
+
+  @Test public void testFilterExtractionOnMonthWithBetween() {
+    String sqlQuery = "SELECT \"product_id\", EXTRACT(month from \"timestamp\") FROM \"foodmart\""
+        + " WHERE EXTRACT(month from \"timestamp\") BETWEEN 10 AND 11 AND  \"product_id\" >= 1558"
+        + " GROUP BY \"product_id\", EXTRACT(month from \"timestamp\")";
+    String druidQuery = "{'queryType':'groupBy','dataSource':'foodmart',"
+        + "'granularity':'all','dimensions':[{'type':'default',"
+        + "'dimension':'product_id'},{'type':'extraction','dimension':'__time',"
+        + "'outputName':'extract_0','extractionFn':{'type':'timeFormat',"
+        + "'format':'M','timeZone':'UTC','locale':'en-US'}}],"
+        + "'limitSpec':{'type':'default'},'filter':{'type':'and',"
+        + "'fields':[{'type':'bound','dimension':'product_id','lower':'1558',"
+        + "'lowerStrict':false,'ordering':'numeric'},{'type':'bound',"
+        + "'dimension':'__time','lower':'10','lowerStrict':false,"
+        + "'ordering':'numeric','extractionFn':{'type':'timeFormat','format':'M',"
+        + "'timeZone':'UTC','locale':'en-US'}},{'type':'bound',"
+        + "'dimension':'__time','upper':'11','upperStrict':false,"
+        + "'ordering':'numeric','extractionFn':{'type':'timeFormat','format':'M',"
+        + "'timeZone':'UTC','locale':'en-US'}}]},'aggregations':[{'type':'longSum',"
+        + "'name':'dummy_agg','fieldName':'dummy_agg'}],"
+        + "'intervals':['1900-01-09T00:00:00.000/2992-01-10T00:00:00.000']}";
+    sql(sqlQuery)
+        .returnsUnordered("product_id=1558; EXPR$1=10", "product_id=1558; EXPR$1=11",
+            "product_id=1559; EXPR$1=11")
+        .queryContains(druidChecker(druidQuery));
+  }
+
+  @Test public void testFilterExtractionOnMonthWithIn() {
+    String sqlQuery = "SELECT \"product_id\", EXTRACT(month from \"timestamp\") FROM \"foodmart\""
+        + " WHERE EXTRACT(month from \"timestamp\") IN (10, 11) AND  \"product_id\" >= 1558"
+        + " GROUP BY \"product_id\", EXTRACT(month from \"timestamp\")";
+    sql(sqlQuery)
+        .queryContains(
+            druidChecker("{'queryType':'groupBy',"
+                + "'dataSource':'foodmart','granularity':'all',"
+                + "'dimensions':[{'type':'default','dimension':'product_id'},"
+                + "{'type':'extraction','dimension':'__time','outputName':'extract_0',"
+                + "'extractionFn':{'type':'timeFormat','format':'M','timeZone':'UTC',"
+                + "'locale':'en-US'}}],'limitSpec':{'type':'default'},"
+                + "'filter':{'type':'and','fields':[{'type':'bound',"
+                + "'dimension':'product_id','lower':'1558','lowerStrict':false,"
+                + "'ordering':'numeric'},{'type':'or','fields':[{'type':'selector',"
+                + "'dimension':'__time','value':'10','extractionFn':{'type':'timeFormat',"
+                + "'format':'M','timeZone':'UTC','locale':'en-US'}},{'type':'selector',"
+                + "'dimension':'__time','value':'11','extractionFn':{'type':'timeFormat',"
+                + "'format':'M','timeZone':'UTC','locale':'en-US'}}]}]},"
+                + "'aggregations':[{'type':'longSum','name':'dummy_agg',"
+                + "'fieldName':'dummy_agg'}],"
+                + "'intervals':['1900-01-09T00:00:00.000/2992-01-10T00:00:00.000']}"))
+        .returnsUnordered("product_id=1558; EXPR$1=10", "product_id=1558; EXPR$1=11",
+            "product_id=1559; EXPR$1=11");
+  }
 }
 
 // End DruidAdapterIT.java