Posted to commits@lens.apache.org by pr...@apache.org on 2016/09/27 11:24:18 UTC

lens git commit: LENS-1224: Lens Druid sql rewriter changes

Repository: lens
Updated Branches:
  refs/heads/master f1a959bdd -> 241603cf0


LENS-1224: Lens Druid sql rewriter changes


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/241603cf
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/241603cf
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/241603cf

Branch: refs/heads/master
Commit: 241603cf07fd64607c34629d149f6c1dc627eecd
Parents: f1a959b
Author: Rajitha R <ra...@gmail.com>
Authored: Tue Sep 27 16:39:46 2016 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Tue Sep 27 16:39:46 2016 +0530

----------------------------------------------------------------------
 .../lens/cube/parse/BetweenTimeRangeWriter.java |   6 +-
 .../lens/cube/parse/CubeQueryConfUtil.java      |   2 +
 .../org/apache/lens/cube/parse/HQLParser.java   | 114 ++---
 .../cube/parse/TestBetweenTimeRangeWriter.java  |  35 +-
 .../lens/cube/parse/TestTimeRangeWriter.java    |  26 +-
 .../lens/driver/jdbc/ColumnarSQLRewriter.java   |  54 +--
 .../lens/driver/jdbc/DruidSQLRewriter.java      | 260 ++++++++++++
 .../lens/driver/jdbc/TestDruidSQLRewriter.java  | 411 +++++++++++++++++++
 .../drivers/jdbc/druid/jdbcdriver-site.xml      |  66 +++
 9 files changed, 892 insertions(+), 82 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/241603cf/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java
index a5b26c4..046149b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java
@@ -37,8 +37,12 @@ public class BetweenTimeRangeWriter implements TimeRangeWriter {
     if (rangeParts.size() == 0) {
       return "";
     }
+    // Flag to check whether only the BETWEEN range clause should be used for the time range filter
+    boolean useBetweenOnly = cubeQueryContext.getConf().getBoolean(CubeQueryConfUtil.BETWEEN_ONLY_TIME_RANGE_WRITER,
+      CubeQueryConfUtil.DEFAULT_BETWEEN_ONLY_TIME_RANGE_WRITER);
+
     StringBuilder partStr = new StringBuilder();
-    if (rangeParts.size() == 1) {
+    if (!useBetweenOnly && rangeParts.size() == 1) {
       partStr.append("(");
       String partFilter =
         TimeRangeUtils.getTimeRangePartitionFilter(rangeParts.iterator().next(), cubeQueryContext, tableName);

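With the new flag set, BetweenTimeRangeWriter emits a BETWEEN range filter even when only a single
partition answers the query, instead of the single-partition filter it used to emit. A minimal sketch
of opting in from driver or test code, assuming you own the Configuration object (the snippet is
illustrative, not part of this patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.lens.cube.parse.CubeQueryConfUtil;

    public class BetweenOnlyConfExample {
      // Illustrative only: build a Configuration with BETWEEN-only
      // time range filters enabled (the default is false).
      public static Configuration betweenOnlyConf() {
        Configuration conf = new Configuration();
        conf.setBoolean(CubeQueryConfUtil.BETWEEN_ONLY_TIME_RANGE_WRITER, true);
        return conf;
      }
    }

The same switch is what the new druid jdbcdriver-site.xml at the end of this commit turns on via
lens.cube.query.between.only.time.range.writer.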
http://git-wip-us.apache.org/repos/asf/lens/blob/241603cf/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
index 49ed5ef..f20f105 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
@@ -103,6 +103,8 @@ public final class CubeQueryConfUtil {
   public static final String LIGHTEST_FACT_FIRST = "lens.cube.query.pick.lightest.fact.first";
   public static final boolean DEFAULT_LIGHTEST_FACT_FIRST = false;
   public static final String TIME_RANGE_WRITER_CLASS = "lens.cube.query.time.range.writer.class";
+  public static final boolean DEFAULT_BETWEEN_ONLY_TIME_RANGE_WRITER = false;
+  public static final String BETWEEN_ONLY_TIME_RANGE_WRITER = "lens.cube.query.between.only.time.range.writer";
   public static final Class<? extends TimeRangeWriter> DEFAULT_TIME_RANGE_WRITER = ORTimeRangeWriter.class
     .asSubclass(TimeRangeWriter.class);
   public static final String PART_WHERE_CLAUSE_DATE_FORMAT = "lens.cube.query.partition.where.clause.format";

http://git-wip-us.apache.org/repos/asf/lens/blob/241603cf/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
index 68cdcef..6e52d32 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
@@ -342,13 +342,17 @@ public final class HQLParser {
     return P_WSPACE.matcher(text).find();
   }
 
+  public static void toInfixString(ASTNode root, StringBuilder buf) {
+    toInfixString(root, buf, AppendMode.LOWER_CASE);
+  }
+
   /**
    * Recursively reconstruct query string given a query AST
    *
    * @param root root node
    * @param buf  preallocated builder where the reconstructed string will be written
    */
-  public static void toInfixString(ASTNode root, StringBuilder buf) {
+  public static void toInfixString(ASTNode root, StringBuilder buf, AppendMode appendMode) {
     if (root == null) {
       return;
     }
@@ -369,13 +373,13 @@ public final class HQLParser {
        // (example : year as alias) and in such cases queries can fail on certain DBs if the alias is not back quoted
         buf.append(" as `").append(rootText).append("`");
       } else {
-        buf.append(rootText == null ? "" : rootText.toLowerCase());
+        buf.append(rootText == null ? "" : appendMode.convert(rootText));
       }
 
     } else if (TOK_ALLCOLREF == rootType) {
       if (root.getChildCount() > 0) {
         for (int i = 0; i < root.getChildCount(); i++) {
-          toInfixString((ASTNode) root.getChild(i), buf);
+          toInfixString((ASTNode) root.getChild(i), buf, appendMode);
         }
         buf.append(".");
       }
@@ -383,7 +387,7 @@ public final class HQLParser {
     } else if (TOK_FUNCTIONSTAR == rootType) {
       if (root.getChildCount() > 0) {
         for (int i = 0; i < root.getChildCount(); i++) {
-          toInfixString((ASTNode) root.getChild(i), buf);
+          toInfixString((ASTNode) root.getChild(i), buf, appendMode);
         }
       }
       buf.append("(*)");
@@ -398,7 +402,7 @@ public final class HQLParser {
       }
 
       for (int i = 0; i < root.getChildCount(); i++) {
-        toInfixString((ASTNode) root.getChild(i), buf);
+        toInfixString((ASTNode) root.getChild(i), buf, appendMode);
       }
 
     } else if (BINARY_OPERATORS.contains(rootType)) {
@@ -413,29 +417,29 @@ public final class HQLParser {
       if (MINUS == rootType && root.getChildCount() == 1) {
         // If minus has only one child, then it's a unary operator.
         // Add Operator name first
-        buf.append(rootText.toLowerCase());
+        buf.append(appendMode.convert(rootText));
         // Operand
-        toInfixString((ASTNode) root.getChild(0), buf);
+        toInfixString((ASTNode) root.getChild(0), buf, appendMode);
       } else {
         // Left operand
-        toInfixString((ASTNode) root.getChild(0), buf);
+        toInfixString((ASTNode) root.getChild(0), buf, appendMode);
         // Operator name
         if (rootType != DOT) {
-          buf.append(' ').append(rootText.toLowerCase()).append(' ');
+          buf.append(' ').append(appendMode.convert(rootText)).append(' ');
         } else {
-          buf.append(rootText.toLowerCase());
+          buf.append(appendMode.convert(rootText));
         }
         // Right operand
-        toInfixString((ASTNode) root.getChild(1), buf);
+        toInfixString((ASTNode) root.getChild(1), buf, appendMode);
       }
       if (surround) {
         buf.append(")");
       }
     } else if (LSQUARE == rootType) {
       // square brackets for array and map types
-      toInfixString((ASTNode) root.getChild(0), buf);
+      toInfixString((ASTNode) root.getChild(0), buf, appendMode);
       buf.append("[");
-      toInfixString((ASTNode) root.getChild(1), buf);
+      toInfixString((ASTNode) root.getChild(1), buf, appendMode);
       buf.append("]");
     } else if (PRIMITIVE_TYPES.contains(rootType)) {
       if (rootType == TOK_TINYINT) {
@@ -486,17 +490,17 @@ public final class HQLParser {
       }
     } else if (TOK_FUNCTION == root.getType()) {
       // Handle UDFs, conditional operators.
-      functionString(root, buf);
+      functionString(root, buf, appendMode);
 
     } else if (TOK_FUNCTIONDI == rootType) {
       // Distinct is a different case.
       String fname = root.getChild(0).getText();
 
-      buf.append(fname.toLowerCase()).append("(distinct ");
+      buf.append(appendMode.convert(fname)).append("( distinct ");
 
       // Arguments to distinct separated by comma
       for (int i = 1; i < root.getChildCount(); i++) {
-        toInfixString((ASTNode) root.getChild(i), buf);
+        toInfixString((ASTNode) root.getChild(i), buf, appendMode);
         if (i != root.getChildCount() - 1) {
           buf.append(", ");
         }
@@ -506,7 +510,7 @@ public final class HQLParser {
     } else if (TOK_TABSORTCOLNAMEDESC == rootType || TOK_TABSORTCOLNAMEASC == rootType) {
       for (int i = 0; i < root.getChildCount(); i++) {
         StringBuilder orderByCol = new StringBuilder();
-        toInfixString((ASTNode) root.getChild(i), orderByCol);
+        toInfixString((ASTNode) root.getChild(i), orderByCol, appendMode);
         String colStr = orderByCol.toString().trim();
         if (colStr.startsWith("(") && colStr.endsWith(")")) {
           colStr = colStr.substring(1, colStr.length() - 1);
@@ -516,7 +520,7 @@ public final class HQLParser {
       buf.append(rootType == TOK_TABSORTCOLNAMEDESC ? " desc" : " asc");
     } else if (TOK_SELECT == rootType || TOK_ORDERBY == rootType || TOK_GROUPBY == rootType) {
       for (int i = 0; i < root.getChildCount(); i++) {
-        toInfixString((ASTNode) root.getChild(i), buf);
+        toInfixString((ASTNode) root.getChild(i), buf, appendMode);
         if (i != root.getChildCount() - 1) {
           buf.append(", ");
         }
@@ -525,7 +529,7 @@ public final class HQLParser {
     } else if (TOK_SELECTDI == rootType) {
       buf.append(" distinct ");
       for (int i = 0; i < root.getChildCount(); i++) {
-        toInfixString((ASTNode) root.getChild(i), buf);
+        toInfixString((ASTNode) root.getChild(i), buf, appendMode);
         if (i != root.getChildCount() - 1) {
           buf.append(", ");
         }
@@ -535,23 +539,24 @@ public final class HQLParser {
       StringBuilder sb = new StringBuilder();
       boolean local = false;
       for (int i = 0; i < root.getChildCount(); i++) {
+
         if (root.getChild(i).getType() == KW_LOCAL) {
           local = true;
         } else {
-          toInfixString((ASTNode) root.getChild(i), sb);
+          toInfixString((ASTNode) root.getChild(i), sb, appendMode);
         }
       }
       buf.append(local ? " local": "").append(" directory ").append(sb);
     } else if (TOK_TAB == rootType) {
       buf.append(" table ");
       for (int i = 0; i < root.getChildCount(); i++) {
-        toInfixString((ASTNode) root.getChild(i), buf);
+        toInfixString((ASTNode) root.getChild(i), buf, appendMode);
       }
 
     } else {
       if (root.getChildCount() > 0) {
         for (int i = 0; i < root.getChildCount(); i++) {
-          toInfixString((ASTNode) root.getChild(i), buf);
+          toInfixString((ASTNode) root.getChild(i), buf, appendMode);
         }
       } else {
         // for other types which are not handled above
@@ -561,11 +566,11 @@ public final class HQLParser {
   }
 
   // Get string representation of a function node in query AST
-  private static void functionString(ASTNode root, StringBuilder buf) {
+  private static void functionString(ASTNode root, StringBuilder buf, AppendMode appendMode) {
     // special handling for CASE udf
     if (findNodeByPath(root, KW_CASE) != null) {
       buf.append("case ");
-      toInfixString((ASTNode) root.getChild(1), buf);
+      toInfixString((ASTNode) root.getChild(1), buf, appendMode);
       // each of the conditions
       ArrayList<Node> caseChildren = root.getChildren();
       int from = 2;
@@ -574,15 +579,15 @@ public final class HQLParser {
 
       for (int i = from; i < to; i += 2) {
         buf.append(" when ");
-        toInfixString((ASTNode) caseChildren.get(i), buf);
+        toInfixString((ASTNode) caseChildren.get(i), buf, appendMode);
         buf.append(" then ");
-        toInfixString((ASTNode) caseChildren.get(i + 1), buf);
+        toInfixString((ASTNode) caseChildren.get(i + 1), buf, appendMode);
       }
 
       // check if there is an ELSE node
       if (nchildren % 2 == 1) {
         buf.append(" else ");
-        toInfixString((ASTNode) caseChildren.get(nchildren - 1), buf);
+        toInfixString((ASTNode) caseChildren.get(nchildren - 1), buf, appendMode);
       }
 
       buf.append(" end");
@@ -599,27 +604,27 @@ public final class HQLParser {
 
       for (int i = from; i < to; i += 2) {
         buf.append(" when ");
-        toInfixString((ASTNode) caseChildren.get(i), buf);
+        toInfixString((ASTNode) caseChildren.get(i), buf, appendMode);
         buf.append(" then ");
-        toInfixString((ASTNode) caseChildren.get(i + 1), buf);
+        toInfixString((ASTNode) caseChildren.get(i + 1), buf, appendMode);
       }
 
       // check if there is an ELSE node
       if (nchildren % 2 == 0) {
         buf.append(" else ");
-        toInfixString((ASTNode) caseChildren.get(nchildren - 1), buf);
+        toInfixString((ASTNode) caseChildren.get(nchildren - 1), buf, appendMode);
       }
 
       buf.append(" end");
 
     } else if (findNodeByPath(root, TOK_ISNULL) != null) {
       // IS NULL operator
-      toInfixString((ASTNode) root.getChild(1), buf);
+      toInfixString((ASTNode) root.getChild(1), buf, appendMode);
       buf.append(" is null");
 
     } else if (findNodeByPath(root, TOK_ISNOTNULL) != null) {
       // IS NOT NULL operator
-      toInfixString((ASTNode) root.getChild(1), buf);
+      toInfixString((ASTNode) root.getChild(1), buf, appendMode);
       buf.append(" is not null");
 
     } else if (root.getChild(0).getType() == Identifier
@@ -629,24 +634,24 @@ public final class HQLParser {
       ASTNode tokFalse = findNodeByPath(root, KW_FALSE);
       if (tokTrue != null) {
         // NOT BETWEEN
-        toInfixString((ASTNode) root.getChild(2), buf);
+        toInfixString((ASTNode) root.getChild(2), buf, appendMode);
         buf.append(" not between ");
-        toInfixString((ASTNode) root.getChild(3), buf);
+        toInfixString((ASTNode) root.getChild(3), buf, appendMode);
         buf.append(" and ");
-        toInfixString((ASTNode) root.getChild(4), buf);
+        toInfixString((ASTNode) root.getChild(4), buf, appendMode);
       } else if (tokFalse != null) {
         // BETWEEN
-        toInfixString((ASTNode) root.getChild(2), buf);
+        toInfixString((ASTNode) root.getChild(2), buf, appendMode);
         buf.append(" between ");
-        toInfixString((ASTNode) root.getChild(3), buf);
+        toInfixString((ASTNode) root.getChild(3), buf, appendMode);
         buf.append(" and ");
-        toInfixString((ASTNode) root.getChild(4), buf);
+        toInfixString((ASTNode) root.getChild(4), buf, appendMode);
       }
 
     } else if (findNodeByPath(root, KW_IN) != null) {
       // IN operator
 
-      toInfixString((ASTNode) root.getChild(1), buf);
+      toInfixString((ASTNode) root.getChild(1), buf, appendMode);
 
       // check if this is NOT In
       ASTNode rootParent = (ASTNode) root.getParent();
@@ -657,7 +662,7 @@ public final class HQLParser {
       buf.append(" in (");
 
       for (int i = 2; i < root.getChildCount(); i++) {
-        toInfixString((ASTNode) root.getChild(i), buf);
+        toInfixString((ASTNode) root.getChild(i), buf, appendMode);
         if (i < root.getChildCount() - 1) {
           buf.append(" , ");
         }
@@ -666,27 +671,27 @@ public final class HQLParser {
       buf.append(")");
     } else if (findNodeByPath(root, KW_CAST) != null) {
       buf.append("cast");
-      toInfixString((ASTNode) root.getChild(1), buf);
+      toInfixString((ASTNode) root.getChild(1), buf, appendMode);
       buf.append(" as ");
-      toInfixString((ASTNode) root.getChild(0), buf);
+      toInfixString((ASTNode) root.getChild(0), buf, appendMode);
     } else {
       int rootType = root.getChild(0).getType();
       if (PRIMITIVE_TYPES.contains(rootType)) {
         // cast expression maps to the following ast
         // KW_CAST LPAREN expression KW_AS primitiveType RPAREN -> ^(TOK_FUNCTION primitiveType expression)
         buf.append("cast(");
-        toInfixString((ASTNode) root.getChild(1), buf);
+        toInfixString((ASTNode) root.getChild(1), buf, appendMode);
         buf.append(" as ");
-        toInfixString((ASTNode) root.getChild(0), buf);
+        toInfixString((ASTNode) root.getChild(0), buf, appendMode);
         buf.append(")");
       } else {
         // Normal UDF
         String fname = root.getChild(0).getText();
         // Function name
-        buf.append(fname.toLowerCase()).append("(");
+        buf.append(appendMode.convert(fname)).append("(");
         // Arguments separated by comma
         for (int i = 1; i < root.getChildCount(); i++) {
-          toInfixString((ASTNode) root.getChild(i), buf);
+          toInfixString((ASTNode) root.getChild(i), buf, appendMode);
           if (i != root.getChildCount() - 1) {
             buf.append(", ");
           }
@@ -702,6 +707,12 @@ public final class HQLParser {
     printAST(getHiveTokenMapping(), ast, 0, 0);
   }
 
+  public static String getString(ASTNode tree, AppendMode appendMode) {
+    StringBuilder buf = new StringBuilder();
+    toInfixString(tree, buf, appendMode);
+    return buf.toString().trim().replaceAll("\\s+", " ");
+  }
+
   public static String getString(ASTNode tree) {
     StringBuilder buf = new StringBuilder();
     toInfixString(tree, buf);
@@ -911,4 +922,15 @@ public final class HQLParser {
     }
   }
 
+  public enum AppendMode {
+    LOWER_CASE {
+      @Override public String convert(String s) {
+        return s.toLowerCase();
+      }
+    },
+    DEFAULT;
+    public String convert(String s) {
+      return s;
+    }
+  }
 }

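The AppendMode added above decouples string reconstruction from lower-casing: the existing
one-argument getString keeps the old behaviour (LOWER_CASE), while getString(tree, AppendMode.DEFAULT)
preserves the original case of identifiers and function names, which the Druid rewriter below depends
on for case-sensitive column names. A rough sketch of the difference, assuming a parsed AST (query
text and column name are illustrative):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.parse.ASTNode;
    import org.apache.lens.cube.parse.HQLParser;

    public class AppendModeExample {
      public static void main(String[] args) throws Exception {
        ASTNode ast = HQLParser.parseHQL("select Dollars_Sold from sales_fact", new HiveConf());
        // One-argument form: identifiers and function names are lower-cased.
        System.out.println(HQLParser.getString(ast));
        // Two-argument form with DEFAULT: the original case is preserved.
        System.out.println(HQLParser.getString(ast, HQLParser.AppendMode.DEFAULT));
      }
    }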
http://git-wip-us.apache.org/repos/asf/lens/blob/241603cf/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
index eeba861..07852a0 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
@@ -19,14 +19,17 @@
 
 package org.apache.lens.cube.parse;
 
-import static org.apache.lens.cube.metadata.DateFactory.NOW;
-import static org.apache.lens.cube.metadata.DateFactory.TWODAYS_BACK;
+import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.metadata.UpdatePeriod.DAILY;
 
 import java.text.DateFormat;
-import java.util.Date;
+import java.util.*;
+
+import org.apache.lens.cube.metadata.FactPartition;
+import org.apache.lens.server.api.error.LensException;
 
 import org.testng.Assert;
+import org.testng.annotations.Test;
 
 public class TestBetweenTimeRangeWriter extends TestTimeRangeWriter {
 
@@ -62,4 +65,30 @@ public class TestBetweenTimeRangeWriter extends TestTimeRangeWriter {
     String last = format.format(end);
     return " (" + alias + "." + colName + " BETWEEN '" + first + "' AND '" + last + "') ";
   }
+
+  @Test
+  public void testSinglePartBetweenOnly() throws LensException {
+    Set<FactPartition> answeringParts = new LinkedHashSet<FactPartition>();
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -1), DAILY, null, null));
+    String whereClause = getTimerangeWriter().getTimeRangeWhereClause(getMockedCubeContext(true), "test",
+      answeringParts);
+    validateBetweenOnlySingle(whereClause, null);
+
+    answeringParts = new LinkedHashSet<>();
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -1), DAILY, null, DB_FORMAT));
+    whereClause = getTimerangeWriter().getTimeRangeWhereClause(getMockedCubeContext(true), "test", answeringParts);
+    validateBetweenOnlySingle(whereClause, DB_FORMAT);
+
+  }
+
+  public void validateBetweenOnlySingle(String whereClause, DateFormat format) {
+    String expected = null;
+    if (format == null) {
+      expected =
+        getBetweenClause("test", "dt", getDateWithOffset(DAILY, -1), getDateWithOffset(DAILY, -1), DAILY.format());
+    } else {
+      expected = getBetweenClause("test", "dt", getDateWithOffset(DAILY, -1), getDateWithOffset(DAILY, -1), format);
+    }
+    Assert.assertEquals(expected, whereClause);
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/241603cf/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
index 748f92f..3417031 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
@@ -33,6 +33,9 @@ import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.FactPartition;
 import org.apache.lens.server.api.error.LensException;
 
+import org.apache.hadoop.conf.Configuration;
+
+import org.mockito.Mockito;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
@@ -50,6 +53,15 @@ public abstract class TestTimeRangeWriter {
 
   public abstract void validateConsecutive(String whereClause, DateFormat format);
 
+  protected CubeQueryContext getMockedCubeContext(boolean betweenOnly) {
+    CubeQueryContext context = Mockito.mock(CubeQueryContext.class);
+    Configuration configuration = new Configuration();
+    configuration.setBoolean(CubeQueryConfUtil.BETWEEN_ONLY_TIME_RANGE_WRITER, betweenOnly);
+    Mockito.when(context.getConf()).thenReturn(configuration);
+    Mockito.when(context.shouldReplaceTimeDimWithPart()).thenReturn(true);
+    return context;
+  }
+
   public void validateSingle(String whereClause, DateFormat format) {
     List<String> parts = new ArrayList<String>();
     if (format == null) {
@@ -74,7 +86,7 @@ public abstract class TestTimeRangeWriter {
     LensException th = null;
     String whereClause = null;
     try {
-      whereClause = getTimerangeWriter().getTimeRangeWhereClause(null, "test", answeringParts);
+      whereClause = getTimerangeWriter().getTimeRangeWhereClause(getMockedCubeContext(false), "test", answeringParts);
     } catch (LensException e) {
       log.error("Semantic exception while testing disjoint parts.", e);
       th = e;
@@ -98,7 +110,7 @@ public abstract class TestTimeRangeWriter {
 
     th = null;
     try {
-      whereClause = getTimerangeWriter().getTimeRangeWhereClause(null, "test", answeringParts);
+      whereClause = getTimerangeWriter().getTimeRangeWhereClause(getMockedCubeContext(false), "test", answeringParts);
     } catch (LensException e) {
       th = e;
     }
@@ -124,7 +136,9 @@ public abstract class TestTimeRangeWriter {
     answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -1), DAILY, null, format));
     answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -2), DAILY, null, format));
     answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, 0), DAILY, null, format));
-    String whereClause = getTimerangeWriter().getTimeRangeWhereClause(null, "test", answeringParts);
+
+    String whereClause = getTimerangeWriter().getTimeRangeWhereClause(getMockedCubeContext(false), "test",
+      answeringParts);
     validateConsecutive(whereClause, format);
   }
 
@@ -132,13 +146,15 @@ public abstract class TestTimeRangeWriter {
   public void testSinglePart() throws LensException {
     Set<FactPartition> answeringParts = new LinkedHashSet<FactPartition>();
     answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -1), DAILY, null, null));
-    String whereClause = getTimerangeWriter().getTimeRangeWhereClause(null, "test", answeringParts);
+    String whereClause = getTimerangeWriter().getTimeRangeWhereClause(getMockedCubeContext(false), "test",
+      answeringParts);
     validateSingle(whereClause, null);
 
     answeringParts = new LinkedHashSet<>();
     answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -1), DAILY, null, DB_FORMAT));
-    whereClause = getTimerangeWriter().getTimeRangeWhereClause(null, "test", answeringParts);
+    whereClause = getTimerangeWriter().getTimeRangeWhereClause(getMockedCubeContext(false), "test", answeringParts);
     validateSingle(whereClause, DB_FORMAT);
 
   }
+
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/241603cf/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
index b1fd459..75153f6 100644
--- a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
+++ b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
@@ -55,10 +55,10 @@ import lombok.extern.slf4j.Slf4j;
 public class ColumnarSQLRewriter implements QueryRewriter {
 
   /** The clause name. */
-  private String clauseName = null;
+  protected String clauseName = null;
 
   /** The qb. */
-  private QB qb;
+  protected QB qb;
 
   /** The ast. */
   protected ASTNode ast;
@@ -67,13 +67,13 @@ public class ColumnarSQLRewriter implements QueryRewriter {
   protected String query;
 
   /** The limit. */
-  private String limit;
+  protected String limit;
 
   /** The fact filters. */
-  private final StringBuilder factFilters = new StringBuilder();
+  protected final StringBuilder factFilters = new StringBuilder();
 
   /** The fact in line query. */
-  private final StringBuilder factInLineQuery = new StringBuilder();
+  protected final StringBuilder factInLineQuery = new StringBuilder();
 
   /** The all sub queries. */
   protected StringBuilder allSubQueries = new StringBuilder();
@@ -103,7 +103,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
   protected List<String> rightFilter = new ArrayList<String>();
 
   /** The left filter. */
-  private String leftFilter;
+  protected String leftFilter;
 
   /** The map agg tab alias. */
   private final Map<String, String> mapAggTabAlias = new LinkedHashMap<String, String>();
@@ -122,54 +122,54 @@ public class ColumnarSQLRewriter implements QueryRewriter {
   private final Map<String, String> dimTableToSubqueryMap = new LinkedHashMap<String, String>();
 
   /** The where tree. */
-  private String whereTree;
+  protected String whereTree;
 
   /** The having tree. */
-  private String havingTree;
+  protected String havingTree;
 
   /** The order by tree. */
-  private String orderByTree;
+  protected String orderByTree;
 
   /** The select tree. */
-  private String selectTree;
+  protected String selectTree;
 
   /** The group by tree. */
-  private String groupByTree;
+  protected String groupByTree;
 
   /** The join tree. */
-  private String joinTree;
+  protected String joinTree;
 
   /** The from tree. */
-  private String fromTree;
+  protected String fromTree;
 
   /** The join ast. */
   @Getter
-  private ASTNode joinAST;
+  protected ASTNode joinAST;
 
   /** The having ast. */
   @Getter
-  private ASTNode havingAST;
+  protected ASTNode havingAST;
 
   /** The select ast. */
   @Getter
-  private ASTNode selectAST;
+  protected ASTNode selectAST;
 
   /** The where ast. */
   @Getter
-  private ASTNode whereAST;
+  protected ASTNode whereAST;
 
   /** The order by ast. */
   @Getter
-  private ASTNode orderByAST;
+  protected ASTNode orderByAST;
 
   /** The group by ast. */
   @Getter
-  private ASTNode groupByAST;
+  protected ASTNode groupByAST;
 
   /** The from ast. */
   @Getter
   protected ASTNode fromAST;
-  private Map<String, String> regexReplaceMap;
+  protected Map<String, String> regexReplaceMap;
 
   /**
    * Instantiates a new columnar sql rewriter.
@@ -413,7 +413,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
    * @param count
    * @return Number of fact columns used in expression
    */
-  private int getNumFactTableInExpressions(ASTNode node, MutableInt count) {
+  protected int getNumFactTableInExpressions(ASTNode node, MutableInt count) {
 
     if (node == null) {
       log.debug("ASTNode is null ");
@@ -1072,7 +1072,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
    * @param fromTables the from tables
    * @return the all tablesfrom from ast
    */
-  private void getAllTablesfromFromAST(ASTNode from, ArrayList<String> fromTables) {
+  protected void getAllTablesfromFromAST(ASTNode from, ArrayList<String> fromTables) {
     String table;
     if (TOK_TABREF == from.getToken().getType()) {
       ASTNode tabName = (ASTNode) from.getChild(0);
@@ -1098,7 +1098,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
    *
    * @param from
    */
-  private void updateAliasFromAST(ASTNode from) {
+  protected void updateAliasFromAST(ASTNode from) {
 
     String newAlias;
     String table;
@@ -1129,7 +1129,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
    *
    * @param tree
    */
-  private void replaceAlias(ASTNode tree) {
+  protected void replaceAlias(ASTNode tree) {
     if (TOK_TABLE_OR_COL == tree.getToken().getType()) {
       ASTNode alias = (ASTNode) tree.getChild(0);
       if (mapAliases.get(tree.getChild(0).toString()) != null) {
@@ -1157,7 +1157,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
    * @param orderbytree the orderbytree
    * @param limit       the limit
    */
-  private void constructQuery(String selecttree, String wheretree, String groupbytree,
+  protected void constructQuery(String selecttree, String wheretree, String groupbytree,
     String havingtree, String orderbytree, String limit) {
 
     String finalJoinClause = "";
@@ -1235,7 +1235,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
 
 
   @NoArgsConstructor
-  private static class NativeTableInfo {
+  protected static class NativeTableInfo {
     private Map<String, String> columnMapping = new LinkedHashMap<>();
     NativeTableInfo(Table tbl) {
       String columnMappingProp = tbl.getProperty(LensConfConstants.NATIVE_TABLE_COLUMN_MAPPING);
@@ -1253,7 +1253,7 @@ public class ColumnarSQLRewriter implements QueryRewriter {
     }
   }
 
-  private Map<String, NativeTableInfo> aliasToNativeTableInfo = new LinkedHashMap<>();
+  protected Map<String, NativeTableInfo> aliasToNativeTableInfo = new LinkedHashMap<>();
 
   /**
    * Replace with underlying storage.

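The visibility changes above (private to protected) open ColumnarSQLRewriter up for subclassing:
the clause strings and ASTs it populates during analysis become reusable extension points, which is
exactly what the new DruidSQLRewriter in the next file builds on. A hypothetical minimal subclass,
purely to illustrate the now-available surface (the class and its logging are not part of the patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.lens.driver.jdbc.ColumnarSQLRewriter;
    import org.apache.lens.server.api.error.LensException;

    // Hypothetical subclass: shows that the parent's clause trees are now
    // accessible after its analysis runs.
    public class LoggingSQLRewriter extends ColumnarSQLRewriter {
      @Override
      public String rewrite(String query, Configuration conf, HiveConf metastoreConf) throws LensException {
        String rewritten = super.rewrite(query, conf, metastoreConf);
        System.out.println("select tree was: " + selectTree); // protected field from the parent
        return rewritten;
      }
    }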
http://git-wip-us.apache.org/repos/asf/lens/blob/241603cf/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/DruidSQLRewriter.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/DruidSQLRewriter.java b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/DruidSQLRewriter.java
new file mode 100644
index 0000000..eb1d69c
--- /dev/null
+++ b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/DruidSQLRewriter.java
@@ -0,0 +1,260 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.driver.jdbc;
+
+import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
+
+import java.util.ArrayList;
+import java.util.TreeSet;
+
+import org.apache.lens.cube.parse.CubeSemanticAnalyzer;
+import org.apache.lens.cube.parse.HQLParser;
+import org.apache.lens.server.api.error.LensException;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.QB;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class DruidSQLRewriter extends ColumnarSQLRewriter {
+
+  /**
+   * Whether to resolve native tables or not. If the query has a subquery, the outer query may not
+   * require native table resolution.
+   */
+  private boolean resolveNativeTables;
+
+  /**
+   * Analyzes the query AST and splits it into the individual clause subtrees.
+   *
+   * @throws SemanticException the semantic exception
+   */
+  public void analyzeInternal(Configuration conf, HiveConf hconf) throws SemanticException {
+    CubeSemanticAnalyzer c1 = new CubeSemanticAnalyzer(conf, hconf);
+
+    QB qb = new QB(null, null, false);
+
+    if (!c1.doPhase1(ast, qb, c1.initPhase1Ctx(), null)) {
+      return;
+    }
+
+    if (!qb.getSubqAliases().isEmpty()) {
+      log.warn("Subqueries in from clause is not supported by {} Query : {}", this, this.query);
+      throw new SemanticException("Subqueries in from clause is not supported by " + this + " Query : " + this.query);
+    }
+
+    // Get clause name
+    TreeSet<String> ks = new TreeSet<String>(qb.getParseInfo().getClauseNames());
+    String clauseName = ks.first();
+
+    if (qb.getParseInfo().getJoinExpr() != null) {
+      log.warn("Join queries not supported by {} Query : {}", this, this.query);
+      throw new SemanticException("Join queries not supported by " + this + " Query : " + this.query);
+    }
+    // Split query into trees
+    if (qb.getParseInfo().getWhrForClause(clauseName) != null) {
+      this.whereAST = qb.getParseInfo().getWhrForClause(clauseName);
+    }
+
+    if (qb.getParseInfo().getHavingForClause(clauseName) != null) {
+      this.havingAST = qb.getParseInfo().getHavingForClause(clauseName);
+    }
+
+    if (qb.getParseInfo().getOrderByForClause(clauseName) != null) {
+      this.orderByAST = qb.getParseInfo().getOrderByForClause(clauseName);
+    }
+
+    if (qb.getParseInfo().getGroupByForClause(clauseName) != null) {
+      this.groupByAST = qb.getParseInfo().getGroupByForClause(clauseName);
+    }
+
+    if (qb.getParseInfo().getSelForClause(clauseName) != null) {
+      this.selectAST = qb.getParseInfo().getSelForClause(clauseName);
+    }
+
+    this.fromAST = HQLParser.findNodeByPath(ast, TOK_FROM);
+
+  }
+
+  /**
+   * Builds the Druid query from the analyzed clause trees.
+   *
+   * @throws SemanticException the semantic exception
+   */
+  public void buildDruidQuery(Configuration conf, HiveConf hconf) throws SemanticException, LensException {
+    analyzeInternal(conf, hconf);
+    if (resolveNativeTables) {
+      replaceWithUnderlyingStorage(hconf);
+    }
+
+    // Get the limit clause
+    String limit = getLimitClause(ast);
+
+    ArrayList<String> filters = new ArrayList<>();
+    getWhereString(whereAST, filters);
+
+    // construct the flat query from the clause trees
+    constructQuery(HQLParser.getString(selectAST, HQLParser.AppendMode.DEFAULT), filters,
+      HQLParser.getString(groupByAST, HQLParser.AppendMode.DEFAULT),
+      HQLParser.getString(havingAST, HQLParser.AppendMode.DEFAULT),
+      HQLParser.getString(orderByAST, HQLParser.AppendMode.DEFAULT), limit);
+
+  }
+
+  private ArrayList<String> getWhereString(ASTNode node, ArrayList<String> filters) throws LensException {
+
+    if (node == null) {
+      return null;
+    }
+    if (node.getToken().getType() == HiveParser.KW_AND) {
+      // AND trees are left-deep: when the left child is another AND, collect the
+      // right-hand operand here and recurse into the left child below.
+      if (node.getChild(0).getType() == HiveParser.KW_AND) {
+        filters.add(getFilterSubquery(node, 1));
+      } else if (node.getChildCount() > 1) {
+        // Innermost AND: both operands are plain filter expressions.
+        for (int i = 0; i < node.getChildCount(); i++) {
+          filters.add(getFilterSubquery(node, i));
+        }
+      }
+    } else if (node.getParent().getType() == HiveParser.TOK_WHERE
+      && node.getToken().getType() != HiveParser.KW_AND) {
+      filters.add(HQLParser.getString(node, HQLParser.AppendMode.DEFAULT));
+    }
+    // Descend only into the first child: for a left-deep AND tree the remaining
+    // operands have already been collected above.
+    if (node.getChildCount() > 0) {
+      return getWhereString((ASTNode) node.getChild(0), filters);
+    }
+    return filters;
+  }
+
+  private String getFilterSubquery(ASTNode node, int index) throws LensException {
+    String filter;
+    if (node.getChild(index).getType() == HiveParser.TOK_SUBQUERY_EXPR) {
+      log.warn("Subqueries in where clause not supported by {} Query : {}", this, this.query);
+      throw new LensException("Subqueries in where clause not supported by " + this + " Query : " + this.query);
+    } else {
+      filter = HQLParser.getString((ASTNode) node.getChild(index), HQLParser.AppendMode.DEFAULT);
+    }
+    return filter;
+  }
+
+  /**
+   * Constructs the final query from all the clause trees.
+   *
+   * @param selecttree   the selecttree
+   * @param whereFilters the where filters
+   * @param groupbytree  the groupbytree
+   * @param havingtree   the havingtree
+   * @param orderbytree  the orderbytree
+   * @param limit        the limit
+   */
+  private void constructQuery(
+    String selecttree, ArrayList<String> whereFilters, String groupbytree,
+    String havingtree, String orderbytree, String limit) {
+
+    log.info("In construct query ..");
+
+    rewrittenQuery.append("select ").append(selecttree.replaceAll("`", "\"")).append(" from ");
+
+    String factNameAndAlias = getFactNameAlias(fromAST);
+
+    rewrittenQuery.append(factNameAndAlias);
+
+    if (!whereFilters.isEmpty()) {
+      rewrittenQuery.append(" where ").append(StringUtils.join(whereFilters, " and "));
+    }
+    if (StringUtils.isNotBlank(groupbytree)) {
+      rewrittenQuery.append(" group by ").append(groupbytree);
+    }
+    if (StringUtils.isNotBlank(havingtree)) {
+      rewrittenQuery.append(" having ").append(havingtree);
+    }
+    if (StringUtils.isNotBlank(orderbytree)) {
+      rewrittenQuery.append(" order by ").append(orderbytree);
+    }
+    if (StringUtils.isNotBlank(limit)) {
+      rewrittenQuery.append(" limit ").append(limit);
+    }
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.apache.lens.server.api.query.QueryRewriter#rewrite(java.lang.String, org.apache.hadoop.conf.Configuration)
+   */
+  @Override
+  public String rewrite(String query, Configuration conf, HiveConf metastoreConf) throws LensException {
+    this.query = query;
+    String rewritten = rewrite(HQLParser.parseHQL(query, metastoreConf), conf, metastoreConf, true);
+
+    log.info("Rewritten : {}", rewritten);
+    String queryReplacedUdf = replaceUDFForDB(rewritten);
+    log.info("Input Query : {}", query);
+    log.info("Rewritten Query : {}", queryReplacedUdf);
+    return queryReplacedUdf;
+  }
+
+  public String rewrite(ASTNode currNode, Configuration conf, HiveConf metastoreConf, boolean resolveNativeTables)
+    throws LensException {
+    this.resolveNativeTables = resolveNativeTables;
+    rewrittenQuery.setLength(0);
+    reset();
+    this.ast = currNode;
+
+    ASTNode fromNode = HQLParser.findNodeByPath(currNode, TOK_FROM);
+    if (fromNode != null) {
+      if (fromNode.getChild(0).getType() == TOK_SUBQUERY) {
+        log.warn("Subqueries in from clause not supported by {} Query : {}", this, this.query);
+        throw new LensException("Subqueries in from clause not supported by " + this + " Query : " + this.query);
+      } else if (isOfTypeJoin(fromNode.getChild(0).getType())) {
+        log.warn("Join in from clause not supported by {} Query : {}", this, this.query);
+        throw new LensException("Join in from clause not supported by " + this + " Query : " + this.query);
+      }
+    }
+
+    if (currNode.getToken().getType() == TOK_UNIONALL) {
+      log.warn("Union queries are not supported by {} Query : {}", this, this.query);
+      throw new LensException("Union queries are not supported by " + this + " Query : " + this.query);
+    }
+
+    String rewrittenQueryText = rewrittenQuery.toString();
+    if (currNode.getToken().getType() == TOK_QUERY) {
+      try {
+        buildDruidQuery(conf, metastoreConf);
+        rewrittenQueryText = rewrittenQuery.toString();
+        log.info("Rewritten query from build : {}", rewrittenQueryText);
+      } catch (SemanticException e) {
+        throw new LensException(e);
+      }
+    }
+    return rewrittenQueryText;
+  }
+
+  private boolean isOfTypeJoin(int type) {
+    return (type == TOK_JOIN || type == TOK_LEFTOUTERJOIN || type == TOK_RIGHTOUTERJOIN
+      || type == TOK_FULLOUTERJOIN || type == TOK_LEFTSEMIJOIN || type == TOK_UNIQUEJOIN);
+  }
+
+}

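Taken together, the rewriter parses the incoming HQL, rejects joins, unions and subqueries, resolves
native table names when asked to, and reassembles a flat select/where/group by/having/order by/limit
statement. A usage sketch mirroring the tests that follow (table name and configuration are
illustrative; a real run also needs a started Hive SessionState, as the tests show):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.session.SessionState;
    import org.apache.lens.driver.jdbc.DruidSQLRewriter;

    public class DruidRewriteExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(); // e.g. loaded from jdbcdriver-site.xml
        HiveConf hconf = new HiveConf();
        SessionState.start(hconf);

        DruidSQLRewriter rewriter = new DruidSQLRewriter();
        rewriter.init(conf);
        // Joins, unions and subqueries would make this throw LensException.
        String druidSql = rewriter.rewrite(
          "select fact.time_key, sum(fact.dollars_sold) from sales_fact fact group by fact.time_key",
          conf, hconf);
        System.out.println(druidSql);
      }
    }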
http://git-wip-us.apache.org/repos/asf/lens/blob/241603cf/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestDruidSQLRewriter.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestDruidSQLRewriter.java b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestDruidSQLRewriter.java
new file mode 100644
index 0000000..c4cc91d
--- /dev/null
+++ b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestDruidSQLRewriter.java
@@ -0,0 +1,411 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.driver.jdbc;
+
+import static org.testng.Assert.*;
+
+import java.io.File;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lens.cube.parse.HQLParser;
+import org.apache.lens.cube.parse.TestQuery;
+import org.apache.lens.server.api.LensConfConstants;
+import org.apache.lens.server.api.error.LensException;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+import org.testng.Assert;
+import org.testng.annotations.AfterTest;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Test;
+
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class TestDruidSQLRewriter {
+
+  HiveConf hconf = new HiveConf();
+  Configuration conf = new Configuration();
+  DruidSQLRewriter qtest = new DruidSQLRewriter();
+
+  /*
+   * Star schema used for the queries below:
+   *
+   * create table sales_fact (time_key varchar, item_key varchar, dollars_sold double, units_sold int);
+   */
+
+  /**
+   * Compare queries.
+   *
+   * @param actual   the actual
+   * @param expected the expected
+   */
+  private void compareQueries(String actual, String expected) {
+    assertEquals(new TestQuery(actual), new TestQuery(expected));
+  }
+
+  /**
+   * Setup.
+   *
+   * @throws Exception the exception
+   */
+  @BeforeTest
+  public void setup() throws Exception {
+    conf.addResource("jdbcdriver-default.xml");
+    conf.addResource("drivers/jdbc/jdbc1/jdbcdriver-site.xml");
+    conf.addResource("drivers/jdbc/druid/jdbcdriver-site.xml");
+    qtest.init(conf);
+    hconf.addResource(conf);
+    SessionState.start(hconf);
+    List<FieldSchema> factColumns = new ArrayList<>();
+    factColumns.add(new FieldSchema("time_key", "string", ""));
+    factColumns.add(new FieldSchema("item_key", "int", ""));
+    factColumns.add(new FieldSchema("dollars_sold", "double", ""));
+    factColumns.add(new FieldSchema("units_sold", "int", ""));
+
+    try {
+      createHiveTable("default", "sales_fact", factColumns);
+    } catch (HiveException e) {
+      log.error("Encountered hive exception.", e);
+    }
+  }
+
+  /**
+   * Creates the hive table.
+   *
+   * @param db      the db
+   * @param table   the table
+   * @param columns the columns
+   * @throws Exception the exception
+   */
+  void createHiveTable(String db, String table, List<FieldSchema> columns) throws Exception {
+    Table tbl1 = new Table(db, table);
+    tbl1.setFields(columns);
+
+    Hive.get().createTable(tbl1);
+    System.out.println("Created table : " + table);
+  }
+
+  /**
+   * Clean.
+   *
+   * @throws HiveException the hive exception
+   */
+  @AfterTest
+  public void clean() throws HiveException {
+    try {
+      Hive.get().dropTable("default.sales_fact");
+    } catch (HiveException e) {
+      log.error("Encountered hive exception", e);
+    }
+  }
+
+  @Test
+  // Testing multiple queries in one instance
+  public void testNoRewrite() throws LensException {
+
+    SessionState.start(hconf);
+
+    String query = "select count(distinct time_key) from sales_fact";
+    String actual = qtest.rewrite(query, conf, hconf);
+    String expected = "select count( distinct  time_key ) from sales_fact ";
+    compareQueries(actual, expected);
+
+    String query2 = "select count(distinct time_key) from sales_fact sales_fact";
+    String actual2 = qtest.rewrite(query2, conf, hconf);
+    String expected2 = "select count( distinct  time_key ) from sales_fact sales_fact___sales_fact";
+    compareQueries(actual2, expected2);
+
+    String query3 = "select count(distinct sales_fact.time_key) from db.sales_fact sales_fact";
+    String actual3 = qtest.rewrite(query3, conf, hconf);
+    String expected3 = "select count( distinct ( sales_fact__db_sales_fact_sales_fact . time_key )) "
+      + "from db.sales_fact sales_fact__db_sales_fact_sales_fact";
+    compareQueries(actual3, expected3);
+  }
+
+
+  @Test
+  public void testRewrittenQuery() throws LensException {
+
+    String query =
+      "select fact.time_key as `Time Key`, sum(fact.dollars_sold) from sales_fact fact group by fact.time_key order"
+        + " by dollars_sold  ";
+
+    SessionState.start(hconf);
+    String actual = qtest.rewrite(query, conf, hconf);
+    String expected = "select ( fact . time_key ) as \"Time Key\" , sum(( fact . dollars_sold )) from sales_fact "
+      + "fact group by ( fact . time_key ) order by dollars_sold asc";
+    compareQueries(actual, expected);
+  }
+
+  @Test
+  public void testJoinQueryFail() {
+    String query =
+      "select time_dim.day_of_week, sum(fact.dollars_sold) as dollars_sold from sales_fact fact  "
+        + "inner join item_dim item_dim on fact.item_key = item_dim.item_key "
+        + "where fact.item_key in (select item_key from test.item_dim idim where idim.item_name = 'item_1') ";
+
+    SessionState.start(hconf);
+    try {
+      qtest.rewrite(query, conf, hconf);
+      Assert.fail("The Join query did NOT suffer any exception");
+    } catch (LensException e) {
+      System.out.println("Exception as expected in Join testcase");
+    }
+  }
+
+  @Test
+  public void testWhereSubQueryFail() {
+    String query =
+      "select time_dim.day_of_week, sum(fact.dollars_sold) as dollars_sold from sales_fact fact  "
+        + "where fact.item_key in (select item_key from test.item_dim idim where idim.item_name = 'item_1') "
+        + "and fact.location_key in (select location_key from test.location_dim ldim where "
+        + "ldim.location_name = 'loc_1') "
+        + "group by time_dim.day_of_week "
+        + "order by dollars_sold";
+
+    SessionState.start(hconf);
+
+    try {
+      qtest.rewrite(query, conf, hconf);
+      Assert.fail("The Where Sub query did NOT suffer any exception");
+    } catch (LensException e) {
+      System.out.println("Exception as expected in where sub query..");
+    }
+  }
+
+  @Test
+  public void testUnionQueryFail() {
+    String query = "select a,sum(b)as b from ( select a,b from tabl1 where a<=10  union all select a,b from tabl2 where"
+      + " a>10 and a<=20 union all select a,b from tabl3 where a>20 )unionResult group by a order by b desc limit 10";
+
+    SessionState.start(hconf);
+    try {
+      qtest.rewrite(query, conf, hconf);
+      Assert.fail("The invalid query did NOT suffer any exception");
+    } catch (LensException e) {
+      System.out.println("Exception as expected in Union query..");
+    }
+  }
+
+  /**
+   * Test replace db name.
+   *
+   * @throws Exception the exception
+   */
+  @Test
+  public void testReplaceDBName() throws Exception {
+    File jarDir = new File("target/testjars");
+    File testJarFile = new File(jarDir, "test.jar");
+    File serdeJarFile = new File(jarDir, "serde.jar");
+
+    URL[] serdeUrls = new URL[2];
+    serdeUrls[0] = new URL("file:" + testJarFile.getAbsolutePath());
+    serdeUrls[1] = new URL("file:" + serdeJarFile.getAbsolutePath());
+
+    URLClassLoader createTableClassLoader = new URLClassLoader(serdeUrls, hconf.getClassLoader());
+    hconf.setClassLoader(createTableClassLoader);
+    SessionState.start(hconf);
+
+    // Create test table
+    Database database = new Database();
+    database.setName("mydb");
+
+    Hive.get(hconf).createDatabase(database);
+    SessionState.get().setCurrentDatabase("mydb");
+    createTable(hconf, "mydb", "mytable", "testDB", "testTable_1");
+
+    String query = "SELECT * FROM mydb.mytable t1 WHERE A = 100";
+
+    DruidSQLRewriter rewriter = new DruidSQLRewriter();
+    rewriter.init(conf);
+    rewriter.ast = HQLParser.parseHQL(query, hconf);
+    rewriter.query = query;
+    rewriter.analyzeInternal(conf, hconf);
+
+    String joinTreeBeforeRewrite = HQLParser.getString(rewriter.fromAST);
+    System.out.println(joinTreeBeforeRewrite);
+
+    // Rewrite
+    rewriter.replaceWithUnderlyingStorage(hconf);
+    String joinTreeAfterRewrite = HQLParser.getString(rewriter.fromAST);
+    System.out.println("joinTreeAfterRewrite:" + joinTreeAfterRewrite);
+
+    // Tests
+    assertTrue(joinTreeBeforeRewrite.contains("mydb"));
+    assertTrue(joinTreeBeforeRewrite.contains("mytable"));
+
+    assertFalse(joinTreeAfterRewrite.contains("mydb"));
+    assertFalse(joinTreeAfterRewrite.contains("mytable"));
+
+    assertTrue(joinTreeAfterRewrite.contains("testdb"));
+    assertTrue(joinTreeAfterRewrite.contains("testtable_1"));
+
+    // Rewrite one more query where table and db name is not set
+    createTable(hconf, "mydb", "mytable_2", null, null);
+    String query2 = "SELECT * FROM mydb.mytable_2 WHERE a = 100";
+    rewriter.ast = HQLParser.parseHQL(query2, hconf);
+    rewriter.query = query2;
+    rewriter.analyzeInternal(conf, hconf);
+
+    joinTreeBeforeRewrite = HQLParser.getString(rewriter.fromAST);
+    System.out.println(joinTreeBeforeRewrite);
+
+    // Rewrite
+    rewriter.replaceWithUnderlyingStorage(hconf);
+    joinTreeAfterRewrite = HQLParser.getString(rewriter.fromAST);
+    System.out.println(joinTreeAfterRewrite);
+
+    // Rewrite should not replace db and table name since its not set
+    assertEquals(joinTreeAfterRewrite, joinTreeBeforeRewrite);
+
+    // Test a query with default db
+    Hive.get().dropTable("mydb", "mytable");
+    database = new Database();
+    database.setName("examples");
+    Hive.get().createDatabase(database);
+    createTable(hconf, "examples", "mytable", "default", null);
+
+    String defaultQuery = "SELECT * FROM examples.mytable t1 WHERE A = 100";
+    rewriter.ast = HQLParser.parseHQL(defaultQuery, hconf);
+    rewriter.query = defaultQuery;
+    rewriter.analyzeInternal(conf, hconf);
+    joinTreeBeforeRewrite = HQLParser.getString(rewriter.fromAST);
+    rewriter.replaceWithUnderlyingStorage(hconf);
+    joinTreeAfterRewrite = HQLParser.getString(rewriter.fromAST);
+    assertTrue(joinTreeBeforeRewrite.contains("examples"), joinTreeBeforeRewrite);
+    assertFalse(joinTreeAfterRewrite.contains("examples"), joinTreeAfterRewrite);
+    System.out.println("default case: " + joinTreeAfterRewrite);
+
+    Hive.get().dropTable("mydb", "mytable");
+    Hive.get().dropTable("mydb", "mytable_2");
+    Hive.get().dropTable("examples", "mytable");
+
+    Hive.get().dropDatabase("mydb", true, true, true);
+    Hive.get().dropDatabase("examples", true, true, true);
+    SessionState.get().setCurrentDatabase("default");
+  }
+
+  void createTable(HiveConf conf, String db, String table, String udb, String utable) throws Exception {
+    createTable(conf, db, table, udb, utable, true, null);
+  }
+
+  /**
+   * Test replace column mapping.
+   *
+   * @throws Exception the exception
+   */
+  @Test
+  public void testReplaceColumnMapping() throws Exception {
+    SessionState.start(hconf);
+    String testDB = "testrcm";
+    Hive.get().dropDatabase(testDB, true, true, true);
+
+    // Create test table
+    Database database = new Database();
+    database.setName(testDB);
+
+    Hive.get(hconf).createDatabase(database);
+    try {
+      SessionState.get().setCurrentDatabase(testDB);
+      Map<String, String> columnMap = new HashMap<>();
+      columnMap.put("id", "id1");
+      columnMap.put("name", "name1");
+      columnMap.put("dollars_sold", "Dollars_Sold");
+      columnMap.put("units_sold", "Units_Sold");
+
+      createTable(hconf, testDB, "mytable", "testDB", "testTable_1", false, columnMap);
+
+      String query = "SELECT t1.id, t1.name, sum(t1.dollars_sold), sum(t1.units_sold) FROM " + testDB
+        + ".mytable t1 WHERE t1.id = 100 GROUP BY t1.id HAVING count(t1.id) > 2 ORDER BY t1.id";
+
+      DruidSQLRewriter rewriter = new DruidSQLRewriter();
+      rewriter.init(conf);
+      rewriter.ast = HQLParser.parseHQL(query, hconf);
+      rewriter.query = query;
+      rewriter.analyzeInternal(conf, hconf);
+
+      String actual = rewriter.rewrite(query, conf, hconf);
+      System.out.println("Actual : " + actual);
+      String expected =
+        "select (t1.id1), (t1.name1), sum((t1.Dollars_Sold)), sum((t1.Units_Sold)) from testDB.testTable_1 t1 where ("
+          + "(t1.id1) = 100) group by (t1.id1) having (count((t1.id1)) > 2) order by t1.id1 asc";
+
+      compareQueries(actual, expected);
+
+    } finally {
+      Hive.get().dropTable(testDB, "mytable", true, true);
+      Hive.get().dropDatabase(testDB, true, true, true);
+      SessionState.get().setCurrentDatabase("default");
+    }
+  }
+
+  /**
+   * Creates the table.
+   *
+   * @param db     the db
+   * @param table  the table
+   * @param udb    the udb
+   * @param utable the utable
+   * @param setCustomSerde whether to set custom serde or not
+   * @param columnMapping columnmapping for the table
+   *
+   * @throws Exception the exception
+   */
+  void createTable(
+    HiveConf conf, String db, String table, String udb, String utable, boolean setCustomSerde,
+    Map<String, String> columnMapping) throws Exception {
+    Table tbl1 = new Table(db, table);
+
+    if (StringUtils.isNotBlank(udb)) {
+      tbl1.setProperty(LensConfConstants.NATIVE_DB_NAME, udb);
+    }
+    if (StringUtils.isNotBlank(utable)) {
+      tbl1.setProperty(LensConfConstants.NATIVE_TABLE_NAME, utable);
+    }
+    if (columnMapping != null && !columnMapping.isEmpty()) {
+      tbl1.setProperty(LensConfConstants.NATIVE_TABLE_COLUMN_MAPPING, StringUtils.join(columnMapping.entrySet(), ","));
+      log.info("columnMapping property:{}", tbl1.getProperty(LensConfConstants.NATIVE_TABLE_COLUMN_MAPPING));
+    }
+
+    List<FieldSchema> columns = new ArrayList<FieldSchema>();
+    columns.add(new FieldSchema("id", "int", "col1"));
+    columns.add(new FieldSchema("name", "string", "col2"));
+    columns.add(new FieldSchema("dollars_sold", "double", "col3"));
+    columns.add(new FieldSchema("units_sold", "int", "col4"));
+
+    tbl1.setFields(columns);
+
+    Hive.get(conf).createTable(tbl1);
+    System.out.println("Created table " + table);
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/241603cf/lens-driver-jdbc/src/test/resources/drivers/jdbc/druid/jdbcdriver-site.xml
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/resources/drivers/jdbc/druid/jdbcdriver-site.xml b/lens-driver-jdbc/src/test/resources/drivers/jdbc/druid/jdbcdriver-site.xml
new file mode 100644
index 0000000..e4fad23
--- /dev/null
+++ b/lens-driver-jdbc/src/test/resources/drivers/jdbc/druid/jdbcdriver-site.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+    <property>
+        <name>lens.driver.jdbc.driver.class</name>
+        <value>com.mysql.jdbc.Driver</value>
+    </property>
+    <property>
+        <name>lens.driver.jdbc.db.uri</name>
+        <value>jdbc:mysql://127.0.0.1:3307/plyql1</value>
+    </property>
+    <property>
+        <name>lens.driver.jdbc.db.user</name>
+        <value>root</value>
+    </property>
+    <property>
+        <name>lens.cube.query.driver.supported.storages</name>
+        <value>mydb</value>
+        <final>true</final>
+    </property>
+    <property>
+        <name>lens.driver.jdbc.query.rewriter</name>
+        <value>org.apache.lens.driver.jdbc.DruidSQLRewriter</value>
+    </property>
+    <property>
+        <name>lens.driver.jdbc.validate.through.prepare</name>
+        <value>false</value>
+    </property>
+    <property>
+        <name>lens.driver.jdbc.explain.keyword</name>
+        <value>explain plan for </value>
+    </property>
+    <property>
+        <name>lens.cube.query.time.range.writer.class</name>
+        <value>org.apache.lens.cube.parse.BetweenTimeRangeWriter</value>
+    </property>
+    <property>
+        <name>lens.cube.query.partition.where.clause.format</name>
+        <value>yyyy-MM-dd HH:mm:ss</value>
+    </property>
+    <property>
+        <name>lens.cube.query.between.only.time.range.writer</name>
+        <value>true</value>
+    </property>
+</configuration>
\ No newline at end of file