Posted to commits@lens.apache.org by pr...@apache.org on 2016/02/19 09:08:22 UTC

lens git commit: LENS-960: getting string from AST should give flat string wherever possible

Repository: lens
Updated Branches:
  refs/heads/master c0199ead2 -> 6be4d48fd


LENS-960: getting string from AST should give flat string wherever possible
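
Below is a minimal standalone sketch, not taken from the commit, of the
whitespace normalization that HQLParser.getString gains in this change: the
reconstructed infix string is trimmed and whitespace runs are collapsed to
single spaces. The class name and sample input are illustrative only.

    // Sketch only: mirrors the normalization added to HQLParser.getString.
    public class FlatStringSketch {
      static String flatten(String infix) {
        // trim, then collapse every run of whitespace to a single space
        return infix.trim().replaceAll("\\s+", " ");
      }

      public static void main(String[] args) {
        System.out.println(flatten("  (( a  +  b ) <=  1000 ) "));
        // prints: (( a + b ) <= 1000 )
      }
    }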


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/6be4d48f
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/6be4d48f
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/6be4d48f

Branch: refs/heads/master
Commit: 6be4d48fd4620e13435a266380f84119f87a9233
Parents: c0199ea
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Fri Feb 19 13:36:40 2016 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Fri Feb 19 13:36:40 2016 +0530

----------------------------------------------------------------------
 .../org/apache/lens/cube/parse/HQLParser.java   | 119 +++++++++----------
 .../lens/cube/parse/TestBaseCubeQueries.java    |  12 +-
 .../lens/cube/parse/TestCubeRewriter.java       |   8 +-
 .../lens/cube/parse/TestExpressionContext.java  |  87 +++++++-------
 .../apache/lens/cube/parse/TestHQLParser.java   |  92 ++++++++------
 .../lens/driver/jdbc/ColumnarSQLRewriter.java   |  21 ++--
 .../driver/jdbc/TestColumnarSQLRewriter.java    |  10 +-
 7 files changed, 179 insertions(+), 170 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/6be4d48f/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
index fdef3f1..8d6105f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
@@ -83,13 +83,14 @@ public final class HQLParser {
   }
 
   public static final Set<Integer> BINARY_OPERATORS;
+  public static final Set<Integer> N_ARY_OPERATORS;
   public static final Set<Integer> FILTER_OPERATORS;
   public static final Set<Integer> ARITHMETIC_OPERATORS;
   public static final Set<Integer> UNARY_OPERATORS;
   public static final Set<Integer> PRIMITIVE_TYPES;
 
   static {
-    HashSet<Integer> ops = new HashSet<Integer>();
+    HashSet<Integer> ops = new HashSet<>();
     ops.add(DOT);
     ops.add(KW_AND);
     ops.add(KW_OR);
@@ -113,20 +114,22 @@ public final class HQLParser {
     ops.add(BITWISEXOR);
 
     BINARY_OPERATORS = Collections.unmodifiableSet(ops);
+    N_ARY_OPERATORS = Collections.unmodifiableSet(Sets.newHashSet(KW_AND, KW_OR, PLUS, STAR,
+      AMPERSAND, BITWISEOR, BITWISEXOR));
 
-    ARITHMETIC_OPERATORS = new HashSet<Integer>();
+    ARITHMETIC_OPERATORS = new HashSet<>();
     ARITHMETIC_OPERATORS.add(PLUS);
     ARITHMETIC_OPERATORS.add(MINUS);
     ARITHMETIC_OPERATORS.add(STAR);
     ARITHMETIC_OPERATORS.add(DIVIDE);
     ARITHMETIC_OPERATORS.add(MOD);
 
-    HashSet<Integer> unaryOps = new HashSet<Integer>();
+    HashSet<Integer> unaryOps = new HashSet<>();
     unaryOps.add(KW_NOT);
     unaryOps.add(TILDE);
     UNARY_OPERATORS = Collections.unmodifiableSet(unaryOps);
 
-    HashSet<Integer> primitiveTypes = new HashSet<Integer>();
+    HashSet<Integer> primitiveTypes = new HashSet<>();
     primitiveTypes.add(TOK_TINYINT);
     primitiveTypes.add(TOK_SMALLINT);
     primitiveTypes.add(TOK_INT);
@@ -148,10 +151,6 @@ public final class HQLParser {
       EQUAL_NS);
   }
 
-  public static boolean isArithmeticOp(int tokenType) {
-    return ARITHMETIC_OPERATORS.contains(tokenType);
-  }
-
   public static ASTNode parseHQL(String query, HiveConf conf) throws LensException {
     ParseDriver driver = new ParseDriver();
     ASTNode tree = null;
@@ -199,8 +198,10 @@ public final class HQLParser {
   /**
    * Debug function for printing query AST to stdout
    *
-   * @param node
-   * @param level
+   * @param tokenMapping token mapping
+   * @param node         node
+   * @param level        level
+   * @param child        child
    */
   public static void printAST(Map<Integer, String> tokenMapping, ASTNode node, int level, int child) {
     if (node == null || node.isNil()) {
@@ -211,7 +212,7 @@ public final class HQLParser {
       System.out.print("  ");
     }
 
-    System.out.print(node.getText() + " [" + tokenMapping.get(node.getToken().getType()) + "]");
+    System.out.print(node.getText() + " [" + tokenMapping.get(node.getType()) + "]");
     System.out.print(" (l" + level + "c" + child + "p" + node.getCharPositionInLine() + ")");
 
     if (node.getChildCount() > 0) {
@@ -239,7 +240,7 @@ public final class HQLParser {
   }
 
   public static Map<Integer, String> getHiveTokenMapping() throws Exception {
-    Map<Integer, String> mapping = new HashMap<Integer, String>();
+    Map<Integer, String> mapping = new HashMap<>();
 
     for (Field f : HiveParser.class.getFields()) {
       if (f.getType() == int.class) {
@@ -256,9 +257,9 @@ public final class HQLParser {
    * Find a node in the tree rooted at root, given the path of type of tokens from the root's children to the desired
    * node
    *
-   * @param root
+   * @param root node from which searching is to be started
    * @param path starts at the level of root's children
-   * @return
+   * @return Node if found, else null
    */
   public static ASTNode findNodeByPath(ASTNode root, int... path) {
     for (int i = 0; i < path.length; i++) {
@@ -267,7 +268,7 @@ public final class HQLParser {
 
       for (int j = 0; j < root.getChildCount(); j++) {
         ASTNode node = (ASTNode) root.getChild(j);
-        if (node.getToken().getType() == type) {
+        if (node.getType() == type) {
           hasChildWithType = true;
           root = node;
           // If this is the last type in path, return this node
@@ -277,9 +278,6 @@ public final class HQLParser {
             // Go to next level
             break;
           }
-        } else {
-          // Go to next sibling.
-          continue;
         }
       }
 
@@ -308,8 +306,8 @@ public final class HQLParser {
   /**
    * Breadth first traversal of AST
    *
-   * @param root
-   * @param visitor
+   * @param root      node from where to start bft
+   * @param visitor   action to take on each visit
    * @throws LensException
    */
   public static void bft(ASTNode root, ASTNodeVisitor visitor) throws LensException {
@@ -320,7 +318,7 @@ public final class HQLParser {
     if (visitor == null) {
       throw new NullPointerException("Visitor cannot be null");
     }
-    Queue<TreeNode> queue = new LinkedList<TreeNode>();
+    Queue<TreeNode> queue = new LinkedList<>();
     queue.add(new TreeNode(null, root));
 
     while (!queue.isEmpty()) {
@@ -334,23 +332,20 @@ public final class HQLParser {
   }
 
   static boolean hasSpaces(String text) {
-    if (P_WSPACE.matcher(text).find()) {
-      return true;
-    }
-    return false;
+    return P_WSPACE.matcher(text).find();
   }
 
   /**
    * Recursively reconstruct query string given a query AST
    *
-   * @param root
+   * @param root root node
    * @param buf  preallocated builder where the reconstructed string will be written
    */
   public static void toInfixString(ASTNode root, StringBuilder buf) {
     if (root == null) {
       return;
     }
-    int rootType = root.getToken().getType();
+    int rootType = root.getType();
     String rootText = root.getText();
     // Operand, print contents
     if (Identifier == rootType || Number == rootType || StringLiteral == rootType || KW_TRUE == rootType
@@ -362,11 +357,11 @@ public final class HQLParser {
         buf.append(" true ");
       } else if (KW_FALSE == rootType) {
         buf.append(" false ");
-      } else if (Identifier == rootType && TOK_SELEXPR == ((ASTNode) root.getParent()).getToken().getType()) {
+      } else if (Identifier == rootType && TOK_SELEXPR == root.getParent().getType()) {
         // back quote column alias in all cases. This is required since some alias values can match DB keywords
         // (example : year as alias) and in such case queries can fail on certain DBs if the alias in not back quoted
         buf.append(" as `").append(rootText).append("` ");
-      } else if (Identifier == rootType && TOK_FUNCTIONSTAR == ((ASTNode) root.getParent()).getToken().getType()) {
+      } else if (Identifier == rootType && TOK_FUNCTIONSTAR == root.getParent().getType()) {
         // count(*) or count(someTab.*): Don't append space after the identifier
         buf.append(" ").append(rootText == null ? "" : rootText.toLowerCase());
       } else {
@@ -388,10 +383,10 @@ public final class HQLParser {
         }
       }
       buf.append("(*) ");
-    } else if (UNARY_OPERATORS.contains(Integer.valueOf(rootType))) {
+    } else if (UNARY_OPERATORS.contains(rootType)) {
       if (KW_NOT == rootType) {
         // Check if this is actually NOT IN
-        if (!(findNodeByPath(root, TOK_FUNCTION, KW_IN) != null)) {
+        if (findNodeByPath(root, TOK_FUNCTION, KW_IN) == null) {
           buf.append(" not ");
         }
       } else if (TILDE == rootType) {
@@ -402,8 +397,15 @@ public final class HQLParser {
         toInfixString((ASTNode) root.getChild(i), buf);
       }
 
-    } else if (BINARY_OPERATORS.contains(Integer.valueOf(root.getToken().getType()))) {
-      buf.append("(");
+    } else if (BINARY_OPERATORS.contains(rootType)) {
+      boolean surround = true;
+      if (N_ARY_OPERATORS.contains(rootType)
+        && (root.getParent() == null || rootType == root.getParent().getType())) {
+        surround = false;
+      }
+      if (surround) {
+        buf.append("(");
+      }
       if (MINUS == rootType && root.getChildCount() == 1) {
         // If minus has only one child, then it's a unary operator.
         // Add Operator name first
@@ -414,7 +416,7 @@ public final class HQLParser {
         // Left operand
         toInfixString((ASTNode) root.getChild(0), buf);
         // Operator name
-        if (root.getToken().getType() != DOT) {
+        if (rootType != DOT) {
           buf.append(' ').append(rootText.toLowerCase()).append(' ');
         } else {
           buf.append(rootText.toLowerCase());
@@ -422,7 +424,9 @@ public final class HQLParser {
         // Right operand
         toInfixString((ASTNode) root.getChild(1), buf);
       }
-      buf.append(")");
+      if (surround) {
+        buf.append(")");
+      }
     } else if (LSQUARE == rootType) {
       // square brackets for array and map types
       toInfixString((ASTNode) root.getChild(0), buf);
@@ -476,13 +480,13 @@ public final class HQLParser {
       } else {
         buf.append(rootText);
       }
-    } else if (TOK_FUNCTION == root.getToken().getType()) {
+    } else if (TOK_FUNCTION == root.getType()) {
       // Handle UDFs, conditional operators.
       functionString(root, buf);
 
     } else if (TOK_FUNCTIONDI == rootType) {
       // Distinct is a different case.
-      String fname = ((ASTNode) root.getChild(0)).getText();
+      String fname = root.getChild(0).getText();
 
       buf.append(fname.toLowerCase()).append("( distinct ");
 
@@ -496,7 +500,6 @@ public final class HQLParser {
       buf.append(")");
 
     } else if (TOK_TABSORTCOLNAMEDESC == rootType || TOK_TABSORTCOLNAMEASC == rootType) {
-      // buf.append("(");
       for (int i = 0; i < root.getChildCount(); i++) {
         StringBuilder orderByCol = new StringBuilder();
         toInfixString((ASTNode) root.getChild(i), orderByCol);
@@ -507,12 +510,7 @@ public final class HQLParser {
         buf.append(colStr);
         buf.append(" ");
       }
-      if (TOK_TABSORTCOLNAMEDESC == rootType) {
-        buf.append(" desc ");
-      } else if (TOK_TABSORTCOLNAMEASC == rootType) {
-        buf.append(" asc ");
-      }
-      // buf.append(")");
+      buf.append(" ").append(rootType == TOK_TABSORTCOLNAMEDESC ? "desc" : "asc").append(" ");
     } else if (TOK_SELECT == rootType || TOK_ORDERBY == rootType || TOK_GROUPBY == rootType) {
       for (int i = 0; i < root.getChildCount(); i++) {
         toInfixString((ASTNode) root.getChild(i), buf);
@@ -622,7 +620,7 @@ public final class HQLParser {
       toInfixString((ASTNode) root.getChild(1), buf);
       buf.append(" is not null ");
 
-    } else if (((ASTNode) root.getChild(0)).getToken().getType() == Identifier
+    } else if (root.getChild(0).getType() == Identifier
       && ((ASTNode) root.getChild(0)).getToken().getText().equalsIgnoreCase("between")) {
       // Handle between and not in between
       ASTNode tokTrue = findNodeByPath(root, KW_TRUE);
@@ -650,7 +648,7 @@ public final class HQLParser {
 
       // check if this is NOT In
       ASTNode rootParent = (ASTNode) root.getParent();
-      if (rootParent != null && rootParent.getToken().getType() == KW_NOT) {
+      if (rootParent != null && rootParent.getType() == KW_NOT) {
         buf.append(" not ");
       }
 
@@ -670,7 +668,7 @@ public final class HQLParser {
       buf.append(" as ");
       toInfixString((ASTNode) root.getChild(0), buf);
     } else {
-      int rootType = ((ASTNode) root.getChild(0)).getToken().getType();
+      int rootType = root.getChild(0).getType();
       if (PRIMITIVE_TYPES.contains(rootType)) {
         // cast expression maps to the following ast
         // KW_CAST LPAREN expression KW_AS primitiveType RPAREN -> ^(TOK_FUNCTION primitiveType expression)
@@ -681,7 +679,7 @@ public final class HQLParser {
         buf.append(")");
       } else {
         // Normal UDF
-        String fname = ((ASTNode) root.getChild(0)).getText();
+        String fname = root.getChild(0).getText();
         // Function name
         buf.append(fname.toLowerCase()).append("(");
         // Arguments separated by comma
@@ -705,14 +703,14 @@ public final class HQLParser {
   public static String getString(ASTNode tree) {
     StringBuilder buf = new StringBuilder();
     toInfixString(tree, buf);
-    return buf.toString();
+    return buf.toString().trim().replaceAll("\\s+", " ");
   }
 
   public static String getColName(ASTNode node) {
-    String colname = null;
-    int nodeType = node.getToken().getType();
+    String colname;
+    int nodeType = node.getType();
     if (nodeType == HiveParser.TOK_TABLE_OR_COL) {
-      colname = ((ASTNode) node.getChild(0)).getText();
+      colname = node.getChild(0).getText();
     } else {
       // node in 'alias.column' format
       ASTNode colIdent = (ASTNode) node.getChild(1);
@@ -723,7 +721,7 @@ public final class HQLParser {
   }
 
   public static boolean isAggregateAST(ASTNode node) {
-    int exprTokenType = node.getToken().getType();
+    int exprTokenType = node.getType();
     if (exprTokenType == HiveParser.TOK_FUNCTION || exprTokenType == HiveParser.TOK_FUNCTIONDI
       || exprTokenType == HiveParser.TOK_FUNCTIONSTAR) {
       assert (node.getChildCount() != 0);
@@ -739,7 +737,7 @@ public final class HQLParser {
   }
 
   public static boolean isNonAggregateFunctionAST(ASTNode node) {
-    int exprTokenType = node.getToken().getType();
+    int exprTokenType = node.getType();
     if (exprTokenType == HiveParser.TOK_FUNCTION || exprTokenType == HiveParser.TOK_FUNCTIONDI
       || exprTokenType == HiveParser.TOK_FUNCTIONSTAR) {
       assert (node.getChildCount() != 0);
@@ -760,11 +758,8 @@ public final class HQLParser {
   public static boolean isSelectASTNode(final ASTNode node) {
 
     Optional<Integer> astNodeType = getASTNodeType(node);
-    if (astNodeType.isPresent()) {
-      return astNodeType.get() == HiveParser.TOK_SELECT;
-    }
+    return astNodeType.isPresent() && astNodeType.get() == HiveParser.TOK_SELECT;
 
-    return false;
   }
 
   /**
@@ -776,14 +771,14 @@ public final class HQLParser {
 
     Optional<Integer> astNodeType = Optional.absent();
     if (node != null && node.getToken() != null) {
-      astNodeType = Optional.of(node.getToken().getType());
+      astNodeType = Optional.of(node.getType());
     }
 
     return astNodeType;
   }
 
   public static boolean hasAggregate(ASTNode node) {
-    int nodeType = node.getToken().getType();
+    int nodeType = node.getType();
     if (nodeType == HiveParser.TOK_TABLE_OR_COL || nodeType == HiveParser.DOT) {
       return false;
     } else {
@@ -813,12 +808,12 @@ public final class HQLParser {
       return true;
     }
 
-    if (n1.getToken().getType() != n2.getToken().getType()) {
+    if (n1.getType() != n2.getType()) {
       return false;
     }
 
     // Compare text. For literals, comparison is case sensitive
-    if ((n1.getToken().getType() == StringLiteral && !StringUtils.equals(n1.getText(), n2.getText()))) {
+    if ((n1.getType() == StringLiteral && !StringUtils.equals(n1.getText(), n2.getText()))) {
       return false;
     }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/6be4d48f/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index de1d3ce..0415434 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -692,7 +692,7 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
       || hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, "
         + "coalesce(mq1.dim11, mq2.dim11) dim11, mq1.msr12 msr12, mq2.roundedmsr2 roundedmsr2 from "), hqlQuery);
     assertTrue(hqlQuery.contains(joinSubString)
-      && hqlQuery.endsWith(endSubString + " WHERE (( alias0  +  roundedmsr2 ) <=  1000 )"), hqlQuery);
+      && hqlQuery.endsWith(endSubString + " WHERE (( alias0 + roundedmsr2 ) <= 1000 )"), hqlQuery);
 
     // No push-down-able having clauses.
     hqlQuery = rewrite("select dim1, dim11, msr12, roundedmsr2 from basecube where " + TWO_DAYS_RANGE
@@ -715,7 +715,7 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
       || hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, coalesce(mq1.dim11, mq2.dim11) "
         + "dim11, mq1.msr12 msr12, mq2.roundedmsr2 roundedmsr2 from "), hqlQuery);
     assertTrue(hqlQuery.contains(joinSubString)
-      && hqlQuery.endsWith(endSubString + " WHERE (( alias0  +  roundedmsr2 ) <=  1000 )"), hqlQuery);
+      && hqlQuery.endsWith(endSubString + " WHERE (( alias0 + roundedmsr2 ) <= 1000 )"), hqlQuery);
 
     // function over expression of two functions over measures
     hqlQuery = rewrite("select dim1, dim11, msr12, roundedmsr2 from basecube where " + TWO_DAYS_RANGE
@@ -738,7 +738,7 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
       || hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, coalesce(mq1.dim11, mq2.dim11) "
         + "dim11, mq1.msr12 msr12, mq2.roundedmsr2 roundedmsr2 from "), hqlQuery);
     assertTrue(hqlQuery.contains(joinSubString)
-      && hqlQuery.endsWith(endSubString + " WHERE (round(( alias0  +  roundedmsr2 )) <=  1000 )"), hqlQuery);
+      && hqlQuery.endsWith(endSubString + " WHERE (round(( alias0 + roundedmsr2 )) <= 1000 )"), hqlQuery);
 
 
     // Following test cases only select dimensions, and all the measures are in having.
@@ -779,7 +779,7 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(begin), hqlQuery);
     assertTrue(hqlQuery.contains(joinSubString)
-      && hqlQuery.endsWith(endSubString + " WHERE (( alias0  +  alias1 ) <=  1000 )"), hqlQuery);
+      && hqlQuery.endsWith(endSubString + " WHERE (( alias0 + alias1 ) <= 1000 )"), hqlQuery);
 
     hqlQuery = rewrite("select dim1, dim11 from basecube where " + TWO_DAYS_RANGE
       + "having msr12 > 2 and roundedmsr2 > 0 and flooredmsr12+roundedmsr2 <= 1000", conf);
@@ -797,7 +797,7 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(begin), hqlQuery);
     assertTrue(hqlQuery.contains(joinSubString)
-      && hqlQuery.endsWith(endSubString + " WHERE (( alias0  +  alias1 ) <=  1000 )"), hqlQuery);
+      && hqlQuery.endsWith(endSubString + " WHERE (( alias0 + alias1 ) <= 1000 )"), hqlQuery);
 
     hqlQuery = rewrite("select dim1, dim11 from basecube where " + TWO_DAYS_RANGE
       + "having msr12 > 2 or roundedmsr2 > 0 or flooredmsr12+roundedmsr2 <= 1000", conf);
@@ -810,7 +810,7 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
       "select basecube.dim1 as dim1, basecube.dim11 as dim11, round(sum(basecube.msr2)/1000) as alias1 FROM ",
       null, " group by basecube.dim1, basecube.dim11",
       getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
-    String havingToWhere = " WHERE ((( alias0  >  2 ) or ( alias1  >  0 )) or (( alias2  +  alias1 ) <=  1000 ))";
+    String havingToWhere = " WHERE (( alias0 > 2 ) or ( alias1 > 0 ) or (( alias2 + alias1 ) <= 1000 ))";
 
     assertFalse(hqlQuery.toLowerCase().contains("having"));
     compareContains(expected1, hqlQuery);

http://git-wip-us.apache.org/repos/asf/lens/blob/6be4d48f/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 9dde3cd..6ee45d8 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -1017,8 +1017,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
       + " on testCube.cityid = citydim.id where " + LAST_HOUR_TIME_RANGE;
 
     String expectedRewrittenQuery = "SELECT ( citydim . name ) as `Alias With Spaces` , sum(( testcube . msr2 )) "
-      + "as `TestMeasure`  FROM TestQueryRewrite.c2_testfact testcube inner JOIN TestQueryRewrite.c2_citytable citydim "
-      + "ON (( testcube . cityid ) = ( citydim . id )) WHERE (((( testcube . dt ) =  '"
+      + "as `TestMeasure` FROM TestQueryRewrite.c2_testfact testcube inner JOIN TestQueryRewrite.c2_citytable citydim "
+      + "ON (( testcube . cityid ) = ( citydim . id )) WHERE (((( testcube . dt ) = '"
       + getDateUptoHours(getDateWithOffset(HOURLY, -1)) + "' ))) GROUP BY ( citydim . name )";
 
     String actualRewrittenQuery = rewrite(inputQuery, getConfWithStorages("C2"));
@@ -1034,8 +1034,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
       + " on testCube.cityid = citydim.id where " + LAST_HOUR_TIME_RANGE;
 
     String expectedRewrittenQuery = "SELECT ( citydim . name ) as `Alias With Spaces` , sum(( testcube . msr2 )) "
-      + "as `TestMeasure`  FROM TestQueryRewrite.c2_testfact testcube inner JOIN TestQueryRewrite.c2_citytable citydim "
-      + "ON (( testcube . cityid ) = ( citydim . id )) WHERE (((( testcube . dt ) =  '"
+      + "as `TestMeasure` FROM TestQueryRewrite.c2_testfact testcube inner JOIN TestQueryRewrite.c2_citytable citydim "
+      + "ON (( testcube . cityid ) = ( citydim . id )) WHERE (((( testcube . dt ) = '"
       + getDateUptoHours(getDateWithOffset(HOURLY, -1)) + "' ))) GROUP BY ( citydim . name )";
 
     String actualRewrittenQuery = rewrite(inputQuery, getConfWithStorages("C2"));

http://git-wip-us.apache.org/repos/asf/lens/blob/6be4d48f/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java
index 669a8e9..64ada27 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java
@@ -21,8 +21,8 @@ package org.apache.lens.cube.parse;
 
 import static org.apache.lens.cube.metadata.DateFactory.TWO_DAYS_RANGE;
 
-import java.util.ArrayList;
-import java.util.List;
+import java.util.HashSet;
+import java.util.Set;
 
 import org.apache.lens.cube.parse.ExpressionResolver.ExprSpecContext;
 
@@ -32,6 +32,8 @@ import org.testng.Assert;
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
+import com.google.common.collect.Sets;
+
 public class TestExpressionContext extends TestQueryRewrite {
 
   private Configuration conf;
@@ -50,41 +52,41 @@ public class TestExpressionContext extends TestQueryRewrite {
   public void testNestedExpressions() throws Exception {
     CubeQueryContext nestedExprQL = rewriteCtx("select nestedexpr from testCube where " + TWO_DAYS_RANGE, conf);
     Assert.assertNotNull(nestedExprQL.getExprCtx());
-    List<String> expectedExpressions = new ArrayList<>();
-    expectedExpressions.add("avg(( testcube . roundedmsr2 ))");
-    expectedExpressions.add("avg(( testcube . equalsums ))");
-    expectedExpressions.add(" case  when (( testcube . substrexpr ) =  'xyz' ) then avg(( testcube . msr5 ))"
-      + " when (( testcube . substrexpr ) =  'abc' ) then (avg(( testcube . msr4 )) /  100 ) end ");
-    expectedExpressions.add("avg(round((( testcube . msr2 ) /  1000 )))");
-    expectedExpressions.add("avg((( testcube . msr3 ) + ( testcube . msr4 )))");
-    expectedExpressions.add("avg(((( testcube . msr3 ) + ( testcube . msr2 )) /  100 ))");
-    expectedExpressions.add(" case  when (substr(( testcube . dim1 ),  3 ) =  'xyz' ) then avg(( testcube . msr5 ))"
-      + " when (substr(( testcube . dim1 ),  3 ) =  'abc' ) then (avg(( testcube . msr4 )) /  100 ) end ");
-    expectedExpressions.add(" case  when (substr(ascii(( dim2chain . name )),  3 ) =  'xyz' ) then"
-      + " avg(( testcube . msr5 )) when (substr(ascii(( dim2chain . name )),  3 ) =  'abc' ) then"
-      + " (avg(( testcube . msr4 )) /  100 ) end ");
-    expectedExpressions.add(" case  when (substr(( testcube . dim1 ),  3 ) =  'xyz' ) then avg((( testcube . msr2 )"
-      + " + ( testcube . msr3 ))) when (substr(( testcube . dim1 ),  3 ) =  'abc' ) then"
-      + " (avg(( testcube . msr4 )) /  100 ) end ");
-    expectedExpressions.add(" case  when (substr(ascii(( dim2chain . name )),  3 ) =  'xyz' ) then"
-      + " avg((( testcube . msr2 ) + ( testcube . msr3 ))) when (substr(ascii(( dim2chain . name )),  3 ) =  'abc' )"
-      + " then (avg(( testcube . msr4 )) /  100 ) end ");
-    expectedExpressions.add(" case  when (( testcube . substrexpr ) =  'xyz' ) then avg((( testcube . msr2 )"
-      + " + ( testcube . msr3 ))) when (( testcube . substrexpr ) =  'abc' ) then (avg(( testcube . msr4 )) /  100 )"
-      + " end ");
-    expectedExpressions.add(" case  when (substr(( testcube . dim1 ),  3 ) =  'xyz' ) then avg((( testcube . msr2 )"
-      + " + ( testcube . msr3 ))) when (substr(( testcube . dim1 ),  3 ) =  'abc' ) then"
-      + " (avg(( testcube . msr4 )) /  100 ) end ");
-    expectedExpressions.add(" case  when (substr(ascii(( dim2chain . name )),  3 ) =  'xyz' ) then"
-      + " avg((( testcube . msr2 ) + ( testcube . msr3 ))) when (substr(ascii(( dim2chain . name )),  3 ) =  'abc' )"
-      + " then (avg(( testcube . msr4 )) /  100 ) end ");
+    Set<String> expectedExpressions = Sets.newHashSet(
+      "avg(( testcube . roundedmsr2 ))",
+      "avg(( testcube . equalsums ))",
+      "case when (( testcube . substrexpr ) = 'xyz' ) then avg(( testcube . msr5 ))"
+        + " when (( testcube . substrexpr ) = 'abc' ) then (avg(( testcube . msr4 )) / 100 ) end",
+      "avg(round((( testcube . msr2 ) / 1000 )))",
+      "avg((( testcube . msr3 ) + ( testcube . msr4 )))",
+      "avg(((( testcube . msr3 ) + ( testcube . msr2 )) / 100 ))",
+      "case when (substr(( testcube . dim1 ), 3 ) = 'xyz' ) then avg(( testcube . msr5 ))"
+        + " when (substr(( testcube . dim1 ), 3 ) = 'abc' ) then (avg(( testcube . msr4 )) / 100 ) end",
+      "case when (substr(ascii(( dim2chain . name )), 3 ) = 'xyz' ) then"
+        + " avg(( testcube . msr5 )) when (substr(ascii(( dim2chain . name )), 3 ) = 'abc' ) then"
+        + " (avg(( testcube . msr4 )) / 100 ) end",
+      "case when (substr(( testcube . dim1 ), 3 ) = 'xyz' ) then avg((( testcube . msr2 )"
+        + " + ( testcube . msr3 ))) when (substr(( testcube . dim1 ), 3 ) = 'abc' ) then"
+        + " (avg(( testcube . msr4 )) / 100 ) end",
+      "case when (substr(ascii(( dim2chain . name )), 3 ) = 'xyz' ) then"
+        + " avg((( testcube . msr2 ) + ( testcube . msr3 ))) when (substr(ascii(( dim2chain . name )), 3 ) = 'abc' )"
+        + " then (avg(( testcube . msr4 )) / 100 ) end",
+      "case when (( testcube . substrexpr ) = 'xyz' ) then avg((( testcube . msr2 )"
+        + " + ( testcube . msr3 ))) when (( testcube . substrexpr ) = 'abc' ) then (avg(( testcube . msr4 )) / 100 )"
+        + " end",
+      "case when (substr(( testcube . dim1 ), 3 ) = 'xyz' ) then avg((( testcube . msr2 )"
+        + " + ( testcube . msr3 ))) when (substr(( testcube . dim1 ), 3 ) = 'abc' ) then"
+        + " (avg(( testcube . msr4 )) / 100 ) end",
+      "case when (substr(ascii(( dim2chain . name )), 3 ) = 'xyz' ) then"
+        + " avg((( testcube . msr2 ) + ( testcube . msr3 ))) when (substr(ascii(( dim2chain . name )), 3 ) = 'abc' )"
+        + " then (avg(( testcube . msr4 )) / 100 ) end"
+    );
 
-    List<String> actualExpressions = new ArrayList<>();
+    Set<String> actualExpressions = new HashSet<>();
     for (ExprSpecContext esc : nestedExprQL.getExprCtx().getExpressionContext("nestedexpr", "testcube").getAllExprs()) {
       actualExpressions.add(HQLParser.getString(esc.getFinalAST()));
     }
-    Assert.assertTrue(actualExpressions.containsAll(expectedExpressions), actualExpressions.toString());
-    Assert.assertTrue(expectedExpressions.containsAll(actualExpressions), actualExpressions.toString());
+    Assert.assertEquals(actualExpressions, expectedExpressions);
   }
 
   @Test
@@ -92,20 +94,19 @@ public class TestExpressionContext extends TestQueryRewrite {
     CubeQueryContext nestedExprQL = rewriteCtx("select nestedExprWithTimes from testCube where " + TWO_DAYS_RANGE,
       conf);
     Assert.assertNotNull(nestedExprQL.getExprCtx());
-    List<String> expectedExpressions = new ArrayList<>();
-    expectedExpressions.add("avg(( testcube . roundedmsr2 ))");
-    expectedExpressions.add("avg(( testcube . equalsums ))");
-    expectedExpressions.add("avg(round((( testcube . msr2 ) /  1000 )))");
-    expectedExpressions.add("avg((( testcube . msr3 ) + ( testcube . msr4 )))");
-    expectedExpressions.add("avg(((( testcube . msr3 ) + ( testcube . msr2 )) /  100 ))");
+    Set<String> expectedExpressions = Sets.newHashSet(
+      "avg(( testcube . roundedmsr2 ))",
+      "avg(( testcube . equalsums ))",
+      "avg(round((( testcube . msr2 ) / 1000 )))",
+      "avg((( testcube . msr3 ) + ( testcube . msr4 )))",
+      "avg(((( testcube . msr3 ) + ( testcube . msr2 )) / 100 ))"
+    );
 
-    List<String> actualExpressions = new ArrayList<>();
+    Set<String> actualExpressions = new HashSet<>();
     for (ExprSpecContext esc : nestedExprQL.getExprCtx()
       .getExpressionContext("nestedexprwithtimes", "testcube").getAllExprs()) {
       actualExpressions.add(HQLParser.getString(esc.getFinalAST()));
     }
-    Assert.assertTrue(actualExpressions.containsAll(expectedExpressions), actualExpressions.toString());
-    Assert.assertTrue(expectedExpressions.containsAll(actualExpressions), actualExpressions.toString());
+    Assert.assertEquals(actualExpressions, expectedExpressions);
   }
-
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/6be4d48f/lens-cube/src/test/java/org/apache/lens/cube/parse/TestHQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestHQLParser.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestHQLParser.java
index 4afd403..f9d7457 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestHQLParser.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestHQLParser.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.ParseException;
 
 import org.testng.Assert;
+import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
 import lombok.extern.slf4j.Slf4j;
@@ -43,11 +44,11 @@ public class TestHQLParser {
     ASTNode node = HQLParser.parseHQL(query, conf);
 
     ASTNode groupby = HQLParser.findNodeByPath(node, TOK_INSERT, TOK_GROUPBY);
-    String expected = "a , f( b ), ( d  +  e )";
+    String expected = "a , f( b ), ( d + e )";
     Assert.assertEquals(expected, HQLParser.getString(groupby).trim());
 
     ASTNode orderby = HQLParser.findNodeByPath(node, TOK_INSERT, HiveParser.TOK_ORDERBY);
-    String expectedOrderBy = "a  asc , g( b )  asc ,  e  /  100   asc";
+    String expectedOrderBy = "a asc , g( b ) asc , e / 100 asc";
     System.out.println("###Actual order by:" + HQLParser.getString(orderby).trim());
     Assert.assertEquals(expectedOrderBy, HQLParser.getString(orderby).trim());
   }
@@ -60,12 +61,12 @@ public class TestHQLParser {
 
     ASTNode select = HQLParser.findNodeByPath(tree, TOK_INSERT, TOK_SELECT);
     String selectStr = HQLParser.getString(select).trim();
-    String expectedSelect = "'abc'  as `col1` ,  'DEF'  as `col2`";
+    String expectedSelect = "'abc' as `col1` , 'DEF' as `col2`";
     Assert.assertEquals(expectedSelect, selectStr);
 
     ASTNode where = HQLParser.findNodeByPath(tree, TOK_INSERT, TOK_WHERE);
     String whereStr = HQLParser.getString(where).trim();
-    String expectedWhere = "(( col3  =  'GHI' ) and ( col4  =  'JKLmno' ))";
+    String expectedWhere = "(( col3 = 'GHI' ) and ( col4 = 'JKLmno' ))";
     Assert.assertEquals(expectedWhere, whereStr);
   }
 
@@ -79,9 +80,8 @@ public class TestHQLParser {
     String selectStr = HQLParser.getString(select);
     System.out.println("reconstructed clause ");
     System.out.println(selectStr);
-    Assert.assertEquals("case ((( col1  *  100 ) /  200 ) +  5 ) "
-        + "when  'ABC'  then  'def'  when  'EFG'  then  'hij'  " + "else  'XyZ'  end  as `ComplexCaseStatement`",
-      selectStr.trim());
+    Assert.assertEquals(selectStr.trim(), "case ((( col1 * 100 ) / 200 ) + 5 ) when 'ABC' then 'def' when 'EFG' "
+      + "then 'hij' else 'XyZ' end as `ComplexCaseStatement`");
 
     String q2 = "SELECT " + "CASE WHEN col1 = 'abc' then 'def' " + "when col1 = 'ghi' then 'jkl' "
       + "else 'none' END AS Complex_Case_Statement_2" + " from FOO";
@@ -91,8 +91,8 @@ public class TestHQLParser {
     selectStr = HQLParser.getString(select);
     System.out.println("reconstructed clause 2");
     System.out.println(selectStr);
-    Assert.assertEquals("case  when ( col1  =  'abc' ) then  'def'  " + "when ( col1  =  'ghi' ) then  'jkl'  "
-      + "else  'none'  end  as `Complex_Case_Statement_2`", selectStr.trim());
+    Assert.assertEquals(selectStr.trim(), "case when ( col1 = 'abc' ) then 'def' when ( col1 = 'ghi' ) then 'jkl' "
+      + "else 'none' end as `Complex_Case_Statement_2`");
 
     String q3 = "SELECT  " + "CASE (col1 * 100)/200 + 5 " + "WHEN 'ABC' THEN 'def' " + "WHEN 'EFG' THEN 'hij' "
       + "END AS ComplexCaseStatement FROM FOO";
@@ -102,8 +102,8 @@ public class TestHQLParser {
     selectStr = HQLParser.getString(select);
     System.out.println("reconstructed clause ");
     System.out.println(selectStr);
-    Assert.assertEquals("case ((( col1  *  100 ) /  200 ) +  5 ) " + "when  'ABC'  then  'def'  when  'EFG'  "
-      + "then  'hij'  end  as `ComplexCaseStatement`", selectStr.trim());
+    Assert.assertEquals(selectStr.trim(), "case ((( col1 * 100 ) / 200 ) + 5 ) when 'ABC' then 'def' when 'EFG' "
+      + "then 'hij' end as `ComplexCaseStatement`");
 
     String q4 = "SELECT " + "CASE WHEN col1 = 'abc' then 'def' " + "when col1 = 'ghi' then 'jkl' "
       + "END AS Complex_Case_Statement_2" + " from FOO";
@@ -113,8 +113,8 @@ public class TestHQLParser {
     selectStr = HQLParser.getString(select);
     System.out.println("reconstructed clause 2");
     System.out.println(selectStr);
-    Assert.assertEquals("case  when ( col1  =  'abc' ) then  " + "'def'  when ( col1  =  'ghi' ) then  'jkl'  "
-      + "end  as `Complex_Case_Statement_2`", selectStr.trim());
+    Assert.assertEquals(selectStr.trim(), "case when ( col1 = 'abc' ) then 'def' when ( col1 = 'ghi' ) then 'jkl' end "
+      + "as `Complex_Case_Statement_2`");
 
   }
 
@@ -124,7 +124,7 @@ public class TestHQLParser {
     ASTNode where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1, conf), TOK_INSERT, TOK_WHERE);
     String whereStr = HQLParser.getString(where);
     System.out.println(whereStr);
-    Assert.assertEquals("col1  is null", whereStr.trim());
+    Assert.assertEquals("col1 is null", whereStr.trim());
   }
 
   @Test
@@ -133,7 +133,7 @@ public class TestHQLParser {
     ASTNode where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1, conf), TOK_INSERT, TOK_WHERE);
     String whereStr = HQLParser.getString(where);
     System.out.println(whereStr);
-    Assert.assertEquals("col1  is not null", whereStr.trim());
+    Assert.assertEquals("col1 is not null", whereStr.trim());
   }
 
   @Test
@@ -142,7 +142,7 @@ public class TestHQLParser {
     ASTNode where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1, conf), TOK_INSERT, TOK_WHERE);
     String whereStr = HQLParser.getString(where);
     System.out.println(whereStr);
-    Assert.assertEquals("col1  between  10  and  100", whereStr.trim());
+    Assert.assertEquals("col1 between 10 and 100", whereStr.trim());
   }
 
   @Test
@@ -151,7 +151,7 @@ public class TestHQLParser {
     ASTNode where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1, conf), TOK_INSERT, TOK_WHERE);
     String whereStr = HQLParser.getString(where);
     System.out.println(whereStr);
-    Assert.assertEquals("col1  not between  10  and  100", whereStr.trim());
+    Assert.assertEquals("col1 not between 10 and 100", whereStr.trim());
   }
 
   @Test
@@ -161,11 +161,9 @@ public class TestHQLParser {
 
     ASTNode where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1, conf), TOK_INSERT, TOK_WHERE);
     String whereStr = HQLParser.getString(where);
-    String expected = "(((((((((( a  <=>  10 ) and (( b  &  c ) =  10 )) "
-      + "and (( d  |  e ) =  10 )) and (( f  ^  g ) =  10 )) "
-      + "and (( h  %  2 ) =  1 )) and ( ~  i  =  10 )) and  not  j ) " + "and  not  k ) and  true ) and  false )";
-    System.out.println(whereStr);
-    Assert.assertEquals(expected, whereStr.trim());
+    String expected = "(( a <=> 10 ) and (( b & c ) = 10 ) and (( d | e ) = 10 ) and (( f ^ g ) = 10 ) and "
+      + "(( h % 2 ) = 1 ) and ( ~ i = 10 ) and not j and not k and true and false )";
+    Assert.assertEquals(whereStr.trim(), expected);
   }
 
   @Test
@@ -175,7 +173,7 @@ public class TestHQLParser {
     ASTNode select = HQLParser.findNodeByPath(HQLParser.parseHQL(q1, conf), TOK_INSERT, TOK_SELECT);
     String selectStr = HQLParser.getString(select);
     System.out.println(selectStr);
-    Assert.assertEquals("a [ 2 ],  b [ 'key' ], ( c . d )", selectStr.trim());
+    Assert.assertEquals(selectStr.trim(), "a [ 2 ], b [ 'key' ], ( c . d )");
   }
 
   @Test
@@ -184,13 +182,13 @@ public class TestHQLParser {
     ASTNode where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1, conf), TOK_INSERT, TOK_WHERE);
     String whereStr = HQLParser.getString(where);
     System.out.println(whereStr);
-    Assert.assertEquals("a  in ( 'B'  ,  'C'  ,  'D'  ,  'E'  ,  'F' )", whereStr.trim());
+    Assert.assertEquals(whereStr.trim(), "a in ( 'B' , 'C' , 'D' , 'E' , 'F' )");
 
     q1 = "SELECT * FROM FOO WHERE A NOT IN ('B', 'C', 'D', 'E', 'F')";
     where = HQLParser.findNodeByPath(HQLParser.parseHQL(q1, conf), TOK_INSERT, TOK_WHERE);
     whereStr = HQLParser.getString(where);
     System.out.println(whereStr);
-    Assert.assertEquals("a  not  in ( 'B'  ,  'C'  ,  'D'  ,  'E'  ,  'F' )", whereStr.trim());
+    Assert.assertEquals(whereStr.trim(), "a not in ( 'B' , 'C' , 'D' , 'E' , 'F' )");
   }
 
   @Test
@@ -232,8 +230,8 @@ public class TestHQLParser {
       = "select tab1.a, tab2.b from table1 tab1 inner join table tab2 on tab1.id = tab2.id where tab1.a > 123";
     ASTNode node = HQLParser.parseHQL(query, conf);
     ASTNode temp = HQLParser.findNodeByPath(node, TOK_FROM, TOK_JOIN);
-    String expected = " table1  tab1  table  tab2 (( tab1 . id ) = ( tab2 . id ))";
-    Assert.assertEquals(expected, HQLParser.getString(temp));
+    String expected = "table1 tab1 table tab2 (( tab1 . id ) = ( tab2 . id ))";
+    Assert.assertEquals(HQLParser.getString(temp), expected);
   }
 
   @Test
@@ -241,6 +239,7 @@ public class TestHQLParser {
     String query = "select id as `an id` from sample_dim";
     try {
       ASTNode tree = HQLParser.parseHQL(query, conf);
+      Assert.assertNotNull(tree);
     } catch (NullPointerException exc) {
       log.error("should not have thrown npe", exc);
       Assert.fail("should not have thrown npe");
@@ -249,12 +248,12 @@ public class TestHQLParser {
 
   @Test
   public void testAliasShouldBeQuoted() throws Exception {
-    Assert.assertEquals(getSelectStrForQuery("select id as identity from sample_dim"), "id  as `identity`");
+    Assert.assertEquals(getSelectStrForQuery("select id as identity from sample_dim"), "id as `identity`");
     Assert.assertEquals(getSelectStrForQuery("select id as `column identity` from sample_dim"),
-        "id  as `column identity`");
-    Assert.assertEquals(getSelectStrForQuery("select id identity from sample_dim"), "id  as `identity`");
+        "id as `column identity`");
+    Assert.assertEquals(getSelectStrForQuery("select id identity from sample_dim"), "id as `identity`");
     Assert.assertEquals(getSelectStrForQuery("select id `column identity` from sample_dim"),
-        "id  as `column identity`");
+        "id as `column identity`");
   }
 
   private String getSelectStrForQuery(String query) throws Exception {
@@ -269,21 +268,21 @@ public class TestHQLParser {
     ASTNode select = HQLParser.findNodeByPath(HQLParser.parseHQL(query, conf), TOK_INSERT, TOK_SELECT);
     String selectStr = HQLParser.getString(select);
     System.out.println(selectStr);
-    Assert.assertEquals(" * ", selectStr);
+    Assert.assertEquals(selectStr, "*");
 
     query = "select tab.*, tab2.a, tab2.b from tab";
     ASTNode ast = HQLParser.parseHQL(query, conf);
     select = HQLParser.findNodeByPath(ast, TOK_INSERT, TOK_SELECT);
     selectStr = HQLParser.getString(select);
     System.out.println(selectStr);
-    Assert.assertEquals(" tab . * , ( tab2 . a ), ( tab2 . b )", selectStr);
+    Assert.assertEquals(selectStr, "tab . * , ( tab2 . a ), ( tab2 . b )");
 
     query = "select count(*) from tab";
     ast = HQLParser.parseHQL(query, conf);
     select = HQLParser.findNodeByPath(ast, TOK_INSERT, TOK_SELECT);
     selectStr = HQLParser.getString(select);
     System.out.println(selectStr);
-    Assert.assertEquals(" count(*) ", selectStr);
+    Assert.assertEquals("count(*)", selectStr);
 
     query = "select count(tab.*) from tab";
     ast = HQLParser.parseHQL(query, conf);
@@ -309,8 +308,8 @@ public class TestHQLParser {
     String genQuery2 = HQLParser.getString(selectAST);
     System.out.println("genQuery2: " + genQuery2);
 
-    Assert.assertFalse(genQuery2.contains("1  -"));
-    Assert.assertTrue(genQuery2.contains("-  1"));
+    Assert.assertFalse(genQuery2.contains("1 -"));
+    Assert.assertTrue(genQuery2.contains("- 1"));
 
     // Validate returned string is parseable
     HQLParser.printAST(HQLParser.findNodeByPath(HQLParser.parseHQL("SELECT " + genQuery2 + " FROM table1", conf),
@@ -362,4 +361,25 @@ public class TestHQLParser {
     String genQuery = HQLParser.getString(selectAST);
     Assert.assertEquals(genQuery, select);
   }
+
+  @DataProvider
+  public Object[][] nAryFlatteningDataProvider() {
+    return new Object[][] {
+      {"a", "a"},
+      {"a or b", "a or b"},
+      {"a or b or c or d", "a or b or c or d"},
+      {"a and b and c and d", "a and b and c and d"},
+      {"a and (b or c)", "a and ( b or c )"},
+      {"a and (b or c or d) and (e or f) and (g and h)", "a and ( b or c or d ) and ( e or f ) and g and h"},
+      // ambiguous, but uniquely understood, or > and.
+      {"a and b or c or d and e or f and g and h", "( a and b ) or c or ( d and e ) or ( f and g and h )"},
+    };
+  }
+
+  @Test(dataProvider = "nAryFlatteningDataProvider")
+  public void testNAryOperatorFlattening(String input, String expected) throws LensException {
+    ASTNode tree = HQLParser.parseExpr(input);
+    String infixString = HQLParser.getString(tree);
+    Assert.assertEquals(infixString, expected);
+  }
 }
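
The nAryFlatteningDataProvider cases above exercise the other half of the
change: an associative (n-ary) operator no longer gets its own parentheses
when it is nested directly under the same operator, so "a or b or c" renders
flat instead of "(( a or b ) or c)". A minimal standalone sketch of that
surround rule follows, with placeholder token constants standing in for the
real HiveParser ones.

    import java.util.Set;

    // Sketch only: the parenthesization decision made in toInfixString.
    class SurroundRuleSketch {
      // placeholder token types; the real values come from HiveParser
      static final int KW_AND = 1, KW_OR = 2, PLUS = 3;
      static final Set<Integer> N_ARY_OPERATORS = Set.of(KW_AND, KW_OR, PLUS);

      // parentheses are skipped when an n-ary operator's parent is the same
      // operator, or when it is the root of the expression
      static boolean surround(int nodeType, Integer parentType) {
        return !(N_ARY_OPERATORS.contains(nodeType)
          && (parentType == null || parentType.intValue() == nodeType));
      }

      public static void main(String[] args) {
        System.out.println(surround(KW_OR, KW_OR));  // false: nested OR stays flat
        System.out.println(surround(KW_OR, KW_AND)); // true: (b or c) under AND keeps parentheses
        System.out.println(surround(KW_OR, null));   // false: top-level OR needs none
      }
    }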

http://git-wip-us.apache.org/repos/asf/lens/blob/6be4d48f/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
index 5614527..a004de9 100644
--- a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
+++ b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/ColumnarSQLRewriter.java
@@ -21,6 +21,7 @@ package org.apache.lens.driver.jdbc;
 import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 
 import java.util.*;
+import java.util.regex.Pattern;
 
 import org.apache.lens.api.util.CommonUtils;
 import org.apache.lens.cube.metadata.CubeMetastoreClient;
@@ -65,9 +66,6 @@ public class ColumnarSQLRewriter implements QueryRewriter {
   /** The query. */
   protected String query;
 
-  /** The final fact query. */
-  private String finalFactQuery;
-
   /** The limit. */
   private String limit;
 
@@ -86,9 +84,6 @@ public class ColumnarSQLRewriter implements QueryRewriter {
   /** The rewritten query. */
   protected StringBuilder rewrittenQuery = new StringBuilder();
 
-  /** The merged query. */
-  protected StringBuilder mergedQuery = new StringBuilder();
-
   /** The fact filters for push down */
   protected StringBuilder factFilterPush = new StringBuilder();
 
@@ -104,9 +99,6 @@ public class ColumnarSQLRewriter implements QueryRewriter {
   /** The agg column. */
   protected List<String> aggColumn = new ArrayList<String>();
 
-  /** The filter in join cond. */
-  protected List<String> filterInJoinCond = new ArrayList<String>();
-
   /** The right filter. */
   protected List<String> rightFilter = new ArrayList<String>();
 
@@ -768,8 +760,9 @@ public class ColumnarSQLRewriter implements QueryRewriter {
         String alias = "alias" + String.valueOf(count);
         String allaggmeasures = aggmeasures.append(measure).append(" as ").append(alias).toString();
         String aggColAlias = funident + "(" + alias + ")";
-
-        mapAggTabAlias.put(measure, aggColAlias);
+        String measureRegex = "\\s*" + Pattern.quote(funident)
+          + "\\s*\\(\\s*\\Q" + aggCol.replaceAll("\\s+", "\\\\E\\\\s+\\\\Q") + "\\E\\s*\\)\\s*";
+        mapAggTabAlias.put(measureRegex, aggColAlias);
         if (!aggColumn.contains(allaggmeasures)) {
           aggColumn.add(allaggmeasures);
         }
@@ -1050,13 +1043,13 @@ public class ColumnarSQLRewriter implements QueryRewriter {
     // sub query query to the outer query
 
     for (Map.Entry<String, String> entry : mapAggTabAlias.entrySet()) {
-      selectTree = selectTree.replace(entry.getKey(), entry.getValue());
+      selectTree = selectTree.replaceAll(entry.getKey(), entry.getValue());
 
       if (orderByTree != null) {
-        orderByTree = orderByTree.replace(entry.getKey(), entry.getValue());
+        orderByTree = orderByTree.replaceAll(entry.getKey(), entry.getValue());
       }
       if (havingTree != null) {
-        havingTree = havingTree.replace(entry.getKey(), entry.getValue());
+        havingTree = havingTree.replaceAll(entry.getKey(), entry.getValue());
       }
     }
     //for subquery with count function should be replaced with sum in outer query
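
Since getString now emits single-spaced output, ColumnarSQLRewriter switches
from a plain substring replace to a whitespace-tolerant regex when substituting
aggregate aliases in the outer query. A small standalone sketch, not taken from
the commit, of how that regex is built and applied; the funident, aggCol and
selectTree values are illustrative assumptions only.

    import java.util.regex.Pattern;

    // Sketch only: the aggregate expression is \Q..\E-quoted and each
    // whitespace run is rewritten to \s+, so replaceAll matches it
    // regardless of spacing and swaps in the generated alias.
    class MeasureRegexSketch {
      public static void main(String[] args) {
        String funident = "sum";
        String aggCol = "( fact . dollars_sold )";
        String measureRegex = "\\s*" + Pattern.quote(funident)
          + "\\s*\\(\\s*\\Q" + aggCol.replaceAll("\\s+", "\\\\E\\\\s+\\\\Q") + "\\E\\s*\\)\\s*";

        String selectTree = "sum( ( fact . dollars_sold ) ), avg(x)";
        System.out.println(selectTree.replaceAll(measureRegex, " alias0 "));
        // prints " alias0 , avg(x)": the aggregate matches despite extra spaces
      }
    }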

http://git-wip-us.apache.org/repos/asf/lens/blob/6be4d48f/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
index c412cf0..1aa7491 100644
--- a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
+++ b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
@@ -247,9 +247,9 @@ public class TestColumnarSQLRewriter {
     SessionState.start(hconf);
     qtest.rewrite(query, conf, hconf);
     String expected = "sales_fact___fact.time_key in  (  select time_dim .time_key from time_dim "
-      + "where ( time_dim. time_key ) between  '2013-01-01'  and  '2013-01-31'  ) and "
+      + "where ( time_dim. time_key ) between '2013-01-01' and '2013-01-31' ) and "
       + "sales_fact___fact.location_key in  (  select location_dim .location_key from "
-      + "location_dim where (( location_dim. location_key ) =  'some-loc' ) ) and ";
+      + "location_dim where (( location_dim. location_key ) = 'some-loc' ) ) and ";
     Assert.assertEquals(qtest.allSubQueries.toString().trim(), expected.trim());
   }
 
@@ -1277,13 +1277,13 @@ public class TestColumnarSQLRewriter {
       assertEquals(HQLParser.getString(rewriter.getSelectAST()).trim(), "( t1 . id1 ), ( t2 . id2 ), ( t3 . id3 ),"
         + " ( t1 . name1 ), ( t2 . name2 ), ( t3 . name3 ), count( 1 )",
         "Found :" + HQLParser.getString(rewriter.getSelectAST()));
-      assertEquals(HQLParser.getString(rewriter.getWhereAST()).trim(), "(( t1 . id1 ) =  100 )",
+      assertEquals(HQLParser.getString(rewriter.getWhereAST()).trim(), "(( t1 . id1 ) = 100 )",
         "Found: " + HQLParser.getString(rewriter.getWhereAST()));
       assertEquals(HQLParser.getString(rewriter.getGroupByAST()).trim(), "( t2 . id2 )",
         "Found: " + HQLParser.getString(rewriter.getGroupByAST()));
-      assertEquals(HQLParser.getString(rewriter.getOrderByAST()).trim(), "t3 . id3   asc",
+      assertEquals(HQLParser.getString(rewriter.getOrderByAST()).trim(), "t3 . id3 asc",
         "Found: " + HQLParser.getString(rewriter.getOrderByAST()));
-      assertEquals(HQLParser.getString(rewriter.getHavingAST()).trim(), "(count(( t1 . id1 )) >  2 )",
+      assertEquals(HQLParser.getString(rewriter.getHavingAST()).trim(), "(count(( t1 . id1 )) > 2 )",
         "Found: " + HQLParser.getString(rewriter.getHavingAST()));
       assertTrue(fromStringAfterRewrite.contains("( t1 . t2id ) = ( t2 . id2 )")
         && fromStringAfterRewrite.contains("( t2 . t3id ) = ( t3 . id3 )"), fromStringAfterRewrite);