You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lens.apache.org by am...@apache.org on 2015/08/19 11:01:16 UTC

[2/3] incubator-lens git commit: LENS-187 : Move cube specific error message codes from Hive code to Lens

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
index 5bdb412..1a347b2 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
@@ -35,12 +35,11 @@ import org.apache.lens.cube.metadata.ExprColumn.ExprSpec;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.cube.parse.HQLParser.ASTNodeVisitor;
 import org.apache.lens.cube.parse.HQLParser.TreeNode;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.antlr.runtime.CommonToken;
 
@@ -79,7 +78,7 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     ExpressionContext(CubeQueryContext cubeql, ExprColumn exprCol, AbstractBaseTable srcTable, String srcAlias)
-      throws SemanticException {
+      throws LensException {
       this.srcTable = srcTable;
       this.exprCol = exprCol;
       this.srcAlias = srcAlias;
@@ -89,19 +88,16 @@ class ExpressionResolver implements ContextRewriter {
       resolveColumnsAndAlias(cubeql);
       log.debug("All exprs for {} are {}", exprCol.getName(), allExprs);
     }
-    private void resolveColumnsAndAlias(CubeQueryContext cubeql) throws SemanticException {
+
+    private void resolveColumnsAndAlias(CubeQueryContext cubeql) throws LensException {
       for (ExprSpecContext esc : allExprs) {
         esc.resolveColumns(cubeql);
         esc.replaceAliasInAST(cubeql);
         for (String table : esc.getTblAliasToColumns().keySet()) {
-          try {
-            if (!CubeQueryContext.DEFAULT_TABLE.equalsIgnoreCase(table) && !srcAlias.equals(table)) {
-              cubeql.addOptionalDimTable(table, null,
-                false, null, false, esc.getTblAliasToColumns().get(table).toArray(new String[0]));
-              esc.exprDims.add((Dimension) cubeql.getCubeTableForAlias(table));
-            }
-          } catch (HiveException e) {
-            throw new SemanticException(e);
+          if (!CubeQueryContext.DEFAULT_TABLE.equalsIgnoreCase(table) && !srcAlias.equals(table)) {
+            cubeql.addOptionalDimTable(table, null, false, null, false,
+                esc.getTblAliasToColumns().get(table).toArray(new String[0]));
+            esc.exprDims.add((Dimension) cubeql.getCubeTableForAlias(table));
           }
         }
       }
@@ -109,14 +105,14 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     private void resolveColumnsAndReplaceAlias(CubeQueryContext cubeql, Set<ExprSpecContext> exprs)
-      throws SemanticException {
+      throws LensException {
       Set<ExprSpecContext> nestedExpressions = new LinkedHashSet<ExprSpecContext>();
       for (ExprSpecContext esc : exprs) {
         for (Map.Entry<String, Set<String>> entry : esc.getTblAliasToColumns().entrySet()) {
           if (entry.getKey().equals(CubeQueryContext.DEFAULT_TABLE)) {
             continue;
           }
-          AbstractBaseTable baseTable = (AbstractBaseTable)cubeql.getCubeTableForAlias(entry.getKey());
+          AbstractBaseTable baseTable = (AbstractBaseTable) cubeql.getCubeTableForAlias(entry.getKey());
           Set<String> exprCols = new HashSet<String>();
           for (String col : entry.getValue()) {
             // col is an expression
@@ -132,14 +128,10 @@ class ExpressionResolver implements ContextRewriter {
         esc.resolveColumns(cubeql);
         esc.replaceAliasInAST(cubeql);
         for (String table : esc.getTblAliasToColumns().keySet()) {
-          try {
-            if (!CubeQueryContext.DEFAULT_TABLE.equalsIgnoreCase(table) && !srcAlias.equals(table)) {
-              cubeql.addOptionalDimTable(table, null, false, null, false,
+          if (!CubeQueryContext.DEFAULT_TABLE.equalsIgnoreCase(table) && !srcAlias.equals(table)) {
+            cubeql.addOptionalDimTable(table, null, false, null, false,
                 esc.getTblAliasToColumns().get(table).toArray(new String[0]));
-              esc.exprDims.add((Dimension) cubeql.getCubeTableForAlias(table));
-            }
-          } catch (HiveException e) {
-            throw new SemanticException(e);
+            esc.exprDims.add((Dimension) cubeql.getCubeTableForAlias(table));
           }
         }
       }
@@ -147,7 +139,7 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     private void addAllNestedExpressions(CubeQueryContext cubeql, ExprSpecContext baseEsc, AbstractBaseTable baseTable,
-      Set<ExprSpecContext> nestedExpressions, Set<String> exprCols) throws SemanticException {
+      Set<ExprSpecContext> nestedExpressions, Set<String> exprCols) throws LensException {
       for (String col : exprCols) {
         Set<ExprSpecContext> replacedExpressions = new LinkedHashSet<ExprSpecContext>();
         for (ExprSpec es : baseTable.getExpressionByName(col).getExpressionSpecs()) {
@@ -168,7 +160,8 @@ class ExpressionResolver implements ContextRewriter {
     void addDirectlyAvailable(CandidateTable cTable) {
       directlyAvailableIn.add(cTable);
     }
-    void addEvaluable(CubeQueryContext cubeql, CandidateTable cTable, ExprSpecContext esc) throws SemanticException {
+
+    void addEvaluable(CubeQueryContext cubeql, CandidateTable cTable, ExprSpecContext esc) throws LensException {
       Set<ExprSpecContext> evalSet = evaluableExpressions.get(cTable);
       if (evalSet == null) {
         evalSet = new LinkedHashSet<ExprSpecContext>();
@@ -176,14 +169,10 @@ class ExpressionResolver implements ContextRewriter {
       }
       // add optional dimensions involved in expressions
       for (String table : esc.getTblAliasToColumns().keySet()) {
-        try {
-          if (!CubeQueryContext.DEFAULT_TABLE.equalsIgnoreCase(table) && !srcAlias.equals(table)) {
-            cubeql.addOptionalExprDimTable(table, exprCol.getName(), srcAlias, cTable,
+        if (!CubeQueryContext.DEFAULT_TABLE.equalsIgnoreCase(table) && !srcAlias.equals(table)) {
+          cubeql.addOptionalExprDimTable(table, exprCol.getName(), srcAlias, cTable,
               esc.getTblAliasToColumns().get(table).toArray(new String[0]));
-            esc.exprDims.add((Dimension) cubeql.getCubeTableForAlias(table));
-          }
-        } catch (HiveException e) {
-          throw new SemanticException(e);
+          esc.exprDims.add((Dimension) cubeql.getCubeTableForAlias(table));
         }
       }
       evalSet.add(esc);
@@ -228,19 +217,19 @@ class ExpressionResolver implements ContextRewriter {
     @Getter
     private Map<String, Set<String>> tblAliasToColumns = new HashMap<String, Set<String>>();
 
-    ExprSpecContext(ExprSpec exprSpec, CubeQueryContext cubeql) throws SemanticException {
+    ExprSpecContext(ExprSpec exprSpec, CubeQueryContext cubeql) throws LensException {
       // replaces table names in expression with aliases in the query
       finalAST = replaceAlias(exprSpec.getASTNode(), cubeql);
       exprSpecs.add(exprSpec);
     }
     public ExprSpecContext(ExprSpecContext nested, ExprSpec current, ASTNode node,
-      CubeQueryContext cubeql) throws SemanticException {
+      CubeQueryContext cubeql) throws LensException {
       exprSpecs.addAll(nested.exprSpecs);
       exprSpecs.add(current);
       finalAST = replaceAlias(node, cubeql);
     }
     public void replaceAliasInAST(CubeQueryContext cubeql)
-      throws SemanticException {
+      throws LensException {
       AliasReplacer.extractTabAliasForCol(cubeql, this);
       AliasReplacer.replaceAliases(finalAST, 0, cubeql.getColToTableAlias());
     }
@@ -253,7 +242,7 @@ class ExpressionResolver implements ContextRewriter {
       cols.add(column);
     }
 
-    void resolveColumns(CubeQueryContext cubeql) throws SemanticException {
+    void resolveColumns(CubeQueryContext cubeql) throws LensException {
       // finds all columns and table aliases in the expression
       ColumnResolver.getColsForTree(cubeql, finalAST, this);
     }
@@ -371,7 +360,7 @@ class ExpressionResolver implements ContextRewriter {
 
     //updates all expression specs which are evaluable
     public void updateEvaluables(String expr, CandidateTable cTable)
-      throws SemanticException {
+      throws LensException {
       String alias = cubeql.getAliasForTableName(cTable.getBaseTable().getName());
       ExpressionContext ec = getExpressionContext(expr, alias);
       if (cTable.getColumns().contains(expr)) {
@@ -450,7 +439,7 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     public Set<Dimension> rewriteExprCtx(CandidateFact cfact, Map<Dimension, CandidateDim> dimsToQuery,
-      boolean replaceFact) throws SemanticException {
+      boolean replaceFact) throws LensException {
       Set<Dimension> exprDims = new HashSet<Dimension>();
       if (!allExprsQueried.isEmpty()) {
         // pick expressions for fact
@@ -476,7 +465,7 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     private void replacePickedExpressions(CandidateFact cfact, boolean replaceFact)
-      throws SemanticException {
+      throws LensException {
       if (replaceFact) {
         replaceAST(cubeql, cfact.getSelectAST());
         replaceAST(cubeql, cfact.getWhereAST());
@@ -493,14 +482,14 @@ class ExpressionResolver implements ContextRewriter {
       replaceAST(cubeql, cubeql.getOrderByAST());
     }
 
-    private void replaceAST(final CubeQueryContext cubeql, ASTNode node) throws SemanticException {
+    private void replaceAST(final CubeQueryContext cubeql, ASTNode node) throws LensException {
       if (node == null) {
         return;
       }
       // Traverse the tree and resolve expression columns
       HQLParser.bft(node, new ASTNodeVisitor() {
         @Override
-        public void visit(TreeNode visited) throws SemanticException {
+        public void visit(TreeNode visited) throws LensException {
           ASTNode node = visited.getNode();
           int childcount = node.getChildCount();
           for (int i = 0; i < childcount; i++) {
@@ -637,7 +626,7 @@ class ExpressionResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     ExpressionResolverContext exprCtx = cubeql.getExprCtx();
     if (exprCtx == null) {
       exprCtx = new ExpressionResolverContext(cubeql);
@@ -735,7 +724,7 @@ class ExpressionResolver implements ContextRewriter {
     }
   }
 
-  private static ASTNode replaceAlias(final ASTNode expr, final CubeQueryContext cubeql) throws SemanticException {
+  private static ASTNode replaceAlias(final ASTNode expr, final CubeQueryContext cubeql) throws LensException {
     ASTNode finalAST = HQLParser.copyAST(expr);
     HQLParser.bft(finalAST, new ASTNodeVisitor() {
       @Override
@@ -762,14 +751,14 @@ class ExpressionResolver implements ContextRewriter {
   }
 
   private static void replaceColumnInAST(ASTNode expr, final String toReplace, final ASTNode columnAST)
-    throws SemanticException {
+    throws LensException {
     if (expr == null) {
       return;
     }
     // Traverse the tree and resolve expression columns
     HQLParser.bft(expr, new ASTNodeVisitor() {
       @Override
-      public void visit(TreeNode visited) throws SemanticException {
+      public void visit(TreeNode visited) throws LensException {
         ASTNode node = visited.getNode();
         int childcount = node.getChildCount();
         for (int i = 0; i < childcount; i++) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java
index 623c58b..6c44233 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java
@@ -22,8 +22,7 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.lens.cube.metadata.Dimension;
-
-import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.lens.server.api.error.LensException;
 
 import lombok.extern.slf4j.Slf4j;
 
@@ -37,7 +36,7 @@ public class FactHQLContext extends DimHQLContext {
   private final Set<Dimension> factDims;
 
   FactHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, Set<Dimension> factDims,
-    CubeQueryContext query) throws SemanticException {
+    CubeQueryContext query) throws LensException {
     super(query, dimsToQuery, factDims, fact.getSelectTree(), fact.getWhereTree(), fact.getGroupByTree(), null, fact
       .getHavingTree(), null);
     this.fact = fact;
@@ -55,7 +54,7 @@ public class FactHQLContext extends DimHQLContext {
     return fact;
   }
 
-  protected String getFromTable() throws SemanticException {
+  protected String getFromTable() throws LensException {
     return query.getQBFromString(fact, getDimsToQuery());
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
index 1a1232b..ab7a6d8 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
@@ -27,11 +27,11 @@ import org.apache.lens.cube.metadata.DerivedCube;
 import org.apache.lens.cube.metadata.ReferencedDimAtrribute;
 import org.apache.lens.cube.metadata.ReferencedDimAtrribute.ChainRefCol;
 import org.apache.lens.cube.parse.ExpressionResolver.ExprSpecContext;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import com.google.common.collect.ImmutableSet;
 
@@ -41,11 +41,11 @@ import com.google.common.collect.ImmutableSet;
 public class FieldValidator implements ContextRewriter {
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws FieldsCannotBeQueriedTogetherException, SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     validateFields(cubeql);
   }
 
-  public void validateFields(CubeQueryContext cubeql) throws FieldsCannotBeQueriedTogetherException, SemanticException {
+  public void validateFields(CubeQueryContext cubeql) throws LensException {
     CubeInterface cube = cubeql.getCube();
     if (cube == null) {
       return;
@@ -57,7 +57,7 @@ public class FieldValidator implements ContextRewriter {
       try {
         dcubes = cubeql.getMetastoreClient().getAllDerivedQueryableCubes(cube);
       } catch (HiveException e) {
-        throw new SemanticException(e);
+        throw new LensException(e);
       }
 
       ImmutableSet<String> queriedTimeDimCols = cubeql.getQueriedTimeDimCols();
@@ -135,7 +135,7 @@ public class FieldValidator implements ContextRewriter {
                                                  final ASTNode tree,
                                                  final Set<String> dimAttributes,
                                                  final Set<String> chainSourceColumns,
-                                                 final Set<String> nonQueryableColumns) throws SemanticException {
+                                                 final Set<String> nonQueryableColumns) throws LensException {
     if (tree == null || !cubeql.hasCubeInQuery()) {
       return;
     }
@@ -144,7 +144,7 @@ public class FieldValidator implements ContextRewriter {
 
     HQLParser.bft(tree, new HQLParser.ASTNodeVisitor() {
       @Override
-      public void visit(HQLParser.TreeNode treeNode) throws SemanticException {
+      public void visit(HQLParser.TreeNode treeNode) throws LensException {
         ASTNode astNode = treeNode.getNode();
         if (astNode.getToken().getType() == HiveParser.DOT) {
           // At this point alias replacer has run, so all columns are of the type table.column name

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
index 4d2692b..97088a1 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
@@ -25,13 +25,13 @@ import java.util.LinkedList;
 import java.util.List;
 
 import org.apache.lens.cube.metadata.AbstractBaseTable;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.antlr.runtime.CommonToken;
 import org.antlr.runtime.tree.Tree;
@@ -56,7 +56,7 @@ class GroupbyResolver implements ContextRewriter {
   }
 
   private void promoteSelect(CubeQueryContext cubeql, List<String> nonMsrNonAggSelExprsWithoutAlias,
-    List<String> groupByExprs) throws SemanticException {
+    List<String> groupByExprs) throws LensException {
     if (!selectPromotionEnabled) {
       return;
     }
@@ -77,7 +77,7 @@ class GroupbyResolver implements ContextRewriter {
             try {
               exprAST = HQLParser.parseExpr(expr);
             } catch (ParseException e) {
-              throw new SemanticException(e);
+              throw new LensException(e);
             }
             ASTNode groupbyAST = cubeql.getGroupByAST();
             if (!isConstantsUsed(exprAST)) {
@@ -124,7 +124,7 @@ class GroupbyResolver implements ContextRewriter {
   }
 
   private void promoteGroupby(CubeQueryContext cubeql, List<String> selectExprs, List<String> groupByExprs)
-    throws SemanticException {
+    throws LensException {
     if (!groupbyPromotionEnabled) {
       return;
     }
@@ -144,7 +144,7 @@ class GroupbyResolver implements ContextRewriter {
         try {
           exprAST = HQLParser.parseExpr(expr);
         } catch (ParseException e) {
-          throw new SemanticException(e);
+          throw new LensException(e);
         }
         addChildAtIndex(index, cubeql.getSelectAST(), exprAST);
         index++;
@@ -168,7 +168,7 @@ class GroupbyResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     // Process Aggregations by making sure that all group by keys are projected;
     // and all projection fields are added to group by keylist;
     List<String> selectExprs = new ArrayList<String>();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLContextInterface.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLContextInterface.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLContextInterface.java
index 35011e8..78d448a 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLContextInterface.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLContextInterface.java
@@ -18,7 +18,8 @@
  */
 package org.apache.lens.cube.parse;
 
-import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.lens.server.api.error.LensException;
+
 
 /**
  * HQL context holding the ql expressions
@@ -29,9 +30,9 @@ public interface HQLContextInterface {
    * Get the HQL query.
    *
    * @return query string
-   * @throws SemanticException
+   * @throws LensException
    */
-  String toHQL() throws SemanticException;
+  String toHQL() throws LensException;
 
   /**
    * Get select expression.

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
index 7b99310..586629f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
@@ -52,7 +52,7 @@ public final class HQLParser {
   public static final Pattern P_WSPACE = Pattern.compile("\\s+");
 
   public interface ASTNodeVisitor {
-    void visit(TreeNode node) throws SemanticException;
+    void visit(TreeNode node) throws LensException;
   }
 
   public static class TreeNode {
@@ -293,9 +293,9 @@ public final class HQLParser {
    *
    * @param root
    * @param visitor
-   * @throws SemanticException
+   * @throws LensException
    */
-  public static void bft(ASTNode root, ASTNodeVisitor visitor) throws SemanticException {
+  public static void bft(ASTNode root, ASTNodeVisitor visitor) throws LensException {
     if (root == null) {
       throw new NullPointerException("Root cannot be null");
     }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
index a6e9340..826a59d 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
@@ -22,14 +22,15 @@ import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 
 import java.util.*;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.SchemaGraph.TableRelationship;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.cube.parse.CubeQueryContext.OptionalDimCtx;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.*;
 
@@ -421,7 +422,7 @@ class JoinResolver implements ContextRewriter {
     }
 
     public String getFromString(String fromTable, CandidateFact fact, Set<Dimension> qdims,
-      Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext cubeql) throws SemanticException {
+      Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext cubeql) throws LensException {
       String fromString = fromTable;
       log.info("All paths dump:{}", cubeql.getAutoJoinCtx().getAllPaths());
       if (qdims == null || qdims.isEmpty()) {
@@ -829,7 +830,7 @@ class JoinResolver implements ContextRewriter {
     }
 
     public Set<Dimension> pickOptionalTables(final CandidateFact fact,
-      Set<Dimension> qdims, CubeQueryContext cubeql) throws SemanticException {
+      Set<Dimension> qdims, CubeQueryContext cubeql) throws LensException {
       // Find the min cost join clause and add dimensions in the clause as optional dimensions
       Set<Dimension> joiningOptionalTables = new HashSet<Dimension>();
       if (qdims == null) {
@@ -846,7 +847,7 @@ class JoinResolver implements ContextRewriter {
       }
 
       if (minCostClause == null) {
-        throw new SemanticException(ErrorMsg.NO_JOIN_PATH, qdims.toString(), autoJoinTarget.getName());
+        throw new LensException(LensCubeErrorCode.NO_JOIN_PATH.getValue(), qdims.toString(), autoJoinTarget.getName());
       }
 
       log.info("Fact: {} minCostClause:{}", fact, minCostClause);
@@ -876,7 +877,7 @@ class JoinResolver implements ContextRewriter {
           }
         }
         if (cubeql.getCandidateDimTables().get(dim).size() == 0) {
-          throw new SemanticException(ErrorMsg.NO_DIM_HAS_COLUMN, dim.getName(),
+          throw new LensException(LensCubeErrorCode.NO_DIM_HAS_COLUMN.getValue(), dim.getName(),
             minCostClause.chainColumns.get(dim).toString());
         }
       }
@@ -936,28 +937,26 @@ class JoinResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     partialJoinConditions = new HashMap<AbstractCubeTable, String>();
     tableJoinTypeMap = new HashMap<AbstractCubeTable, JoinType>();
-    resolveJoins(cubeql);
+    try {
+      resolveJoins(cubeql);
+    } catch (HiveException e) {
+      throw new LensException(e);
+    }
   }
 
-  private void resolveJoins(CubeQueryContext cubeql) throws SemanticException {
+  private void resolveJoins(CubeQueryContext cubeql) throws LensException, HiveException {
     QB cubeQB = cubeql.getQb();
-    boolean joinResolverDisabled =
-      cubeql.getConf().getBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, CubeQueryConfUtil.DEFAULT_DISABLE_AUTO_JOINS);
+    boolean joinResolverDisabled = cubeql.getConf().getBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS,
+        CubeQueryConfUtil.DEFAULT_DISABLE_AUTO_JOINS);
     if (joinResolverDisabled) {
       if (cubeql.getJoinTree() != null) {
         cubeQB.setQbJoinTree(genJoinTree(cubeQB, cubeql.getJoinTree(), cubeql));
       }
     } else {
-      try {
-        autoResolveJoins(cubeql);
-      } catch (SemanticException e) {
-        throw e;
-      } catch (HiveException e) {
-        throw new SemanticException(e);
-      }
+      autoResolveJoins(cubeql);
     }
   }
 
@@ -980,9 +979,10 @@ class JoinResolver implements ContextRewriter {
    * Resolve joins automatically for the given query.
    *
    * @param cubeql
-   * @throws SemanticException
+   * @throws LensException
+   * @throws HiveException
    */
-  private void autoResolveJoins(CubeQueryContext cubeql) throws HiveException {
+  private void autoResolveJoins(CubeQueryContext cubeql) throws LensException, HiveException {
     // Check if this query needs a join -
     // A join is needed if there is a cube and at least one dimension, or, 0
     // cubes and more than one
@@ -1052,7 +1052,7 @@ class JoinResolver implements ContextRewriter {
           }
           log.warn("No join path between {} and {}", joinee.getName(), target.getName());
           if (cubeql.getDimensions().contains(joinee)) {
-            throw new SemanticException(ErrorMsg.NO_JOIN_PATH, joinee.getName(), target.getName());
+            throw new LensException(LensCubeErrorCode.NO_JOIN_PATH.getValue(), joinee.getName(), target.getName());
           } else {
             // if joinee is optional dim table, remove those candidate facts
             Set<CandidateTable> candidates = cubeql.getOptionalDimensionMap().get(joinee).requiredForCandidates;
@@ -1076,19 +1076,19 @@ class JoinResolver implements ContextRewriter {
           }
         }
       } else if (dimensionInJoinChain.get(joinee).size() > 1) {
-        throw new SemanticException("Table " + joinee.getName() + " has "
+        throw new LensException("Table " + joinee.getName() + " has "
           +dimensionInJoinChain.get(joinee).size() + " different paths through joinchains "
           +"(" + dimensionInJoinChain.get(joinee) + ")"
           +" used in query. Couldn't determine which one to use");
       } else {
         // the case when dimension is used only once in all joinchains.
         if (isJoinchainDestination(cubeql, joinee)) {
-          throw new SemanticException("Table " + joinee.getName() + " is getting accessed via two different names: "
+          throw new LensException("Table " + joinee.getName() + " is getting accessed via two different names: "
             + "[" + dimensionInJoinChain.get(joinee).get(0).getName() + ", " + joinee.getName() + "]");
         }
         // table is accessed with chain and no chain
         if (cubeql.getNonChainedDimensions().contains(joinee)) {
-          throw new SemanticException("Table " + joinee.getName() + " is getting accessed via joinchain: "
+          throw new LensException("Table " + joinee.getName() + " is getting accessed via joinchain: "
             + dimensionInJoinChain.get(joinee).get(0).getName() + " and no chain at all");
         }
       }
@@ -1119,7 +1119,7 @@ class JoinResolver implements ContextRewriter {
   }
 
   private void addOptionalTables(CubeQueryContext cubeql, List<SchemaGraph.JoinPath> joinPathList, boolean required)
-    throws SemanticException {
+    throws LensException {
     for (SchemaGraph.JoinPath joinPath : joinPathList) {
       for (TableRelationship rel : joinPath.getEdges()) {
         // Add the joined tables to the queries table sets so that they are
@@ -1129,18 +1129,18 @@ class JoinResolver implements ContextRewriter {
     }
   }
 
-  private void setTarget(CubeMetastoreClient metastore, ASTNode node) throws HiveException {
+  private void setTarget(CubeMetastoreClient metastore, ASTNode node) throws  HiveException, LensException  {
     String targetTableName = HQLParser.getString(HQLParser.findNodeByPath(node, TOK_TABNAME, Identifier));
     if (metastore.isDimension(targetTableName)) {
       target = metastore.getDimension(targetTableName);
     } else if (metastore.isCube(targetTableName)) {
       target = (AbstractCubeTable) metastore.getCube(targetTableName);
     } else {
-      throw new SemanticException(ErrorMsg.JOIN_TARGET_NOT_CUBE_TABLE, targetTableName);
+      throw new LensException(LensCubeErrorCode.JOIN_TARGET_NOT_CUBE_TABLE.getValue(), targetTableName);
     }
   }
 
-  private void searchDimensionTables(CubeMetastoreClient metastore, ASTNode node) throws HiveException {
+  private void searchDimensionTables(CubeMetastoreClient metastore, ASTNode node) throws HiveException, LensException {
     if (node == null) {
       return;
     }
@@ -1195,7 +1195,7 @@ class JoinResolver implements ContextRewriter {
   }
 
   // Recursively find out join conditions
-  private QBJoinTree genJoinTree(QB qb, ASTNode joinParseTree, CubeQueryContext cubeql) throws SemanticException {
+  private QBJoinTree genJoinTree(QB qb, ASTNode joinParseTree, CubeQueryContext cubeql) throws LensException {
     QBJoinTree joinTree = new QBJoinTree();
     JoinCond[] condn = new JoinCond[1];
 
@@ -1290,7 +1290,7 @@ class JoinResolver implements ContextRewriter {
       cubeql.setJoinCond(joinTree, HQLParser.getString(joinCond));
     } else {
       // No join condition specified. this should be an error
-      throw new SemanticException(ErrorMsg.NO_JOIN_CONDITION_AVAIABLE);
+      throw new LensException(LensCubeErrorCode.NO_JOIN_CONDITION_AVAIABLE.getValue());
     }
     return joinTree;
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
index 7f02ae8..a53e994 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
@@ -21,9 +21,9 @@ package org.apache.lens.cube.parse;
 import java.util.*;
 
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import lombok.extern.slf4j.Slf4j;
 
@@ -36,7 +36,7 @@ class LeastPartitionResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     if (cubeql.getCube() != null && !cubeql.getCandidateFactSets().isEmpty()) {
       Map<Set<CandidateFact>, Integer> factPartCount = new HashMap<Set<CandidateFact>, Integer>();
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestDimensionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestDimensionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestDimensionResolver.java
index 4ae6226..82410d3 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestDimensionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestDimensionResolver.java
@@ -22,9 +22,9 @@ import java.util.*;
 
 import org.apache.lens.cube.metadata.Dimension;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import lombok.extern.slf4j.Slf4j;
 
@@ -38,7 +38,7 @@ class LightestDimensionResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     if (!cubeql.getCandidateDimTables().isEmpty()) {
       for (Map.Entry<Dimension, Set<CandidateDim>> entry : cubeql.getCandidateDimTables().entrySet()) {
         if (entry.getValue().isEmpty()) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestFactResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestFactResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestFactResolver.java
index fba682d..97accbb 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestFactResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestFactResolver.java
@@ -22,9 +22,9 @@ package org.apache.lens.cube.parse;
 import java.util.*;
 
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import lombok.extern.slf4j.Slf4j;
 
@@ -37,7 +37,7 @@ public class LightestFactResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     if (cubeql.getCube() != null && !cubeql.getCandidateFactSets().isEmpty()) {
       Map<Set<CandidateFact>, Double> factWeightMap = new HashMap<Set<CandidateFact>, Double>();
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
index 4d8cbf3..13f1aa4 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
@@ -29,7 +29,6 @@ import org.apache.lens.cube.metadata.timeline.RangesPartitionTimeline;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import com.google.common.collect.Maps;
 
@@ -47,7 +46,7 @@ class MaxCoveringFactResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) {
     if (failOnPartialData) {
       // if fail on partial data is true, by the time this resolver starts,
       // all candidate fact sets are covering full time range. We can avoid

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
index b5f5adc..d8515d8 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
@@ -20,11 +20,11 @@ package org.apache.lens.cube.parse;
 
 import java.util.*;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.Dimension;
+import org.apache.lens.server.api.error.LensException;
 
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import com.google.common.collect.Lists;
 
@@ -40,7 +40,7 @@ class MultiFactHQLContext extends SimpleHQLContext {
   private Map<CandidateFact, Set<Dimension>> factDimMap;
 
   MultiFactHQLContext(Set<CandidateFact> facts, Map<Dimension, CandidateDim> dimsToQuery,
-    Map<CandidateFact, Set<Dimension>> factDimMap, CubeQueryContext query) throws SemanticException {
+    Map<CandidateFact, Set<Dimension>> factDimMap, CubeQueryContext query) throws LensException {
     super();
     this.query = query;
     this.facts = facts;
@@ -48,7 +48,7 @@ class MultiFactHQLContext extends SimpleHQLContext {
     this.factDimMap = factDimMap;
   }
 
-  protected void setMissingExpressions() throws SemanticException {
+  protected void setMissingExpressions() throws LensException {
     setSelect(getSelectString());
     setFrom(getFromString());
     setWhere(getWhereString());
@@ -73,11 +73,11 @@ class MultiFactHQLContext extends SimpleHQLContext {
     return null;
   }
 
-  public String toHQL() throws SemanticException {
+  public String toHQL() throws LensException {
     return query.getInsertClause() + super.toHQL();
   }
 
-  private String getSelectString() throws SemanticException {
+  private String getSelectString() throws LensException {
     Map<Integer, List<Integer>> selectToFactIndex =
       new HashMap<Integer, List<Integer>>(query.getSelectAST().getChildCount());
     int fi = 1;
@@ -93,8 +93,8 @@ class MultiFactHQLContext extends SimpleHQLContext {
     StringBuilder select = new StringBuilder();
     for (int i = 0; i < query.getSelectAST().getChildCount(); i++) {
       if (selectToFactIndex.get(i) == null) {
-        throw new SemanticException(ErrorMsg.EXPRESSION_NOT_IN_ANY_FACT, HQLParser.getString((ASTNode) query
-          .getSelectAST().getChild(i)));
+        throw new LensException(LensCubeErrorCode.EXPRESSION_NOT_IN_ANY_FACT.getValue(),
+            HQLParser.getString((ASTNode) query.getSelectAST().getChild(i)));
       }
       if (selectToFactIndex.get(i).size() == 1) {
         select.append("mq").append(selectToFactIndex.get(i).get(0)).append(".")
@@ -124,7 +124,7 @@ class MultiFactHQLContext extends SimpleHQLContext {
     return facts;
   }
 
-  private String getFromString() throws SemanticException {
+  private String getFromString() throws LensException {
     StringBuilder fromBuilder = new StringBuilder();
     int aliasCount = 1;
     Iterator<CandidateFact> iter = facts.iterator();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
index f3f3f78..067a37a 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
@@ -22,8 +22,9 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
+import org.apache.lens.server.api.error.LensException;
+
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import lombok.extern.slf4j.Slf4j;
 
@@ -70,11 +71,12 @@ public abstract class SimpleHQLContext implements HQLContextInterface {
    * <p></p>
    * Leaving this empty implementation for the case of all expressions being passed in constructor. If other
    * constructors are used the missing expressions should be set here
+   * @throws LensException
    */
-  protected void setMissingExpressions() throws SemanticException {
+  protected void setMissingExpressions() throws LensException {
   }
 
-  public String toHQL() throws SemanticException {
+  public String toHQL() throws LensException {
     setMissingExpressions();
     String qfmt = getQueryFormat();
     Object[] queryTreeStrings = getQueryTreeStrings();
@@ -85,7 +87,7 @@ public abstract class SimpleHQLContext implements HQLContextInterface {
     return baseQuery;
   }
 
-  private String[] getQueryTreeStrings() throws SemanticException {
+  private String[] getQueryTreeStrings() throws LensException {
     List<String> qstrs = new ArrayList<String>();
     qstrs.add(select);
     qstrs.add(from);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
index b63111b..60b2dde 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
@@ -22,10 +22,11 @@ import java.util.Map;
 
 import org.apache.lens.cube.metadata.Dimension;
 
+import org.apache.lens.server.api.error.LensException;
+
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 /**
  * HQL context class which passes down all query strings to come from DimOnlyHQLContext and works with fact being
@@ -39,13 +40,13 @@ class SingleFactHQLContext extends DimOnlyHQLContext {
   private String storageAlias;
 
   SingleFactHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query)
-    throws SemanticException {
+    throws LensException {
     super(dimsToQuery, query);
     this.fact = fact;
   }
 
   SingleFactHQLContext(CandidateFact fact, String storageAlias, Map<Dimension, CandidateDim> dimsToQuery,
-      CubeQueryContext query, String whereClause) throws SemanticException {
+      CubeQueryContext query, String whereClause) throws LensException {
     super(dimsToQuery, query, whereClause);
     this.fact = fact;
     this.storageAlias = storageAlias;
@@ -56,7 +57,7 @@ class SingleFactHQLContext extends DimOnlyHQLContext {
     return fact;
   }
 
-  static void addRangeClauses(CubeQueryContext query, CandidateFact fact) throws SemanticException {
+  static void addRangeClauses(CubeQueryContext query, CandidateFact fact) throws LensException {
     if (fact != null) {
       // resolve timerange positions and replace it by corresponding where
       // clause
@@ -70,7 +71,7 @@ class SingleFactHQLContext extends DimOnlyHQLContext {
             try {
               rangeAST = HQLParser.parseExpr(rangeWhere);
             } catch (ParseException e) {
-              throw new SemanticException(e);
+              throw new LensException(e);
             }
             rangeAST.setParent(range.getParent());
             range.getParent().setChild(range.getChildIndex(), rangeAST);
@@ -83,7 +84,7 @@ class SingleFactHQLContext extends DimOnlyHQLContext {
 
 
   @Override
-  protected String getFromTable() throws SemanticException {
+  protected String getFromTable() throws LensException {
     if (getQuery().getAutoJoinCtx() != null && getQuery().getAutoJoinCtx().isJoinsResolved()) {
       if (storageAlias != null) {
         return storageAlias;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
index 4ad2f1f..15a98dd 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
@@ -23,8 +23,7 @@ import java.util.ArrayList;
 import java.util.Map;
 
 import org.apache.lens.cube.metadata.Dimension;
-
-import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.lens.server.api.error.LensException;
 
 import lombok.Getter;
 
@@ -36,14 +35,14 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
   private CandidateFact fact = null;
 
   SingleFactMultiStorageHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query)
-    throws SemanticException {
+    throws LensException {
     this.query = query;
     this.fact = fact;
     setUnionContexts(fact, dimsToQuery, query);
   }
 
   private void setUnionContexts(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query)
-    throws SemanticException {
+    throws LensException {
     hqlContexts = new ArrayList<HQLContextInterface>();
     String alias = getQuery().getAliasForTableName(getQuery().getCube().getName());
     for (String storageTable : fact.getStorageTables()) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index aa76c0c..58d0fa7 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -42,7 +42,6 @@ import org.apache.lens.server.api.error.LensException;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.util.ReflectionUtils;
 
 import com.google.common.collect.Lists;
@@ -126,7 +125,7 @@ class StorageTableResolver implements ContextRewriter {
   Map<String, List<String>> storagePartMap = new HashMap<String, List<String>>();
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     client = cubeql.getMetastoreClient();
 
     switch (phase) {
@@ -159,7 +158,7 @@ class StorageTableResolver implements ContextRewriter {
     phase = phase.next();
   }
 
-  private void resolveDimStorageTablesAndPartitions(CubeQueryContext cubeql) throws SemanticException {
+  private void resolveDimStorageTablesAndPartitions(CubeQueryContext cubeql) throws LensException {
     Set<Dimension> allDims = new HashSet<Dimension>(cubeql.getDimensions());
     allDims.addAll(cubeql.getOptionalDimensions());
     for (Dimension dim : allDims) {
@@ -234,7 +233,7 @@ class StorageTableResolver implements ContextRewriter {
   }
 
   // Resolves all the storage table names, which are valid for each updatePeriod
-  private void resolveFactStorageTableNames(CubeQueryContext cubeql) throws SemanticException {
+  private void resolveFactStorageTableNames(CubeQueryContext cubeql) throws LensException {
     Iterator<CandidateFact> i = cubeql.getCandidateFacts().iterator();
     while (i.hasNext()) {
       CubeFactTable fact = i.next().fact;
@@ -317,7 +316,7 @@ class StorageTableResolver implements ContextRewriter {
   }
 
   private TimeRange getFallbackRange(TimeRange range, CandidateFact cfact, CubeQueryContext cubeql)
-    throws SemanticException {
+    throws LensException {
     Cube baseCube = cubeql.getBaseCube();
     try {
       ArrayList<String> tableNames = Lists.newArrayList(cfact.fact.getName(), cubeql.getCube().getName());
@@ -349,11 +348,11 @@ class StorageTableResolver implements ContextRewriter {
         .toDate(diff1.negativeOffsetFrom(range.getToDate()))
         .partitionColumn(fallbackPartCol).build();
     } catch (HiveException e) {
-      throw new SemanticException(e);
+      throw new LensException(e);
     }
   }
 
-  private void resolveFactStoragePartitions(CubeQueryContext cubeql) throws SemanticException {
+  private void resolveFactStoragePartitions(CubeQueryContext cubeql) throws LensException {
     // Find candidate tables wrt supported storages
     Iterator<CandidateFact> i = cubeql.getCandidateFacts().iterator();
     Map<TimeRange, String> whereClauseForFallback = new LinkedHashMap<TimeRange, String>();
@@ -505,12 +504,12 @@ class StorageTableResolver implements ContextRewriter {
 
   private Set<FactPartition> getPartitions(CubeFactTable fact, TimeRange range,
     HashMap<String, SkipStorageCause> skipStorageCauses,
-    PartitionRangesForPartitionColumns missingPartitions) throws SemanticException {
+    PartitionRangesForPartitionColumns missingPartitions) throws LensException {
     try {
       return getPartitions(fact, range, getValidUpdatePeriods(fact), true, skipStorageCauses,
         missingPartitions);
     } catch (Exception e) {
-      throw new SemanticException(e);
+      throw new LensException(e);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
index 03732cb..5444e71 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
@@ -24,12 +24,12 @@ import java.util.Calendar;
 import java.util.Date;
 import java.util.TreeSet;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.UpdatePeriod;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.codehaus.jackson.annotate.JsonIgnoreProperties;
 
@@ -104,13 +104,13 @@ public class TimeRange {
 
   }
 
-  public void validate() throws SemanticException {
+  public void validate() throws LensException {
     if (partitionColumn == null || fromDate == null || toDate == null || fromDate.equals(toDate)) {
-      throw new SemanticException(ErrorMsg.INVALID_TIME_RANGE);
+      throw new LensException(LensCubeErrorCode.INVALID_TIME_RANGE.getValue());
     }
 
     if (fromDate.after(toDate)) {
-      throw new SemanticException(ErrorMsg.FROM_AFTER_TO, fromDate.toString(), toDate.toString());
+      throw new LensException(LensCubeErrorCode.FROM_AFTER_TO.getValue(), fromDate.toString(), toDate.toString());
     }
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeWriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeWriter.java
index 12acf98..08f957e 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeWriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeWriter.java
@@ -21,10 +21,9 @@ package org.apache.lens.cube.parse;
 import java.util.Set;
 
 import org.apache.lens.cube.metadata.FactPartition;
-
-import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.lens.server.api.error.LensException;
 
 public interface TimeRangeWriter {
   String getTimeRangeWhereClause(CubeQueryContext cubeQueryContext, String tableName, Set<FactPartition> parts)
-    throws SemanticException;
+    throws LensException;
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
index 91c0c75..fb1c89e 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
@@ -26,6 +26,7 @@ import java.util.*;
 
 import org.apache.lens.cube.error.ColUnAvailableInTimeRange;
 import org.apache.lens.cube.error.ColUnAvailableInTimeRangeException;
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.AbstractCubeTable;
 import org.apache.lens.cube.metadata.CubeColumn;
 import org.apache.lens.cube.metadata.Dimension;
@@ -35,9 +36,7 @@ import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 
 import com.google.common.collect.Lists;
@@ -52,7 +51,7 @@ class TimerangeResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException, LensException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     if (cubeql.getCube() == null) {
       return;
     }
@@ -62,19 +61,19 @@ class TimerangeResolver implements ContextRewriter {
   }
 
 
-  private void extractTimeRange(CubeQueryContext cubeql) throws SemanticException {
+  private void extractTimeRange(CubeQueryContext cubeql) throws LensException {
     // get time range -
     // Time range should be direct child of where condition
     // TOK_WHERE.TOK_FUNCTION.Identifier Or, it should be right hand child of
     // AND condition TOK_WHERE.KW_AND.TOK_FUNCTION.Identifier
     if (cubeql.getWhereAST() == null || cubeql.getWhereAST().getChildCount() < 1) {
-      throw new SemanticException(ErrorMsg.NO_TIMERANGE_FILTER);
+      throw new LensException(LensCubeErrorCode.NO_TIMERANGE_FILTER.getValue());
     }
     searchTimeRanges(cubeql.getWhereAST(), cubeql, null, 0);
   }
 
   private void searchTimeRanges(ASTNode root, CubeQueryContext cubeql, ASTNode parent, int childIndex)
-    throws SemanticException {
+    throws LensException {
     if (root == null) {
       return;
     } else if (root.getToken().getType() == TOK_FUNCTION) {
@@ -104,7 +103,7 @@ class TimerangeResolver implements ContextRewriter {
   }
 
   private void processTimeRangeFunction(CubeQueryContext cubeql, ASTNode timenode, ASTNode parent, int childIndex)
-    throws SemanticException {
+    throws LensException {
     TimeRange.TimeRangeBuilder builder = TimeRange.getBuilder();
     builder.astNode(timenode);
     builder.parent(parent);
@@ -113,7 +112,7 @@ class TimerangeResolver implements ContextRewriter {
     String timeDimName = getColumnName((ASTNode) timenode.getChild(1));
 
     if (!cubeql.getCube().getTimedDimensions().contains(timeDimName)) {
-      throw new SemanticException(ErrorMsg.NOT_A_TIMED_DIMENSION, timeDimName);
+      throw new LensException(LensCubeErrorCode.NOT_A_TIMED_DIMENSION.getValue(), timeDimName);
     }
     // Replace timeDimName with column which is used for partitioning. Assume
     // the same column
@@ -143,7 +142,7 @@ class TimerangeResolver implements ContextRewriter {
     cubeql.getTimeRanges().add(range);
   }
 
-  private void doColLifeValidation(CubeQueryContext cubeql) throws SemanticException,
+  private void doColLifeValidation(CubeQueryContext cubeql) throws LensException,
     ColUnAvailableInTimeRangeException {
     Set<String> cubeColumns = cubeql.getColumnsQueried(cubeql.getCube().getName());
     if (cubeColumns == null || cubeColumns.isEmpty()) {
@@ -156,7 +155,7 @@ class TimerangeResolver implements ContextRewriter {
       for (TimeRange range : cubeql.getTimeRanges()) {
         if (column == null) {
           if (!cubeql.getCube().getTimedDimensions().contains(col)) {
-            throw new SemanticException(ErrorMsg.NOT_A_CUBE_COLUMN, col);
+            throw new LensException(LensCubeErrorCode.NOT_A_CUBE_COLUMN.getValue(), col);
           }
           continue;
         }
@@ -214,8 +213,9 @@ class TimerangeResolver implements ContextRewriter {
                 joinPathIterator.remove();
                 if (joinPaths.isEmpty()) {
                   // This dimension doesn't have any paths left
-                  throw new SemanticException(ErrorMsg.NO_JOIN_PATH, "No valid join path available for dimension "
-                    + dimension + " which would satisfy time range " + range.getFromDate() + "-" + range.getToDate());
+                  throw new LensException(LensCubeErrorCode.NO_JOIN_PATH.getValue(),
+                      "No valid join path available for dimension " + dimension + " which would satisfy time range "
+                          + range.getFromDate() + "-" + range.getToDate());
                 }
               }
             } // End loop to remove path

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
index e6ed86b..9005826 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
@@ -24,9 +24,10 @@ import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Set;
 
+import org.apache.lens.server.api.error.LensException;
+
 import org.apache.commons.lang.NotImplementedException;
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import lombok.AllArgsConstructor;
 import lombok.Getter;
@@ -42,7 +43,7 @@ public abstract class UnionHQLContext implements HQLContextInterface {
   List<HQLContextInterface> hqlContexts = new ArrayList<HQLContextInterface>();
 
   @Override
-  public String toHQL() throws SemanticException {
+  public String toHQL() throws LensException {
     Set<String> queryParts = new LinkedHashSet<String>();
     for (HQLContextInterface ctx : hqlContexts) {
       queryParts.add(ctx.toHQL());

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/ValidationRule.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ValidationRule.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ValidationRule.java
index 9567845..558e411 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ValidationRule.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ValidationRule.java
@@ -18,8 +18,9 @@
  */
 package org.apache.lens.cube.parse;
 
+import org.apache.lens.server.api.error.LensException;
+
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 public abstract class ValidationRule {
   Configuration conf;
@@ -29,7 +30,7 @@ public abstract class ValidationRule {
     this.conf = conf;
   }
 
-  public abstract boolean validate(CubeQueryContext ctx) throws SemanticException;
+  public abstract boolean validate(CubeQueryContext ctx) throws LensException;
 
   public String getErrorMessage() {
     return error;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index ae8984f..13eca27 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -746,7 +746,8 @@ public class CubeTestSetup {
       .createDerivedCube(TEST_CUBE_NAME, DERIVED_CUBE_NAME, measures, dimensions, new HashMap<String, String>(), 5L);
   }
 
-  private void createBaseAndDerivedCubes(CubeMetastoreClient client) throws HiveException, ParseException {
+  private void createBaseAndDerivedCubes(CubeMetastoreClient client)
+    throws HiveException, ParseException, LensException {
     Set<CubeMeasure> cubeMeasures2 = new HashSet<CubeMeasure>(cubeMeasures);
     Set<CubeDimAttribute> cubeDimensions2 = new HashSet<CubeDimAttribute>(cubeDimensions);
     cubeMeasures2.add(new ColumnMeasure(new FieldSchema("msr11", "int", "first measure")));
@@ -906,7 +907,7 @@ public class CubeTestSetup {
     createBaseCubeFacts(client);
   }
 
-  private void createBaseCubeFacts(CubeMetastoreClient client) throws HiveException {
+  private void createBaseCubeFacts(CubeMetastoreClient client) throws HiveException, LensException {
 
     Map<String, Set<UpdatePeriod>> storageAggregatePeriods = new HashMap<String, Set<UpdatePeriod>>();
     Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
index 501a4b0..0fea9f1 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
@@ -34,7 +34,6 @@ import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
@@ -50,7 +49,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithDimensionAndMeasure() throws SemanticException, ParseException, LensException {
+  public void testQueryWithDimensionAndMeasure() throws ParseException, LensException {
 
     /* If all the queried dimensions are present in a derived cube, and one of the queried measure is not present in
     the same derived cube, then query shall be disallowed.
@@ -63,7 +62,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithDimensionAndMeasureInExpression() throws SemanticException, ParseException, LensException {
+  public void testQueryWithDimensionAndMeasureInExpression() throws ParseException, LensException {
 
     /* If all the queried dimensions are present in a derived cube, and one of the queried measure is not present in
     the same derived cube, then query shall be disallowed.
@@ -76,7 +75,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithDimensionInExpressionAndMeasure() throws SemanticException, ParseException, LensException {
+  public void testQueryWithDimensionInExpressionAndMeasure() throws ParseException, LensException {
 
     /* If all the queried dimensions are present in a derived cube, and one of the queried measure is not present in
     the same derived cube, then query shall be disallowed.
@@ -89,7 +88,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithDimensionAndMeasureInExpressions() throws SemanticException, ParseException, LensException {
+  public void testQueryWithDimensionAndMeasureInExpressions() throws ParseException, LensException {
 
     /* If all the queried dimensions are present in a derived cube, and one of the queried measure is not present in
     the same derived cube, then query shall be disallowed.
@@ -102,7 +101,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithChainReferencedDimensionAttributeAndMeasure() throws SemanticException, ParseException,
+  public void testQueryWithChainReferencedDimensionAttributeAndMeasure() throws ParseException,
       LensException {
 
     /* In this query a dimension attribute referenced through join chain name is used in select. If the
@@ -118,7 +117,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithChainReferencedDimensionAttributeAndExprMeasure() throws SemanticException, ParseException,
+  public void testQueryWithChainReferencedDimensionAttributeAndExprMeasure() throws ParseException,
       LensException {
 
     /* In this query a dimension attribute referenced through join chain name is used in select. If the
@@ -134,7 +133,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithDimExprWithChainRefAndExprMeasure() throws SemanticException, ParseException,
+  public void testQueryWithDimExprWithChainRefAndExprMeasure() throws ParseException,
       LensException {
 
     /* In this query a dimension attribute referenced through join chain name is used in select. If the
@@ -150,7 +149,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithMeasureAndChainReferencedDimAttributeInFilter() throws SemanticException, ParseException,
+  public void testQueryWithMeasureAndChainReferencedDimAttributeInFilter() throws ParseException,
       LensException {
 
     /* In this query a dimension attribute referenced through join chain name is used in filter. If the
@@ -166,7 +165,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithExprMeasureAndChainReferencedDimAttributeInFilter() throws SemanticException, ParseException,
+  public void testQueryWithExprMeasureAndChainReferencedDimAttributeInFilter() throws ParseException,
       LensException {
 
     /* In this query a dimension attribute referenced through join chain name is used in filter. If the
@@ -182,7 +181,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithExprMeasureAndDimExprWithChainRefInFilter() throws SemanticException, ParseException,
+  public void testQueryWithExprMeasureAndDimExprWithChainRefInFilter() throws ParseException,
       LensException {
 
     /* In this query a dimension attribute referenced through join chain name is used in filter. If the
@@ -199,7 +198,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithOnlyMeasure() throws ParseException, SemanticException, LensException {
+  public void testQueryWithOnlyMeasure() throws ParseException, LensException {
 
     /* A query which contains only measure should pass, if the measure is present in some derived cube.
     msr1 is present in one of the derived cubes, hence query shall pass without any exception. */
@@ -208,7 +207,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithOnlyExprMeasure() throws ParseException, SemanticException, LensException {
+  public void testQueryWithOnlyExprMeasure() throws ParseException, LensException {
 
     /* A query which contains only measure should pass, if the measure is present in some derived cube.
     roundedmsr1 ( an expression over msr1) is present in one of the derived cubes, hence query shall pass without
@@ -219,7 +218,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
 
   @Test
   public void testQueryWithMeasureAndChainReferencedDimAttributeInCaseStatement() throws ParseException,
-      SemanticException, LensException {
+      LensException {
 
     /* In this query a dimension attribute referenced through join chain name is used in case statement.
     A query which contains such a dim attribute and a measure is allowed even if the source column of the used dim
@@ -233,7 +232,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithDimAttributesNotInSameDerviedCube() throws ParseException, SemanticException, LensException {
+  public void testQueryWithDimAttributesNotInSameDerviedCube() throws ParseException, LensException {
 
     /* dim2 and countryid are not present in the same derived cube, hence query should be disallowed */
 
@@ -243,7 +242,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
 
   @Test
   public void testQueryWithDimExpressionssNotInSameDerviedCube()
-    throws ParseException, SemanticException, LensException {
+    throws ParseException, LensException {
 
     /* dim2, source columns of cubestate and countryid are not present in the same derived cube, hence query should be
      *  disallowed */
@@ -253,7 +252,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithMeasureNotInAnyDerviedCube() throws ParseException, SemanticException, LensException {
+  public void testQueryWithMeasureNotInAnyDerviedCube() throws ParseException, LensException {
 
     /* newmeasure is not present in any derived cube, hence the query should be disallowed. */
 
@@ -262,7 +261,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithExprMeasureNotInAnyDerviedCube() throws ParseException, SemanticException, LensException {
+  public void testQueryWithExprMeasureNotInAnyDerviedCube() throws ParseException, LensException {
 
     /* newexpr : expression over newmeasure is not present in any derived cube, hence the query should be disallowed. */
 
@@ -271,7 +270,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithReferencedDimAttributeAndMeasure() throws SemanticException, ParseException,
+  public void testQueryWithReferencedDimAttributeAndMeasure() throws ParseException,
       LensException {
 
     /* In this query a referenced dimension attribute is used in select statement. If the source column for such a
@@ -288,8 +287,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWtihTimeDimAndReplaceTimeDimSwitchTrue() throws ParseException, SemanticException,
-      LensException {
+  public void testQueryWtihTimeDimAndReplaceTimeDimSwitchTrue() throws ParseException, LensException {
 
     /* If a time dimension and measure are not present in the same derived cube, then query shall be disallowed.
 
@@ -309,8 +307,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWtihTimeDimAndReplaceTimeDimSwitchFalse() throws ParseException, SemanticException,
-      LensException {
+  public void testQueryWtihTimeDimAndReplaceTimeDimSwitchFalse() throws ParseException, LensException {
 
     /* If a time dimension and measure are not present in the same derived cube, then query shall be disallowed.
 
@@ -330,13 +327,13 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   private void testFieldsCannotBeQueriedTogetherError(final String testQuery, final List<String> conflictingFields)
-    throws ParseException, SemanticException, LensException {
+    throws ParseException, LensException {
     testFieldsCannotBeQueriedTogetherError(testQuery, conflictingFields, conf);
   }
 
   private void testFieldsCannotBeQueriedTogetherError(final String testQuery, final List<String> conflictingFields,
       final Configuration queryConf)
-    throws ParseException, SemanticException, LensException {
+    throws ParseException, LensException {
 
     try {
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
index 13058e2..9791502 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
@@ -26,7 +26,6 @@ import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.testng.Assert;
 import org.testng.annotations.BeforeTest;
@@ -147,7 +146,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
   }
 
   @Test
-  public void testDimOnlyDistinctQuery() throws SemanticException, ParseException, LensException {
+  public void testDimOnlyDistinctQuery() throws ParseException, LensException {
 
     conf.setBoolean(CubeQueryConfUtil.DISABLE_AGGREGATE_RESOLVER, false);
 
@@ -196,7 +195,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
   }
 
   @Test
-  public void testAggregateResolverOff() throws SemanticException, ParseException, LensException {
+  public void testAggregateResolverOff() throws ParseException, LensException {
     Configuration conf2 = getConfWithStorages("C1,C2");
     conf2.setBoolean(CubeQueryConfUtil.DISABLE_AGGREGATE_RESOLVER, true);
 
@@ -218,7 +217,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     rawFactSelectionTests(conf2);
   }
 
-  private void aggregateFactSelectionTests(Configuration conf) throws SemanticException, ParseException, LensException {
+  private void aggregateFactSelectionTests(Configuration conf) throws ParseException, LensException {
     String query = "SELECT count(distinct cityid) from testcube where " + TWO_DAYS_RANGE;
     CubeQueryContext cubeql = rewriteCtx(query, conf);
     String hQL = cubeql.toHQL();
@@ -262,7 +261,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     compareQueries(expectedQL, hQL);
   }
 
-  private void rawFactSelectionTests(Configuration conf) throws SemanticException, ParseException, LensException {
+  private void rawFactSelectionTests(Configuration conf) throws ParseException, LensException {
     // Check a query with non default aggregate function
     String query = "SELECT cityid, avg(testCube.msr2) FROM testCube WHERE " + TWO_DAYS_RANGE;
     CubeQueryContext cubeql = rewriteCtx(query, conf);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index 0d0b927..9120a70 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -32,15 +32,14 @@ import java.util.*;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.time.DateUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.antlr.runtime.CommonToken;
 import org.testng.Assert;
@@ -66,16 +65,16 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
 
   @Test
   public void testColumnErrors() throws Exception {
-    SemanticException e;
+    LensException e;
 
-    e = getSemanticExceptionInRewrite("select msr11 + msr2 from basecube" + " where " + TWO_DAYS_RANGE, conf);
-    assertEquals(e.getCanonicalErrorMsg().getErrorCode(),
-      ErrorMsg.EXPRESSION_NOT_IN_ANY_FACT.getErrorCode());
+    e = getLensExceptionInRewrite("select msr11 + msr2 from basecube" + " where " + TWO_DAYS_RANGE, conf);
+    assertEquals(e.getErrorCode(),
+        LensCubeErrorCode.EXPRESSION_NOT_IN_ANY_FACT.getValue());
     // no fact has the all the dimensions queried
-    e = getSemanticExceptionInRewrite("select dim1, test_time_dim, msr3, msr13 from basecube where "
+    e = getLensExceptionInRewrite("select dim1, test_time_dim, msr3, msr13 from basecube where "
       + TWO_DAYS_RANGE, conf);
-    assertEquals(e.getCanonicalErrorMsg().getErrorCode(),
-      ErrorMsg.NO_CANDIDATE_FACT_AVAILABLE.getErrorCode());
+    assertEquals(e.getErrorCode(),
+        LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getValue());
     PruneCauses.BriefAndDetailedError pruneCauses = extractPruneCause(e);
     String regexp = String.format(CandidateTablePruneCause.CandidateTablePruneCode.COLUMN_NOT_FOUND.errorFormat,
       "Column Sets: (.*?)", "queriable together");
@@ -438,8 +437,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     // If going to fallback timedim, and partitions are missing, then error should be missing partition on that
     conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C4");
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
-    SemanticException exc =
-      getSemanticExceptionInRewrite("cube select msr12 from basecube where " + TWO_DAYS_RANGE, conf);
+    LensException exc =
+      getLensExceptionInRewrite("cube select msr12 from basecube where " + TWO_DAYS_RANGE, conf);
     PruneCauses.BriefAndDetailedError pruneCause = extractPruneCause(exc);
     assertTrue(pruneCause.getBrief().contains("Missing partitions"));
     assertEquals(pruneCause.getDetails().get("testfact2_base").iterator().next().getCause(), MISSING_PARTITIONS);