Posted to commits@lens.apache.org by am...@apache.org on 2015/08/19 11:01:17 UTC

[3/3] incubator-lens git commit: LENS-187 : Move cube specific error message codes from Hive code to Lens

LENS-187 : Move cube specific error message codes from Hive code to Lens


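This moves the cube parser's error handling off Hive's SemanticException/ErrorMsg
pair and onto LensException with a Lens-owned LensCubeErrorCode enum; the message
templates themselves now live in lens-api's lens-errors.conf. A minimal sketch of
the pattern applied throughout the diff below (names taken from the
AggregateResolver hunk):

  // Before: error code and message both owned by Hive's ErrorMsg enum
  throw new SemanticException(ErrorMsg.NO_DEFAULT_AGGREGATE, colname);

  // After: the numeric code comes from LensCubeErrorCode, the template from
  // lens-errors.conf, and the format arguments are passed to LensException
  throw new LensException(LensCubeErrorCode.NO_DEFAULT_AGGREGATE.getValue(), colname);
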
Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/3dc348ac
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/3dc348ac
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/3dc348ac

Branch: refs/heads/master
Commit: 3dc348ac2793b5394f0ef58bba6da6f9933c5e1d
Parents: c368595
Author: Sushil Mohanty <su...@gmail.com>
Authored: Wed Aug 19 14:30:35 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Wed Aug 19 14:30:35 2015 +0530

----------------------------------------------------------------------
 lens-api/src/main/resources/lens-errors.conf    | 162 +++++++++++++++++++
 .../lens/cube/error/LensCubeErrorCode.java      |  29 +++-
 .../cube/parse/AbridgedTimeRangeWriter.java     |   8 +-
 .../lens/cube/parse/AggregateResolver.java      |  14 +-
 .../apache/lens/cube/parse/AliasReplacer.java   |  21 +--
 .../lens/cube/parse/BetweenTimeRangeWriter.java |  13 +-
 .../apache/lens/cube/parse/CandidateFact.java   |  20 +--
 .../lens/cube/parse/CandidateTableResolver.java |  41 ++---
 .../lens/cube/parse/CheckColumnMapping.java     |   5 +-
 .../apache/lens/cube/parse/CheckTableNames.java |   5 +-
 .../apache/lens/cube/parse/ColumnResolver.java  |  18 +--
 .../apache/lens/cube/parse/ContextRewriter.java |   6 +-
 .../lens/cube/parse/CubeQueryContext.java       |  73 ++++-----
 .../lens/cube/parse/CubeQueryRewriter.java      |  30 ++--
 .../lens/cube/parse/CubeSemanticAnalyzer.java   |  41 +----
 .../org/apache/lens/cube/parse/DateUtil.java    |  24 +--
 .../cube/parse/DenormalizationResolver.java     |  26 +--
 .../apache/lens/cube/parse/DimHQLContext.java   |  12 +-
 .../lens/cube/parse/DimOnlyHQLContext.java      |  11 +-
 .../lens/cube/parse/ExpressionResolver.java     |  75 ++++-----
 .../apache/lens/cube/parse/FactHQLContext.java  |   7 +-
 .../apache/lens/cube/parse/FieldValidator.java  |  12 +-
 .../apache/lens/cube/parse/GroupbyResolver.java |  12 +-
 .../lens/cube/parse/HQLContextInterface.java    |   7 +-
 .../org/apache/lens/cube/parse/HQLParser.java   |   6 +-
 .../apache/lens/cube/parse/JoinResolver.java    |  58 +++----
 .../lens/cube/parse/LeastPartitionResolver.java |   4 +-
 .../cube/parse/LightestDimensionResolver.java   |   4 +-
 .../lens/cube/parse/LightestFactResolver.java   |   4 +-
 .../cube/parse/MaxCoveringFactResolver.java     |   3 +-
 .../lens/cube/parse/MultiFactHQLContext.java    |  18 +--
 .../lens/cube/parse/SimpleHQLContext.java       |  10 +-
 .../lens/cube/parse/SingleFactHQLContext.java   |  13 +-
 .../parse/SingleFactMultiStorageHQLContext.java |   7 +-
 .../lens/cube/parse/StorageTableResolver.java   |  17 +-
 .../org/apache/lens/cube/parse/TimeRange.java   |  10 +-
 .../apache/lens/cube/parse/TimeRangeWriter.java |   5 +-
 .../lens/cube/parse/TimerangeResolver.java      |  24 +--
 .../apache/lens/cube/parse/UnionHQLContext.java |   5 +-
 .../apache/lens/cube/parse/ValidationRule.java  |   5 +-
 .../apache/lens/cube/parse/CubeTestSetup.java   |   5 +-
 .../FieldsCannotBeQueriedTogetherTest.java      |  47 +++---
 .../lens/cube/parse/TestAggregateResolver.java  |   9 +-
 .../lens/cube/parse/TestBaseCubeQueries.java    |  21 ++-
 .../lens/cube/parse/TestCubeRewriter.java       |  58 +++----
 .../apache/lens/cube/parse/TestDateUtil.java    |   6 +-
 .../cube/parse/TestDenormalizationResolver.java |  36 ++---
 .../lens/cube/parse/TestExpressionResolver.java |  32 ++--
 .../lens/cube/parse/TestJoinResolver.java       |  35 ++--
 .../lens/cube/parse/TestQueryRewrite.java       |  39 +++--
 .../lens/cube/parse/TestRewriterPlan.java       |   4 +-
 .../lens/cube/parse/TestTimeRangeExtractor.java |  14 +-
 .../lens/cube/parse/TestTimeRangeResolver.java  |   7 +-
 .../lens/cube/parse/TestTimeRangeWriter.java    |  17 +-
 .../parse/TestTimeRangeWriterWithQuery.java     |  10 +-
 .../driver/jdbc/TestColumnarSQLRewriter.java    |  54 +++----
 .../lens/server/api/error/LensException.java    |  12 +-
 .../UnSupportedQuerySubmitOpException.java      |   2 +-
 .../apache/lens/server/rewrite/RewriteUtil.java |  14 +-
 .../lens/server/query/TestQueryService.java     |  10 +-
 .../lens/server/rewrite/TestRewriting.java      |  15 +-
 61 files changed, 741 insertions(+), 571 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-api/src/main/resources/lens-errors.conf
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/lens-errors.conf b/lens-api/src/main/resources/lens-errors.conf
index e6715b6..3fb191e 100644
--- a/lens-api/src/main/resources/lens-errors.conf
+++ b/lens-api/src/main/resources/lens-errors.conf
@@ -94,6 +94,168 @@ lensCubeErrors = [
     payloadClass = org.apache.lens.cube.error.ConflictingFields
   }
 
+  {
+   errorCode = 3004
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No reference column available for : %s "
+  }
+
+ {
+   errorCode = 3005
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "More than one cube accessed in query : %s and %s"
+ }
+
+ {
+   errorCode = 3006
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Neither cube nor dimensions accessed in the query"
+ }
+
+ {
+   errorCode = 3007
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No timerange filter specified"
+ }
+
+ {
+   errorCode = 3008
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "%s is not timed dimension"
+ }
+
+ {
+   errorCode = 3009
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Error in parsing input date format. Expected format %s, date provided %s"
+ }
+
+ {
+   errorCode = 3010
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Date value cannot be null or empty"
+ }
+
+ {
+   errorCode = 3011
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Invalid time unit %s"
+ }
+
+ {
+   errorCode = 3012
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Selecting all columns is not supported"
+ }
+
+ {
+   errorCode = 3013
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Ambiguous column %s, in dimensions %s and %s"
+ }
+
+ {
+   errorCode = 3014
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Ambiguous column %s, in cube: %s and in dimension: %s"
+ }
+
+ {
+   errorCode = 3015
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Could not find the table containing column: %s"
+ }
+
+ {
+   errorCode = 3016
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "%s : Not a cube column"
+ }
+
+ {
+   errorCode = 3017
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No candidate fact table available to answer the query, because %s"
+ }
+
+ {
+   errorCode = 3018
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No join condition available"
+ }
+
+ {
+   errorCode = 3019
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No storage table available for candidate fact: %s"
+ }
+
+ {
+   errorCode = 3020
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Default aggregate is not set for measure: %s"
+ }
+
+ {
+   errorCode = 3021
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Invalid time range"
+ }
+
+ {
+   errorCode = 3022
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "From date: %s  should be smaller than to date: %s"
+ }
+
+ {
+   errorCode = 3023
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No join path defined from %s to %s"
+ }
+
+ {
+   errorCode = 3024
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Join target table: %s is neither dimension nor cube"
+ }
+
+ {
+   errorCode = 3025
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No fact table has the queried columns : %s"
+ }
+
+ {
+   errorCode = 3026
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No candidate dimension storage tables for dimension because %s"
+ }
+
+ {
+   errorCode = 3027
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No dimension table has the queried columns for %s, columns: %s"
+ }
+
+ {
+   errorCode = 3028
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No candidate dimension table available for %s to answer the query, because %s"
+ }
+
+ {
+   errorCode = 3029
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Configured timerange writer cannot be used. Reason %s"
+ }
+
+ {
+   errorCode = 3030
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Expression %s is not available in any fact"
+ }
+
 ]
 
 # Overriding errors in lens-errors.conf via lens-errors-override.conf:

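Each new lens-errors.conf entry pairs a numeric errorCode (kept in sync with the
matching LensCubeErrorCode value added below), an HTTP status for the REST
response, and a printf-style errorMsg template. A hedged sketch of how the %s
placeholders line up with the varargs passed to LensException; the helper here is
hypothetical, shown only to make the placeholder contract concrete, since the real
formatting happens inside Lens's error framework:

  // Hypothetical illustration only; not a Lens API.
  static String renderErrorMsg(String template, Object... args) {
    // errorMsg templates in lens-errors.conf use %s placeholders, so
    // printf-style formatting reproduces the message a client would see.
    return String.format(template, args);
  }

  // For error 3020 raised as
  //   new LensException(LensCubeErrorCode.NO_DEFAULT_AGGREGATE.getValue(), "sales")
  // the rendered message would be:
  //   "Default aggregate is not set for measure: sales"
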
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
index 0006b22..1fe74e2 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
@@ -22,7 +22,34 @@ public enum LensCubeErrorCode {
 
   SYNTAX_ERROR(3001),
   COLUMN_UNAVAILABLE_IN_TIME_RANGE(3002),
-  FIELDS_CANNOT_BE_QUERIED_TOGETHER(3003);
+  FIELDS_CANNOT_BE_QUERIED_TOGETHER(3003),
+  NO_REF_COL_AVAILABLE(3004),
+  MORE_THAN_ONE_CUBE(3005),
+  NEITHER_CUBE_NOR_DIMENSION(3006),
+  NO_TIMERANGE_FILTER(3007),
+  NOT_A_TIMED_DIMENSION(3008),
+  WRONG_TIME_RANGE_FORMAT(3009),
+  NULL_DATE_VALUE(3010),
+  INVALID_TIME_UNIT(3011),
+  ALL_COLUMNS_NOT_SUPPORTED(3012),
+  AMBIGOUS_DIM_COLUMN(3013),
+  AMBIGOUS_CUBE_COLUMN(3014),
+  COLUMN_NOT_FOUND(3015),
+  NOT_A_CUBE_COLUMN(3016),
+  NO_CANDIDATE_FACT_AVAILABLE(3017),
+  NO_JOIN_CONDITION_AVAIABLE(3018),
+  NO_STORAGE_TABLE_AVAIABLE(3019),
+  NO_DEFAULT_AGGREGATE(3020),
+  INVALID_TIME_RANGE(3021),
+  FROM_AFTER_TO(3022),
+  NO_JOIN_PATH(3023),
+  JOIN_TARGET_NOT_CUBE_TABLE(3024),
+  NO_FACT_HAS_COLUMN(3025),
+  NO_CANDIDATE_DIM_STORAGE_TABLES(3026),
+  NO_DIM_HAS_COLUMN(3027),
+  NO_CANDIDATE_DIM_AVAILABLE(3028),
+  CANNOT_USE_TIMERANGE_WRITER(3029),
+  EXPRESSION_NOT_IN_ANY_FACT(3030);
 
   public int getValue() {
     return this.errorCode;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/AbridgedTimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AbridgedTimeRangeWriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AbridgedTimeRangeWriter.java
index d5276e0..2caea56 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AbridgedTimeRangeWriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AbridgedTimeRangeWriter.java
@@ -22,9 +22,9 @@ package org.apache.lens.cube.parse;
 import java.util.*;
 
 import org.apache.lens.cube.metadata.FactPartition;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
@@ -42,12 +42,12 @@ public class AbridgedTimeRangeWriter implements TimeRangeWriter {
    * @param tableName
    * @param parts
    * @return
-   * @throws SemanticException
+   * @throws LensException
    */
   @Override
   public String getTimeRangeWhereClause(CubeQueryContext cubeQueryContext,
     String tableName,
-    Set<FactPartition> parts) throws SemanticException {
+    Set<FactPartition> parts) throws LensException {
     if (parts == null || parts.isEmpty()) {
       return "";
     }
@@ -73,7 +73,7 @@ public class AbridgedTimeRangeWriter implements TimeRangeWriter {
 
   private String getClause(CubeQueryContext cubeQueryContext,
     String tableName,
-    Set<FactPartition> parts) throws SemanticException {
+    Set<FactPartition> parts) throws LensException {
     Map<String, List<String>> partFilterMap = new HashMap<String, List<String>>();
     List<String> allTimeRangeFilters = new ArrayList<String>();
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
index f880495..9c0f936 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
@@ -24,16 +24,16 @@ import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_TABLE_OR_COL;
 
 import java.util.Iterator;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.CubeMeasure;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.antlr.runtime.CommonToken;
 
@@ -51,7 +51,7 @@ class AggregateResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     if (cubeql.getCube() == null) {
       return;
     }
@@ -110,7 +110,7 @@ class AggregateResolver implements ContextRewriter {
   // We need to traverse the clause looking for eligible measures which can be
   // wrapped inside aggregates
   // We have to skip any columns that are already inside an aggregate UDAF
-  private String resolveClause(CubeQueryContext cubeql, ASTNode clause) throws SemanticException {
+  private String resolveClause(CubeQueryContext cubeql, ASTNode clause) throws LensException {
 
     if (clause == null) {
       return null;
@@ -123,7 +123,7 @@ class AggregateResolver implements ContextRewriter {
     return HQLParser.getString(clause);
   }
 
-  private void transform(CubeQueryContext cubeql, ASTNode parent, ASTNode node, int nodePos) throws SemanticException {
+  private void transform(CubeQueryContext cubeql, ASTNode parent, ASTNode node, int nodePos) throws LensException {
     if (node == null) {
       return;
     }
@@ -158,7 +158,7 @@ class AggregateResolver implements ContextRewriter {
 
   // Wrap an aggregate function around the node if its a measure, leave it
   // unchanged otherwise
-  private ASTNode wrapAggregate(CubeQueryContext cubeql, ASTNode node) throws SemanticException {
+  private ASTNode wrapAggregate(CubeQueryContext cubeql, ASTNode node) throws LensException {
 
     String tabname = null;
     String colname;
@@ -188,7 +188,7 @@ class AggregateResolver implements ContextRewriter {
         String aggregateFn = measure.getAggregate();
 
         if (StringUtils.isBlank(aggregateFn)) {
-          throw new SemanticException(ErrorMsg.NO_DEFAULT_AGGREGATE, colname);
+          throw new LensException(LensCubeErrorCode.NO_DEFAULT_AGGREGATE.getValue(), colname);
         }
         ASTNode fnroot = new ASTNode(new CommonToken(HiveParser.TOK_FUNCTION));
         fnroot.setParent(node.getParent());

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
index 9309307..98e38d5 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
@@ -25,15 +25,15 @@ import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.CubeInterface;
 import org.apache.lens.cube.metadata.Dimension;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.antlr.runtime.CommonToken;
 
@@ -50,7 +50,7 @@ class AliasReplacer implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     Map<String, String> colToTableAlias = cubeql.getColToTableAlias();
 
     extractTabAliasForCol(cubeql);
@@ -93,9 +93,9 @@ class AliasReplacer implements ContextRewriter {
   /**
    * Figure out queried dim attributes and measures from the cube query context
    * @param cubeql
-   * @throws SemanticException
+   * @throws LensException
    */
-  private void findDimAttributesAndMeasures(CubeQueryContext cubeql) throws SemanticException {
+  private void findDimAttributesAndMeasures(CubeQueryContext cubeql) throws LensException {
     CubeInterface cube = cubeql.getCube();
     if (cube != null) {
       Set<String> cubeColsQueried = cubeql.getColumnsQueried(cube.getName());
@@ -119,11 +119,11 @@ class AliasReplacer implements ContextRewriter {
     }
   }
 
-  private void extractTabAliasForCol(CubeQueryContext cubeql) throws SemanticException {
+  private void extractTabAliasForCol(CubeQueryContext cubeql) throws LensException {
     extractTabAliasForCol(cubeql, cubeql);
   }
 
-  static void extractTabAliasForCol(CubeQueryContext cubeql, TrackQueriedColumns tqc) throws SemanticException {
+  static void extractTabAliasForCol(CubeQueryContext cubeql, TrackQueriedColumns tqc) throws LensException {
     Map<String, String> colToTableAlias = cubeql.getColToTableAlias();
     Set<String> columns = tqc.getTblAliasToColumns().get(CubeQueryContext.DEFAULT_TABLE);
     if (columns == null) {
@@ -145,19 +145,20 @@ class AliasReplacer implements ContextRewriter {
           if (!inCube) {
             String prevDim = colToTableAlias.get(col.toLowerCase());
             if (prevDim != null && !prevDim.equals(dim.getName())) {
-              throw new SemanticException(ErrorMsg.AMBIGOUS_DIM_COLUMN, col, prevDim, dim.getName());
+              throw new LensException(LensCubeErrorCode.AMBIGOUS_DIM_COLUMN.getValue(), col, prevDim, dim.getName());
             }
             String dimAlias = cubeql.getAliasForTableName(dim.getName());
             colToTableAlias.put(col.toLowerCase(), dimAlias);
             tqc.addColumnsQueried(dimAlias, col.toLowerCase());
           } else {
             // throw error because column is in both cube and dimension table
-            throw new SemanticException(ErrorMsg.AMBIGOUS_CUBE_COLUMN, col, cubeql.getCube().getName(), dim.getName());
+            throw new LensException(LensCubeErrorCode.AMBIGOUS_CUBE_COLUMN.getValue(), col,
+                cubeql.getCube().getName(), dim.getName());
           }
         }
       }
       if (colToTableAlias.get(col.toLowerCase()) == null) {
-        throw new SemanticException(ErrorMsg.COLUMN_NOT_FOUND, col);
+        throw new LensException(LensCubeErrorCode.COLUMN_NOT_FOUND.getValue(), col);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java
index 6c85c2d..4bd7cc8 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java
@@ -22,10 +22,10 @@ import java.util.Iterator;
 import java.util.Set;
 import java.util.TreeSet;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.FactPartition;
+import org.apache.lens.server.api.error.LensException;
 
-import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 /**
  * Writes partitions queried in timerange as between clause.
@@ -34,7 +34,7 @@ public class BetweenTimeRangeWriter implements TimeRangeWriter {
 
   @Override
   public String getTimeRangeWhereClause(CubeQueryContext cubeQueryContext, String tableName,
-    Set<FactPartition> rangeParts) throws SemanticException {
+    Set<FactPartition> rangeParts) throws LensException {
     if (rangeParts.size() == 0) {
       return "";
     }
@@ -52,18 +52,19 @@ public class BetweenTimeRangeWriter implements TimeRangeWriter {
       while (it.hasNext()) {
         FactPartition part = it.next();
         if (part.hasContainingPart()) {
-          throw new SemanticException(ErrorMsg.CANNOT_USE_TIMERANGE_WRITER, "Partition has containing part");
+          throw new LensException(LensCubeErrorCode.CANNOT_USE_TIMERANGE_WRITER.getValue(),
+              "Partition has containing part");
         }
         if (first == null) {
           first = part;
         } else {
           // validate partcol, update period are same for both
           if (!first.getPartCol().equalsIgnoreCase(part.getPartCol())) {
-            throw new SemanticException(ErrorMsg.CANNOT_USE_TIMERANGE_WRITER,
+            throw new LensException(LensCubeErrorCode.CANNOT_USE_TIMERANGE_WRITER.getValue(),
               "Part columns are different in partitions");
           }
           if (!first.getPeriod().equals(part.getPeriod())) {
-            throw new SemanticException(ErrorMsg.CANNOT_USE_TIMERANGE_WRITER,
+            throw new LensException(LensCubeErrorCode.CANNOT_USE_TIMERANGE_WRITER.getValue(),
               "Partitions are in different update periods");
           }
         }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
index 6c88fb3..8a6aa00 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
@@ -28,6 +28,7 @@ import org.apache.lens.cube.metadata.CubeInterface;
 import org.apache.lens.cube.metadata.FactPartition;
 import org.apache.lens.cube.parse.HQLParser.ASTNodeVisitor;
 import org.apache.lens.cube.parse.HQLParser.TreeNode;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -35,7 +36,6 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.antlr.runtime.CommonToken;
@@ -118,7 +118,7 @@ public class CandidateFact implements CandidateTable {
     numQueriedParts += incr;
   }
 
-  private void updateTimeRanges(ASTNode root, ASTNode parent, int childIndex) throws SemanticException {
+  private void updateTimeRanges(ASTNode root, ASTNode parent, int childIndex) throws LensException {
     if (root == null) {
       return;
     } else if (root.getToken().getType() == TOK_FUNCTION) {
@@ -135,7 +135,7 @@ public class CandidateFact implements CandidateTable {
   }
 
   // copy ASTs from CubeQueryContext
-  public void copyASTs(CubeQueryContext cubeql) throws SemanticException {
+  public void copyASTs(CubeQueryContext cubeql) throws LensException {
     this.selectAST = HQLParser.copyAST(cubeql.getSelectAST());
     this.whereAST = HQLParser.copyAST(cubeql.getWhereAST());
     if (cubeql.getJoinTree() != null) {
@@ -155,7 +155,7 @@ public class CandidateFact implements CandidateTable {
     return getStorgeWhereClauseMap().get(storageTable);
   }
 
-  public void updateTimeranges(CubeQueryContext cubeql) throws SemanticException {
+  public void updateTimeranges(CubeQueryContext cubeql) throws LensException {
     // Update WhereAST with range clause
     // resolve timerange positions and replace it by corresponding where clause
     for (int i = 0; i < cubeql.getTimeRanges().size(); i++) {
@@ -166,7 +166,7 @@ public class CandidateFact implements CandidateTable {
         try {
           rangeAST = HQLParser.parseExpr(rangeWhere);
         } catch (ParseException e) {
-          throw new SemanticException(e);
+          throw new LensException(e);
         }
         rangeAST.setParent(timenodes.get(i).parent);
         timenodes.get(i).parent.setChild(timenodes.get(i).childIndex, rangeAST);
@@ -178,9 +178,9 @@ public class CandidateFact implements CandidateTable {
    * Update the ASTs to include only the fields queried from this fact, in all the expressions
    *
    * @param cubeql
-   * @throws SemanticException
+   * @throws LensException
    */
-  public void updateASTs(CubeQueryContext cubeql) throws SemanticException {
+  public void updateASTs(CubeQueryContext cubeql) throws LensException {
     Set<String> cubeCols = cubeql.getCube().getAllFieldNames();
 
     // update select AST with selected fields
@@ -223,7 +223,7 @@ public class CandidateFact implements CandidateTable {
   }
 
   private Set<String> getColsInExpr(final CubeQueryContext cubeql, final Set<String> cubeCols,
-    ASTNode expr) throws SemanticException {
+    ASTNode expr) throws LensException {
     final Set<String> cubeColsInExpr = new HashSet<String>();
     HQLParser.bft(expr, new ASTNodeVisitor() {
       @Override
@@ -405,7 +405,7 @@ public class CandidateFact implements CandidateTable {
     return null;
   }
 
-  public Set<String> getTimePartCols(CubeQueryContext query) throws SemanticException {
+  public Set<String> getTimePartCols(CubeQueryContext query) throws LensException {
     Set<String> cubeTimeDimensions = baseTable.getTimedDimensions();
     Set<String> timePartDimensions = new HashSet<String>();
     String singleStorageTable = storageTables.iterator().next();
@@ -413,7 +413,7 @@ public class CandidateFact implements CandidateTable {
     try {
       partitionKeys = query.getMetastoreClient().getTable(singleStorageTable).getPartitionKeys();
     } catch (HiveException e) {
-      throw new SemanticException(e);
+      throw new LensException(e);
     }
     for (FieldSchema fs : partitionKeys) {
       if (cubeTimeDimensions.contains(CubeQueryContext.getTimeDimOfPartitionColumn(baseTable, fs.getName()))) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
index 3e73d02..69fbcc5 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
@@ -20,18 +20,18 @@ package org.apache.lens.cube.parse;
 
 import java.util.*;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.cube.parse.CubeQueryContext.OptionalDimCtx;
 import org.apache.lens.cube.parse.CubeQueryContext.QueriedExprColumn;
 import org.apache.lens.cube.parse.ExpressionResolver.ExprSpecContext;
 import org.apache.lens.cube.parse.ExpressionResolver.ExpressionContext;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import com.google.common.collect.Sets;
 
@@ -56,7 +56,7 @@ class CandidateTableResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     if (checkForQueriedColumns) {
       log.debug("Dump queried columns:{}", cubeql.getTblAliasToColumns());
       populateCandidateTables(cubeql);
@@ -87,12 +87,12 @@ class CandidateTableResolver implements ContextRewriter {
     }
   }
 
-  private void populateCandidateTables(CubeQueryContext cubeql) throws SemanticException {
+  private void populateCandidateTables(CubeQueryContext cubeql) throws LensException {
     try {
       if (cubeql.getCube() != null) {
         List<CubeFactTable> factTables = cubeql.getMetastoreClient().getAllFacts(cubeql.getCube());
         if (factTables.isEmpty()) {
-          throw new SemanticException(ErrorMsg.NO_CANDIDATE_FACT_AVAILABLE, cubeql.getCube().getName()
+          throw new LensException(LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getValue(), cubeql.getCube().getName()
             + " does not have any facts");
         }
         for (CubeFactTable fact : factTables) {
@@ -108,11 +108,11 @@ class CandidateTableResolver implements ContextRewriter {
         }
       }
     } catch (HiveException e) {
-      throw new SemanticException(e);
+      throw new LensException(e);
     }
   }
 
-  private void populateDimTables(Dimension dim, CubeQueryContext cubeql, boolean optional) throws SemanticException {
+  private void populateDimTables(Dimension dim, CubeQueryContext cubeql, boolean optional) throws LensException {
     if (cubeql.getCandidateDimTables().get(dim) != null) {
       return;
     }
@@ -122,7 +122,7 @@ class CandidateTableResolver implements ContextRewriter {
       List<CubeDimensionTable> dimtables = cubeql.getMetastoreClient().getAllDimensionTables(dim);
       if (dimtables.isEmpty()) {
         if (!optional) {
-          throw new SemanticException(ErrorMsg.NO_CANDIDATE_DIM_AVAILABLE, dim.getName(),
+          throw new LensException(LensCubeErrorCode.NO_CANDIDATE_DIM_AVAILABLE.getValue(), dim.getName(),
             "Dimension tables do not exist");
         } else {
           log.info("Not considering optional dimension {}  as, No dimension tables exist", dim);
@@ -135,7 +135,7 @@ class CandidateTableResolver implements ContextRewriter {
       }
       log.info("Populated candidate dims: {} for {}", cubeql.getCandidateDimTables().get(dim), dim);
     } catch (HiveException e) {
-      throw new SemanticException(e);
+      throw new LensException(e);
     }
   }
 
@@ -198,7 +198,7 @@ class CandidateTableResolver implements ContextRewriter {
     }
   }
 
-  private void resolveCandidateFactTables(CubeQueryContext cubeql) throws SemanticException {
+  private void resolveCandidateFactTables(CubeQueryContext cubeql) throws LensException {
     if (cubeql.getCube() != null) {
       String str = cubeql.getConf().get(CubeQueryConfUtil.getValidFactTablesKey(cubeql.getCube().getName()));
       List<String> validFactTables =
@@ -284,7 +284,7 @@ class CandidateTableResolver implements ContextRewriter {
       Set<String> dimExprs = new HashSet<String>(cubeql.getQueriedExprs());
       dimExprs.removeAll(cubeql.getQueriedExprsWithMeasures());
       if (cubeql.getCandidateFacts().size() == 0) {
-        throw new SemanticException(ErrorMsg.NO_FACT_HAS_COLUMN,
+        throw new LensException(LensCubeErrorCode.NO_FACT_HAS_COLUMN.getValue(),
           (!queriedDimAttrs.isEmpty() ? queriedDimAttrs.toString() : "")
           +  (!dimExprs.isEmpty() ? dimExprs.toString() : ""));
       }
@@ -308,14 +308,14 @@ class CandidateTableResolver implements ContextRewriter {
         String msrString = (!queriedMsrs.isEmpty() ? queriedMsrs.toString() : "")
           + (!cubeql.getQueriedExprsWithMeasures().isEmpty() ? cubeql.getQueriedExprsWithMeasures().toString() : "");
         if (cfactset.isEmpty()) {
-          throw new SemanticException(ErrorMsg.NO_FACT_HAS_COLUMN, msrString);
+          throw new LensException(LensCubeErrorCode.NO_FACT_HAS_COLUMN.getValue(), msrString);
         }
         cubeql.getCandidateFactSets().addAll(cfactset);
         cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCause.columnNotFound(queriedMsrs,
           cubeql.getQueriedExprsWithMeasures()));
 
         if (cubeql.getCandidateFacts().size() == 0) {
-          throw new SemanticException(ErrorMsg.NO_FACT_HAS_COLUMN, msrString);
+          throw new LensException(LensCubeErrorCode.NO_FACT_HAS_COLUMN.getValue(), msrString);
         }
       }
     }
@@ -361,7 +361,7 @@ class CandidateTableResolver implements ContextRewriter {
     return cfactset;
   }
 
-  private void resolveCandidateDimTablesForJoinsAndDenorms(CubeQueryContext cubeql) throws SemanticException {
+  private void resolveCandidateDimTablesForJoinsAndDenorms(CubeQueryContext cubeql) throws LensException {
     if (cubeql.getAutoJoinCtx() == null) {
       return;
     }
@@ -431,8 +431,8 @@ class CandidateTableResolver implements ContextRewriter {
           OptionalDimCtx optdim = cubeql.getOptionalDimensionMap().get(dim);
           if ((cubeql.getDimensions() != null && cubeql.getDimensions().contains(dim))
             || (optdim != null && optdim.isRequiredInJoinChain)) {
-            throw new SemanticException(ErrorMsg.NO_DIM_HAS_COLUMN, dim.getName(), cubeql.getAutoJoinCtx()
-              .getAllJoinPathColumnsOfTable(dim).toString());
+            throw new LensException(LensCubeErrorCode.NO_DIM_HAS_COLUMN.getValue(), dim.getName(),
+                cubeql.getAutoJoinCtx().getAllJoinPathColumnsOfTable(dim).toString());
           } else {
             // remove it from optional tables
             log.info("Not considering optional dimension {} as, No dimension table has the queried columns:{}"
@@ -444,7 +444,7 @@ class CandidateTableResolver implements ContextRewriter {
     }
   }
 
-  private void resolveCandidateFactTablesForJoins(CubeQueryContext cubeql) throws SemanticException {
+  private void resolveCandidateFactTablesForJoins(CubeQueryContext cubeql) throws LensException {
     if (cubeql.getAutoJoinCtx() == null) {
       return;
     }
@@ -474,7 +474,8 @@ class CandidateTableResolver implements ContextRewriter {
         }
       }
       if (cubeql.getCandidateFacts().size() == 0) {
-        throw new SemanticException(ErrorMsg.NO_FACT_HAS_COLUMN, colSet == null ? "NULL" : colSet.toString());
+        throw new LensException(LensCubeErrorCode.NO_FACT_HAS_COLUMN.getValue(),
+            colSet == null ? "NULL" : colSet.toString());
       }
     }
   }
@@ -633,7 +634,7 @@ class CandidateTableResolver implements ContextRewriter {
     }
   }
 
-  private void resolveCandidateDimTables(CubeQueryContext cubeql) throws SemanticException {
+  private void resolveCandidateDimTables(CubeQueryContext cubeql) throws LensException {
     if (cubeql.getDimensions().size() != 0) {
       for (Dimension dim : cubeql.getDimensions()) {
         // go over the columns accessed in the query and find out which tables
@@ -668,7 +669,7 @@ class CandidateTableResolver implements ContextRewriter {
         }
 
         if (cubeql.getCandidateDimTables().get(dim).size() == 0) {
-          throw new SemanticException(ErrorMsg.NO_DIM_HAS_COLUMN, dim.getName(), cubeql
+          throw new LensException(LensCubeErrorCode.NO_DIM_HAS_COLUMN.getValue(), dim.getName(), cubeql
             .getColumnsQueried(dim.getName()).toString());
         }
       }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckColumnMapping.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckColumnMapping.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckColumnMapping.java
index 59e4fd3..33a5dda 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckColumnMapping.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckColumnMapping.java
@@ -18,8 +18,9 @@
  */
 package org.apache.lens.cube.parse;
 
+import org.apache.lens.server.api.error.LensException;
+
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 public class CheckColumnMapping extends ValidationRule {
 
@@ -28,7 +29,7 @@ public class CheckColumnMapping extends ValidationRule {
   }
 
   @Override
-  public boolean validate(CubeQueryContext ctx) throws SemanticException {
+  public boolean validate(CubeQueryContext ctx) throws LensException {
     // TODO
     return true;
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckTableNames.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckTableNames.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckTableNames.java
index fce1a04..8586262 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckTableNames.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckTableNames.java
@@ -18,8 +18,9 @@
  */
 package org.apache.lens.cube.parse;
 
+import org.apache.lens.server.api.error.LensException;
+
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 public class CheckTableNames extends ValidationRule {
 
@@ -28,7 +29,7 @@ public class CheckTableNames extends ValidationRule {
   }
 
   @Override
-  public boolean validate(CubeQueryContext ctx) throws SemanticException {
+  public boolean validate(CubeQueryContext ctx) throws LensException {
     // TODO
     return true;
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
index 2ff5959..dfe0c32 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
@@ -23,13 +23,13 @@ import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 import java.util.HashSet;
 import java.util.Set;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.parse.HQLParser.ASTNodeVisitor;
 import org.apache.lens.cube.parse.HQLParser.TreeNode;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import com.google.common.base.Optional;
 
@@ -39,11 +39,11 @@ class ColumnResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     extractColumns(cubeql);
   }
 
-  private void extractColumns(CubeQueryContext cubeql) throws SemanticException {
+  private void extractColumns(CubeQueryContext cubeql) throws LensException {
     // Check if its 'select * from...'
     ASTNode selTree = cubeql.getSelectAST();
     if (selTree.getChildCount() == 1) {
@@ -55,7 +55,7 @@ class ColumnResolver implements ContextRewriter {
       if (star != null) {
         int starType = star.getToken().getType();
         if (TOK_FUNCTIONSTAR == starType || TOK_ALLCOLREF == starType) {
-          throw new SemanticException(ErrorMsg.ALL_COLUMNS_NOT_SUPPORTED);
+          throw new LensException(LensCubeErrorCode.ALL_COLUMNS_NOT_SUPPORTED.getValue());
         }
       }
     }
@@ -70,7 +70,7 @@ class ColumnResolver implements ContextRewriter {
     for (String table : cubeql.getTblAliasToColumns().keySet()) {
       if (!CubeQueryContext.DEFAULT_TABLE.equalsIgnoreCase(table)) {
         if (!cubeql.addQueriedTable(table)) {
-          throw new SemanticException(ErrorMsg.NEITHER_CUBE_NOR_DIMENSION);
+          throw new LensException(LensCubeErrorCode.NEITHER_CUBE_NOR_DIMENSION.getValue());
         }
       }
     }
@@ -78,7 +78,7 @@ class ColumnResolver implements ContextRewriter {
 
   // finds columns in AST passed.
   static void getColsForTree(final CubeQueryContext cubeql, ASTNode tree, final TrackQueriedColumns tqc)
-    throws SemanticException {
+    throws LensException {
     if (tree == null) {
       return;
     }
@@ -124,7 +124,7 @@ class ColumnResolver implements ContextRewriter {
   // added
   // only if timerange clause shouldn't be replaced with its correspodning
   // partition column
-  private void getColsForWhereTree(final CubeQueryContext cubeql) throws SemanticException {
+  private void getColsForWhereTree(final CubeQueryContext cubeql) throws LensException {
     if (cubeql.getWhereAST() == null) {
       return;
     }
@@ -152,7 +152,7 @@ class ColumnResolver implements ContextRewriter {
   // and user given alias is the final alias of the expression.
   private static final String SELECT_ALIAS_PREFIX = "expr";
 
-  private void getColsForSelectTree(final CubeQueryContext cubeql) throws SemanticException {
+  private void getColsForSelectTree(final CubeQueryContext cubeql) throws LensException {
     int exprInd = 1;
     for (int i = 0; i < cubeql.getSelectAST().getChildCount(); i++) {
       ASTNode selectExpr = (ASTNode) cubeql.getSelectAST().getChild(i);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/ContextRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ContextRewriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ContextRewriter.java
index feb26d7..073bc02 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ContextRewriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ContextRewriter.java
@@ -20,14 +20,12 @@ package org.apache.lens.cube.parse;
 
 import org.apache.lens.server.api.error.LensException;
 
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-
 interface ContextRewriter {
   /**
    * Rewrites and updates {@link CubeQueryContext}
    *
    * @param cubeql CubeQueryContext
-   * @throws SemanticException
+   * @throws LensException
    */
-  void rewriteContext(CubeQueryContext cubeql) throws SemanticException, LensException;
+  void rewriteContext(CubeQueryContext cubeql) throws LensException;
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index d753e3f..16429f0 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -29,14 +29,15 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.util.*;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.*;
 
@@ -166,7 +167,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
     new HashMap<Dimension, PruneCauses<CubeDimensionTable>>();
 
   public CubeQueryContext(ASTNode ast, QB qb, Configuration queryConf, HiveConf metastoreConf)
-    throws SemanticException {
+    throws LensException {
     this.ast = ast;
     this.qb = qb;
     this.conf = queryConf;
@@ -175,7 +176,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
     try {
       metastoreClient = CubeMetastoreClient.getInstance(metastoreConf);
     } catch (HiveException e) {
-      throw new SemanticException(e);
+      throw new LensException(e);
     }
     if (qb.getParseInfo().getWhrForClause(clauseName) != null) {
       this.whereAST = qb.getParseInfo().getWhrForClause(clauseName);
@@ -209,7 +210,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
     return dimensions != null && !dimensions.isEmpty();
   }
 
-  private void extractMetaTables() throws SemanticException {
+  private void extractMetaTables() throws LensException {
     List<String> tabAliases = new ArrayList<String>(qb.getTabAliases());
     Set<String> missing = new HashSet<String>();
     for (String alias : tabAliases) {
@@ -223,12 +224,12 @@ public class CubeQueryContext implements TrackQueriedColumns {
       boolean added = addJoinChain(alias, false);
       if (!added) {
         log.info("Queried tables do not exist. Missing table:{}", alias);
-        throw new SemanticException(ErrorMsg.NEITHER_CUBE_NOR_DIMENSION);
+        throw new LensException(LensCubeErrorCode.NEITHER_CUBE_NOR_DIMENSION.getValue());
       }
     }
   }
 
-  private boolean addJoinChain(String alias, boolean isOptional) throws SemanticException {
+  private boolean addJoinChain(String alias, boolean isOptional) throws LensException {
     boolean retVal = false;
     String aliasLowerCaseStr = alias.toLowerCase();
     JoinChain joinchain = null;
@@ -258,7 +259,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
       boolean added = addQueriedTable(alias, destTable, isOptional, true);
       if (!added) {
         log.info("Queried tables do not exist. Missing tables:{}", destTable);
-        throw new SemanticException(ErrorMsg.NEITHER_CUBE_NOR_DIMENSION);
+        throw new LensException(LensCubeErrorCode.NEITHER_CUBE_NOR_DIMENSION.getValue());
       }
       log.info("Added join chain for {}", destTable);
       return true;
@@ -267,11 +268,11 @@ public class CubeQueryContext implements TrackQueriedColumns {
     return retVal;
   }
 
-  public boolean addQueriedTable(String alias) throws SemanticException {
+  public boolean addQueriedTable(String alias) throws LensException {
     return addQueriedTable(alias, false);
   }
 
-  private boolean addQueriedTable(String alias, boolean isOptional) throws SemanticException {
+  private boolean addQueriedTable(String alias, boolean isOptional) throws LensException {
     String tblName = qb.getTabNameForAlias(alias);
     if (tblName == null) {
       tblName = alias;
@@ -295,10 +296,10 @@ public class CubeQueryContext implements TrackQueriedColumns {
    * @param isChainedDimension pass true when you're adding the dimension as a joinchain destination, pass false when
    *                           this table is mentioned by name in the user query
    * @return true if added
-   * @throws SemanticException
+   * @throws LensException
    */
   private boolean addQueriedTable(String alias, String tblName, boolean isOptional, boolean isChainedDimension)
-    throws SemanticException {
+    throws LensException {
     alias = alias.toLowerCase();
     if (cubeTbls.containsKey(alias)) {
       return true;
@@ -307,7 +308,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
       if (metastoreClient.isCube(tblName)) {
         if (cube != null) {
           if (!cube.getName().equalsIgnoreCase(tblName)) {
-            throw new SemanticException(ErrorMsg.MORE_THAN_ONE_CUBE, cube.getName(), tblName);
+            throw new LensException(LensCubeErrorCode.MORE_THAN_ONE_CUBE.getValue(), cube.getName(), tblName);
           }
         }
         cube = metastoreClient.getCube(tblName);
@@ -398,22 +399,22 @@ public class CubeQueryContext implements TrackQueriedColumns {
     boolean isRequiredInJoinChain = false;
   }
 
-  public void addOptionalJoinDimTable(String alias, boolean isRequired) throws SemanticException {
+  public void addOptionalJoinDimTable(String alias, boolean isRequired) throws LensException {
     addOptionalDimTable(alias, null, isRequired, null, false, (String[])null);
   }
 
   public void addOptionalExprDimTable(String dimAlias, String queriedExpr, String srcTableAlias,
-    CandidateTable candidate, String... cols) throws SemanticException {
+    CandidateTable candidate, String... cols) throws LensException {
     addOptionalDimTable(dimAlias, candidate, false, queriedExpr, false, srcTableAlias, cols);
   }
 
   public void addOptionalDimTable(String alias, CandidateTable candidate, boolean isRequiredInJoin, String cubeCol,
-    boolean isRef, String... cols) throws SemanticException {
+    boolean isRef, String... cols) throws LensException {
     addOptionalDimTable(alias, candidate, isRequiredInJoin, cubeCol, true, null, cols);
   }
 
   private void addOptionalDimTable(String alias, CandidateTable candidate, boolean isRequiredInJoin, String cubeCol,
-    boolean isRef, String tableAlias, String... cols) throws SemanticException {
+    boolean isRef, String tableAlias, String... cols) throws LensException {
     alias = alias.toLowerCase();
     try {
       if (!addQueriedTable(alias, true)) {
@@ -446,7 +447,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
           (cubeCol == null ? "" : " for column:" + cubeCol),  isRef);
       }
     } catch (HiveException e) {
-      throw new SemanticException(e);
+      throw new LensException(e);
     }
   }
 
@@ -679,14 +680,14 @@ public class CubeQueryContext implements TrackQueriedColumns {
     return StorageUtil.getWhereClause(dimsToQuery.get(cubeTbls.get(alias)), alias);
   }
 
-  String getQBFromString(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery) throws SemanticException {
+  String getQBFromString(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
     String fromString = null;
     if (getJoinTree() == null) {
       if (cube != null) {
         fromString = fact.getStorageString(getAliasForTableName(cube.getName()));
       } else {
         if (dimensions.size() != 1) {
-          throw new SemanticException(ErrorMsg.NO_JOIN_CONDITION_AVAIABLE);
+          throw new LensException(LensCubeErrorCode.NO_JOIN_CONDITION_AVAIABLE.getValue());
         }
         Dimension dim = dimensions.iterator().next();
         fromString = dimsToQuery.get(dim).getStorageString(getAliasForTableName(dim.getName()));
@@ -700,7 +701,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
   }
 
   private void getQLString(QBJoinTree joinTree, StringBuilder builder, CandidateFact fact,
-    Map<Dimension, CandidateDim> dimsToQuery) throws SemanticException {
+    Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
     String joiningTable = null;
     if (joinTree.getBaseSrc()[0] == null) {
       if (joinTree.getJoinSrc() != null) {
@@ -739,11 +740,11 @@ public class CubeQueryContext implements TrackQueriedColumns {
         dimsToQuery.get(cubeTbls.get(joiningTable)).setWhereClauseAdded();
       }
     } else {
-      throw new SemanticException(ErrorMsg.NO_JOIN_CONDITION_AVAIABLE);
+      throw new LensException(LensCubeErrorCode.NO_JOIN_CONDITION_AVAIABLE.getValue());
     }
   }
 
-  void setNonexistingParts(Map<String, Set<String>> nonExistingParts) throws SemanticException {
+  void setNonexistingParts(Map<String, Set<String>> nonExistingParts) throws LensException {
     if (!nonExistingParts.isEmpty()) {
       ByteArrayOutputStream out = null;
       String partsStr;
@@ -753,13 +754,13 @@ public class CubeQueryContext implements TrackQueriedColumns {
         mapper.writeValue(out, nonExistingParts);
         partsStr = out.toString("UTF-8");
       } catch (Exception e) {
-        throw new SemanticException("Error writing non existing parts", e);
+        throw new LensException("Error writing non existing parts", e);
       } finally {
         if (out != null) {
           try {
             out.close();
           } catch (IOException e) {
-            throw new SemanticException(e);
+            throw new LensException(e);
           }
         }
       }
@@ -773,7 +774,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
     return conf.get(CubeQueryConfUtil.NON_EXISTING_PARTITIONS);
   }
 
-  private Map<Dimension, CandidateDim> pickCandidateDimsToQuery(Set<Dimension> dimensions) throws SemanticException {
+  private Map<Dimension, CandidateDim> pickCandidateDimsToQuery(Set<Dimension> dimensions) throws LensException {
     Map<Dimension, CandidateDim> dimsToQuery = new HashMap<Dimension, CandidateDim>();
     if (!dimensions.isEmpty()) {
       for (Dimension dim : dimensions) {
@@ -792,18 +793,18 @@ public class CubeQueryContext implements TrackQueriedColumns {
               mapper.writeValue(out, dimPruningMsgs.get(dim).getJsonObject());
               reason = out.toString("UTF-8");
             } catch (Exception e) {
-              throw new SemanticException("Error writing dim pruning messages", e);
+              throw new LensException("Error writing dim pruning messages", e);
             } finally {
               if (out != null) {
                 try {
                   out.close();
                 } catch (IOException e) {
-                  throw new SemanticException(e);
+                  throw new LensException(e);
                 }
               }
             }
           }
-          throw new SemanticException(ErrorMsg.NO_CANDIDATE_DIM_AVAILABLE, dim.getName(), reason);
+          throw new LensException(LensCubeErrorCode.NO_CANDIDATE_DIM_AVAILABLE.getValue(), dim.getName(), reason);
         }
       }
     }
@@ -811,7 +812,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
     return dimsToQuery;
   }
 
-  private Set<CandidateFact> pickCandidateFactToQuery() throws SemanticException {
+  private Set<CandidateFact> pickCandidateFactToQuery() throws LensException {
     Set<CandidateFact> facts = null;
     if (hasCubeInQuery()) {
       if (candidateFactSets.size() > 0) {
@@ -827,18 +828,18 @@ public class CubeQueryContext implements TrackQueriedColumns {
             mapper.writeValue(out, factPruningMsgs.getJsonObject());
             reason = out.toString("UTF-8");
           } catch (Exception e) {
-            throw new SemanticException("Error writing fact pruning messages", e);
+            throw new LensException("Error writing fact pruning messages", e);
           } finally {
             if (out != null) {
               try {
                 out.close();
               } catch (IOException e) {
-                throw new SemanticException(e);
+                throw new LensException(e);
               }
             }
           }
         }
-        throw new SemanticException(ErrorMsg.NO_CANDIDATE_FACT_AVAILABLE, reason);
+        throw new LensException(LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getValue(), reason);
       }
     }
     return facts;
@@ -848,7 +849,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
   @Getter private Collection<CandidateFact> pickedFacts;
   @Getter private Collection<CandidateDim> pickedDimTables;
 
-  public String toHQL() throws SemanticException {
+  public String toHQL() throws LensException {
     Set<CandidateFact> cfacts = pickCandidateFactToQuery();
     Map<Dimension, CandidateDim> dimsToQuery = pickCandidateDimsToQuery(dimensions);
     if (autoJoinCtx != null) {
@@ -935,7 +936,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
   }
 
   private HQLContextInterface createHQLContext(Set<CandidateFact> facts, Map<Dimension, CandidateDim> dimsToQuery,
-    Map<CandidateFact, Set<Dimension>> factDimMap, CubeQueryContext query) throws SemanticException {
+    Map<CandidateFact, Set<Dimension>> factDimMap, CubeQueryContext query) throws LensException {
     if (facts == null || facts.size() == 0) {
       return new DimOnlyHQLContext(dimsToQuery, query);
     } else if (facts.size() == 1 && facts.iterator().next().getStorageTables().size() > 1) {
@@ -949,7 +950,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
     }
   }
 
-  public ASTNode toAST(Context ctx) throws SemanticException {
+  public ASTNode toAST(Context ctx) throws LensException {
     String hql = toHQL();
     ParseDriver pd = new ParseDriver();
     ASTNode tree;
@@ -957,7 +958,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
       log.info("HQL:{}", hql);
       tree = pd.parse(hql, ctx);
     } catch (ParseException e) {
-      throw new SemanticException(e);
+      throw new LensException(e);
     }
     return ParseUtils.findRootNonNullToken(tree);
   }

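The error-code throws above all follow one pattern: a LensCubeErrorCode constant supplies a numeric code through getValue(), and LensException formats the message template registered for that code in lens-errors.conf with the remaining arguments. A minimal sketch of the enum side of that pattern, with placeholder values (the real codes live in LensCubeErrorCode.java):

    // Sketch only: the constant names appear in this commit, but the numeric
    // values are illustrative; the matching message templates sit in
    // lens-errors.conf.
    public enum LensCubeErrorCode {
      SYNTAX_ERROR(3001),
      NO_CANDIDATE_DIM_AVAILABLE(3002),
      NO_CANDIDATE_FACT_AVAILABLE(3003);

      private final int errorCode;

      LensCubeErrorCode(int errorCode) {
        this.errorCode = errorCode;
      }

      // throw sites pass this value into LensException
      public int getValue() {
        return errorCode;
      }
    }
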
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
index 72dc64b..0dfd7da 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
@@ -18,6 +18,8 @@
  */
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.error.LensCubeErrorCode.SYNTAX_ERROR;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
@@ -185,27 +187,37 @@ public class CubeQueryRewriter {
     rewriters.add(new LightestDimensionResolver(conf));
   }
 
-  public CubeQueryContext rewrite(ASTNode astnode) throws SemanticException, LensException {
-    CubeSemanticAnalyzer analyzer = new CubeSemanticAnalyzer(conf, hconf);
-    analyzer.analyze(astnode, qlCtx);
-    CubeQueryContext ctx = analyzer.getQueryContext();
+  public CubeQueryContext rewrite(ASTNode astnode) throws LensException {
+    CubeSemanticAnalyzer analyzer;
+    try {
+      analyzer = new CubeSemanticAnalyzer(conf, hconf);
+      analyzer.analyze(astnode, qlCtx);
+    } catch (SemanticException e) {
+      throw new LensException(SYNTAX_ERROR.getValue(), e, e.getMessage());
+    }
+    CubeQueryContext ctx = new CubeQueryContext(astnode, analyzer.getCubeQB(), conf, hconf);
     rewrite(rewriters, ctx);
     return ctx;
   }
 
-  public CubeQueryContext rewrite(String command) throws ParseException, SemanticException, LensException {
+  public CubeQueryContext rewrite(String command) throws LensException {
     if (command != null) {
       command = command.replace("\n", "");
     }
-    ParseDriver pd = new ParseDriver();
-    ASTNode tree = pd.parse(command, qlCtx, false);
-    tree = ParseUtils.findRootNonNullToken(tree);
+    ASTNode tree;
+    try {
+      ParseDriver pd = new ParseDriver();
+      tree = pd.parse(command, qlCtx, false);
+      tree = ParseUtils.findRootNonNullToken(tree);
+    } catch (ParseException e) {
+      throw new LensException(SYNTAX_ERROR.getValue(), e, e.getMessage());
+    }
     return rewrite(tree);
   }
 
   private static final String ITER_STR = "-ITER-";
 
-  private void rewrite(List<ContextRewriter> rewriters, CubeQueryContext ctx) throws SemanticException, LensException {
+  private void rewrite(List<ContextRewriter> rewriters, CubeQueryContext ctx) throws LensException {
     int i = 0;
     for (ContextRewriter rewriter : rewriters) {
       /*

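With both rewrite overloads now declaring only LensException, a caller needs a single catch instead of handling ParseException, SemanticException and LensException separately. A hypothetical caller sketch, not part of this commit; the method and variable names are invented:

    // `rewriter` is a configured CubeQueryRewriter; the enclosing class is
    // assumed to import org.apache.lens.server.api.error.LensException.
    public static String toStorageHql(CubeQueryRewriter rewriter, String cubeQuery)
      throws LensException {
      // parse and semantic failures both arrive as LensException carrying
      // SYNTAX_ERROR; later rewrite phases raise their own cube error codes
      CubeQueryContext ctx = rewriter.rewrite(cubeQuery);
      return ctx.toHQL();
    }
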
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
index 68bffed..ebbe404 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
@@ -24,9 +24,10 @@ import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.*;
 
+import lombok.Getter;
+
 /**
  * Accepts a cube query AST and rewrites it into a storage table query.
  */
@@ -34,7 +35,8 @@ public class CubeSemanticAnalyzer extends SemanticAnalyzer {
   private final Configuration queryConf;
   private final HiveConf hiveConf;
   private final List<ValidationRule> validationRules = new ArrayList<ValidationRule>();
-  private CubeQueryContext cubeQl;
+  @Getter
+  private QB cubeQB;
 
   public CubeSemanticAnalyzer(Configuration queryConf, HiveConf hiveConf) throws SemanticException {
     super(hiveConf);
@@ -51,13 +53,7 @@ public class CubeSemanticAnalyzer extends SemanticAnalyzer {
   @Override
   public void analyzeInternal(ASTNode ast) throws SemanticException {
     reset();
-    QB qb = new QB(null, null, false);
-    // do not allow create table/view commands
-    // TODO Move this to a validation rule
-    if (ast.getToken().getType() == HiveParser.TOK_CREATETABLE
-      || ast.getToken().getType() == HiveParser.TOK_CREATEVIEW) {
-      throw new SemanticException(ErrorMsg.CREATE_NOT_ALLOWED);
-    }
+    cubeQB = new QB(null, null, false);
 
     if (ast.getToken().getType() == HiveParser.TOK_QUERY) {
       if (((ASTNode) ast.getChild(0)).getToken().getType() == HiveParser.KW_CUBE) {
@@ -69,34 +65,9 @@ public class CubeSemanticAnalyzer extends SemanticAnalyzer {
       }
     }
     // run phase 1 analysis on the AST node.
-    if (!doPhase1(ast, qb, initPhase1Ctx())) {
+    if (!doPhase1(ast, cubeQB, initPhase1Ctx())) {
       // if phase1Result false return
       return;
     }
-    cubeQl = new CubeQueryContext(ast, qb, queryConf, hiveConf);
-    // cubeQl.init();
-    // validate();
-
-    // TODO Move this to a validation Rule
-    // QBParseInfo qbp = qb.getParseInfo();
-    // TreeSet<String> ks = new TreeSet<String>(qbp.getClauseNames());
-    // if (ks.size() > 1) {
-    // throw new SemanticException("nested/sub queries not allowed yet");
-    // }
-    // Operator sinkOp = genPlan(qb);
-    // System.out.println(sinkOp.toString());
-  }
-
-  @Override
-  public void validate() throws SemanticException {
-    for (ValidationRule rule : validationRules) {
-      if (!rule.validate(cubeQl)) {
-        break;
-      }
-    }
-  }
-
-  public CubeQueryContext getQueryContext() {
-    return cubeQl;
   }
 }

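For readers unfamiliar with Lombok: the @Getter on cubeQB generates the accessor that CubeQueryRewriter invokes as analyzer.getCubeQB() above, equivalent to this hand-written version:

    public QB getCubeQB() {
      return cubeQB;
    }
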
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
index 5c77548..486c6b3 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
@@ -29,12 +29,12 @@ import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.UpdatePeriod;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang.time.DateUtils;
-import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import lombok.Data;
 import lombok.EqualsAndHashCode;
@@ -120,17 +120,17 @@ public final class DateUtil {
     throw new IllegalArgumentException("Unsupported formatting for date" + str);
   }
 
-  public static Date resolveDate(String str, Date now) throws SemanticException {
+  public static Date resolveDate(String str, Date now) throws LensException {
     if (RELDATE_VALIDATOR.matcher(str).matches()) {
       return resolveRelativeDate(str, now);
     } else {
       return resolveAbsoluteDate(str);
     }
   }
-  public static String relativeToAbsolute(String relative) throws SemanticException {
+  public static String relativeToAbsolute(String relative) throws LensException {
     return relativeToAbsolute(relative, new Date());
   }
-  public static String relativeToAbsolute(String relative, Date now) throws SemanticException {
+  public static String relativeToAbsolute(String relative, Date now) throws LensException {
     if (RELDATE_VALIDATOR.matcher(relative).matches()) {
       return ABSDATE_PARSER.get().format(resolveRelativeDate(relative, now));
     } else {
@@ -138,18 +138,18 @@ public final class DateUtil {
     }
   }
 
-  public static Date resolveAbsoluteDate(String str) throws SemanticException {
+  public static Date resolveAbsoluteDate(String str) throws LensException {
     try {
       return ABSDATE_PARSER.get().parse(getAbsDateFormatString(str));
     } catch (ParseException e) {
       log.error("Invalid date format. expected only {} date provided:{}", ABSDATE_FMT, str, e);
-      throw new SemanticException(e, ErrorMsg.WRONG_TIME_RANGE_FORMAT, ABSDATE_FMT, str);
+      throw new LensException(LensCubeErrorCode.WRONG_TIME_RANGE_FORMAT.getValue(), ABSDATE_FMT, str);
     }
   }
 
-  public static Date resolveRelativeDate(String str, Date now) throws SemanticException {
+  public static Date resolveRelativeDate(String str, Date now) throws LensException {
     if (StringUtils.isBlank(str)) {
-      throw new SemanticException(ErrorMsg.NULL_DATE_VALUE);
+      throw new LensException(LensCubeErrorCode.NULL_DATE_VALUE.getValue());
     }
 
     // Resolve NOW with proper granularity
@@ -182,7 +182,7 @@ public final class DateUtil {
         } else if ("second".equals(unit)) {
           calendar = DateUtils.truncate(calendar, Calendar.SECOND);
         } else {
-          throw new SemanticException(ErrorMsg.INVALID_TIME_UNIT, unit);
+          throw new LensException(LensCubeErrorCode.INVALID_TIME_UNIT.getValue(), unit);
         }
       }
     }
@@ -443,7 +443,7 @@ public final class DateUtil {
       this.calendarField = calendarField;
     }
 
-    static TimeDiff parseFrom(String diffStr) throws SemanticException {
+    static TimeDiff parseFrom(String diffStr) throws LensException {
       // Get the relative diff part, used to compute the eventual date based on now.
       Matcher qtyMatcher = P_QUANTITY.matcher(diffStr);
       int qty = 1;
@@ -477,7 +477,7 @@ public final class DateUtil {
         } else if ("second".equals(unit)) {
           return new TimeDiff(qty, SECOND);
         } else {
-          throw new SemanticException(ErrorMsg.INVALID_TIME_UNIT, unit);
+          throw new LensException(LensCubeErrorCode.INVALID_TIME_UNIT.getValue(), unit);
         }
       }
       return new TimeDiff(0, SECOND);

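A short usage sketch of the reworked DateUtil API, not part of this commit (the demo class name is invented): blank input now surfaces as a LensException built from LensCubeErrorCode.NULL_DATE_VALUE rather than a Hive SemanticException.

    import java.util.Date;

    import org.apache.lens.cube.parse.DateUtil;
    import org.apache.lens.server.api.error.LensException;

    public class ResolveDateDemo {
      public static void main(String[] args) {
        try {
          // hits the StringUtils.isBlank() guard in resolveRelativeDate
          DateUtil.resolveRelativeDate("", new Date());
        } catch (LensException e) {
          // message text is resolved from lens-errors.conf via the error code
          System.err.println(e.getMessage());
        }
      }
    }
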
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
index 517e8fc..3ef9652 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
@@ -23,17 +23,17 @@ import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_TABLE_OR_COL;
 
 import java.util.*;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.ReferencedDimAtrribute.ChainRefCol;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.cube.parse.ExpressionResolver.ExprSpecContext;
 import org.apache.lens.cube.parse.ExpressionResolver.ExpressionContext;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.antlr.runtime.CommonToken;
 
@@ -134,7 +134,7 @@ public class DenormalizationResolver implements ContextRewriter {
     // When the candidate table does not have the field, this method checks
     // whether the field can be reached through a reference;
     // if yes, it adds the ref usage and returns true, otherwise returns false.
-    boolean addRefUsage(CandidateTable table, String col, String srcTbl) throws SemanticException {
+    boolean addRefUsage(CandidateTable table, String col, String srcTbl) throws LensException {
       // available as referenced col
       if (referencedCols.containsKey(col)) {
         for (ReferencedQueriedColumn refer : referencedCols.get(col)) {
@@ -195,7 +195,7 @@ public class DenormalizationResolver implements ContextRewriter {
     }
 
     public Set<Dimension> rewriteDenormctx(CandidateFact cfact, Map<Dimension, CandidateDim> dimsToQuery,
-      boolean replaceFact) throws SemanticException {
+      boolean replaceFact) throws LensException {
       Set<Dimension> refTbls = new HashSet<Dimension>();
 
       if (!tableToRefCols.isEmpty()) {
@@ -237,7 +237,7 @@ public class DenormalizationResolver implements ContextRewriter {
       return false;
     }
 
-    private void pickColumnsForTable(String tbl) throws SemanticException {
+    private void pickColumnsForTable(String tbl) throws LensException {
       if (tableToRefCols.containsKey(tbl)) {
         for (ReferencedQueriedColumn refered : tableToRefCols.get(tbl)) {
           if (!refered.col.isChainedColumn()) {
@@ -251,7 +251,7 @@ public class DenormalizationResolver implements ContextRewriter {
               }
             }
             if (refered.references.isEmpty()) {
-              throw new SemanticException("No reference column available for " + refered);
+              throw new LensException(LensCubeErrorCode.NO_REF_COL_AVAILABLE.getValue(), refered);
             }
             PickedReference picked = new PickedReference(refered.references.iterator().next(),
               cubeql.getAliasForTableName(refered.srcTable.getName()), tbl);
@@ -268,7 +268,7 @@ public class DenormalizationResolver implements ContextRewriter {
               }
             }
             if (refered.chainRefCols.isEmpty()) {
-              throw new SemanticException("No chain reference column available for " + refered);
+              throw new LensException("No chain reference column available for " + refered);
             }
             PickedReference picked =
               new PickedReference(refered.chainRefCols.iterator().next(),
@@ -280,7 +280,7 @@ public class DenormalizationResolver implements ContextRewriter {
       }
     }
 
-    private void replaceReferencedColumns(CandidateFact cfact, boolean replaceFact) throws SemanticException {
+    private void replaceReferencedColumns(CandidateFact cfact, boolean replaceFact) throws LensException {
       if (replaceFact
         && (tableToRefCols.get(cfact.getName()) != null && !tableToRefCols.get(cfact.getName()).isEmpty())) {
         resolveClause(cubeql, cfact.getSelectAST());
@@ -297,7 +297,7 @@ public class DenormalizationResolver implements ContextRewriter {
       resolveClause(cubeql, cubeql.getOrderByAST());
     }
 
-    private void resolveClause(CubeQueryContext query, ASTNode node) throws SemanticException {
+    private void resolveClause(CubeQueryContext query, ASTNode node) throws LensException {
       if (node == null) {
         return;
       }
@@ -366,7 +366,7 @@ public class DenormalizationResolver implements ContextRewriter {
    * replaced with the corresponding table reference
    */
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     DenormalizationContext denormCtx = cubeql.getDeNormCtx();
     if (denormCtx == null) {
       // Adds all the reference dimensions as eligible for denorm fields
@@ -400,8 +400,8 @@ public class DenormalizationResolver implements ContextRewriter {
           }
         }
         if (cubeql.getCandidateFacts().size() == 0) {
-          throw new SemanticException(ErrorMsg.NO_FACT_HAS_COLUMN, cubeql.getColumnsQueried(cubeql.getCube().getName())
-            .toString());
+          throw new LensException(LensCubeErrorCode.NO_FACT_HAS_COLUMN.getValue(),
+              cubeql.getColumnsQueried(cubeql.getCube().getName()).toString());
         }
         cubeql.pruneCandidateFactSet(CandidateTablePruneCode.COLUMN_NOT_FOUND);
       }
@@ -422,7 +422,7 @@ public class DenormalizationResolver implements ContextRewriter {
           }
 
           if (cubeql.getCandidateDimTables().get(dim).size() == 0) {
-            throw new SemanticException(ErrorMsg.NO_DIM_HAS_COLUMN,
+            throw new LensException(LensCubeErrorCode.NO_DIM_HAS_COLUMN.getValue(),
               dim.toString(), cubeql.getColumnsQueried(dim.getName()).toString());
           }
         }

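The addRefUsage comment above describes a reachability check on candidate tables. A hypothetical fragment showing its contract: it would have to sit in org.apache.lens.cube.parse (the method is package-private), the enclosing method must declare throws LensException, and denormCtx, cfact and both string arguments are invented for illustration:

    // denormCtx comes from cubeql.getDeNormCtx(); cfact is a CandidateTable
    // currently under consideration in the rewrite.
    if (!denormCtx.addRefUsage(cfact, "cityname", "citydim")) {
      // "cityname" is neither present on the candidate nor reachable through
      // a reference; such candidates are later pruned with COLUMN_NOT_FOUND
    }
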
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
index 3814cf6..bcfc1f6 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
@@ -24,9 +24,9 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.lens.cube.metadata.Dimension;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 /**
  * Dimension HQLContext.
@@ -47,7 +47,7 @@ abstract class DimHQLContext extends SimpleHQLContext {
 
   DimHQLContext(CubeQueryContext query, Map<Dimension, CandidateDim> dimsToQuery,
     Set<Dimension> queriedDims, String select, String where,
-    String groupby, String orderby, String having, Integer limit) throws SemanticException {
+    String groupby, String orderby, String having, Integer limit) throws LensException {
     super(select, groupby, orderby, having, limit);
     this.query = query;
     this.dimsToQuery = dimsToQuery;
@@ -55,7 +55,7 @@ abstract class DimHQLContext extends SimpleHQLContext {
     this.queriedDims = queriedDims;
   }
 
-  protected void setMissingExpressions() throws SemanticException {
+  protected void setMissingExpressions() throws LensException {
     setFrom(getFromString());
     setWhere(joinWithAnd(
       genWhereClauseWithDimPartitions(where), getQuery().getConf().getBoolean(
@@ -63,13 +63,13 @@ abstract class DimHQLContext extends SimpleHQLContext {
         ? getPostSelectionWhereClause() : null));
   }
 
-  protected String getPostSelectionWhereClause() throws SemanticException {
+  protected String getPostSelectionWhereClause() throws LensException {
     return null;
   }
 
 
 
-  protected String getFromString() throws SemanticException {
+  protected String getFromString() throws LensException {
     String fromString = getFromTable();
     if (query.isAutoJoinResolved()) {
       fromString =
@@ -82,7 +82,7 @@ abstract class DimHQLContext extends SimpleHQLContext {
 
   protected abstract CandidateFact getQueriedFact();
 
-  protected abstract String getFromTable() throws SemanticException;
+  protected abstract String getFromTable() throws LensException;
 
   public Map<Dimension, CandidateDim> getDimsToQuery() {
     return dimsToQuery;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
index 922501d..0c43d98 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
@@ -22,8 +22,7 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.lens.cube.metadata.Dimension;
-
-import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.lens.server.api.error.LensException;
 
 /**
  * HQL context class which passes all query strings from {@link CubeQueryContext} and works with all dimensions to be
@@ -33,23 +32,23 @@ import org.apache.hadoop.hive.ql.parse.SemanticException;
  */
 class DimOnlyHQLContext extends DimHQLContext {
 
-  DimOnlyHQLContext(Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query) throws SemanticException {
+  DimOnlyHQLContext(Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query) throws LensException {
     super(query, dimsToQuery, dimsToQuery.keySet(), query.getSelectTree(),
       query.getWhereTree(), query.getGroupByTree(), query.getOrderByTree(),
       query.getHavingTree(), query.getLimitValue());
   }
 
   DimOnlyHQLContext(Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query, String whereClause)
-    throws SemanticException {
+    throws LensException {
     super(query, dimsToQuery, dimsToQuery.keySet(), query.getSelectTree(), whereClause, query.getGroupByTree(), query
         .getOrderByTree(), query.getHavingTree(), query.getLimitValue());
   }
 
-  public String toHQL() throws SemanticException {
+  public String toHQL() throws LensException {
     return query.getInsertClause() + super.toHQL();
   }
 
-  protected String getFromTable() throws SemanticException {
+  protected String getFromTable() throws LensException {
     if (query.getAutoJoinCtx() != null && query.getAutoJoinCtx().isJoinsResolved()) {
       return getDimsToQuery().get(query.getAutoJoinCtx().getAutoJoinTarget()).getStorageString(
         query.getAliasForTableName(query.getAutoJoinCtx().getAutoJoinTarget().getName()));