You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lens.apache.org by am...@apache.org on 2015/08/19 11:01:15 UTC

[1/3] incubator-lens git commit: LENS-187 : Move cube specific error message codes from Hive code to Lens

Repository: incubator-lens
Updated Branches:
  refs/heads/master c36859568 -> 3dc348ac2


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index fb8f61a..b34c217 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -28,6 +28,7 @@ import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.*;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCause;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode;
@@ -38,9 +39,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.Driver;
-import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 
 import org.testng.annotations.BeforeTest;
@@ -72,9 +72,9 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
   @Test
   public void testQueryWithNow() throws Exception {
-    SemanticException e = getSemanticExceptionInRewrite(
+    LensException e = getLensExceptionInRewrite(
       "select SUM(msr2) from testCube where" + " time_range_in(d_time, 'NOW - 2DAYS', 'NOW')", getConf());
-    assertEquals(e.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.NO_CANDIDATE_FACT_AVAILABLE.getErrorCode());
+    assertEquals(e.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getValue());
   }
 
   @Test
@@ -116,21 +116,21 @@ public class TestCubeRewriter extends TestQueryRewrite {
     qCal.setTime(BEFORE_4_DAYS_START);
     Date from4DaysBackDate = qCal.getTime();
     String qFrom4DaysBackDate = qFmt.format(from4DaysBackDate);
-    SemanticException th = getSemanticExceptionInRewrite("select SUM(msr15) from testCube where"
+    LensException th = getLensExceptionInRewrite("select SUM(msr15) from testCube where"
       + " time_range_in(d_time, '"+ qFrom4DaysBackDate + "', '" + qTo + "')", getConf());
-    assertEquals(th.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.NO_CANDIDATE_FACT_AVAILABLE.getErrorCode());
+    assertEquals(th.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getValue());
   }
 
   @Test
   public void testCandidateTables() throws Exception {
-    SemanticException th = getSemanticExceptionInRewrite(
+    LensException th = getLensExceptionInRewrite(
       "select dim12, SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, getConf());
-    assertEquals(th.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.COLUMN_NOT_FOUND.getErrorCode());
+    assertEquals(th.getErrorCode(), LensCubeErrorCode.COLUMN_NOT_FOUND.getValue());
 
     // this query should throw an exception because invalidMsr is invalid
-    th = getSemanticExceptionInRewrite(
+    th = getLensExceptionInRewrite(
       "SELECT cityid, invalidMsr from testCube " + " where " + TWO_DAYS_RANGE, getConf());
-    assertEquals(th.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.COLUMN_NOT_FOUND.getErrorCode());
+    assertEquals(th.getErrorCode(), LensCubeErrorCode.COLUMN_NOT_FOUND.getValue());
   }
 
   @Test
@@ -185,9 +185,9 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
     conf.setBoolean(CubeQueryConfUtil.LIGHTEST_FACT_FIRST, true);
 
-    SemanticException th = getSemanticExceptionInRewrite(
+    LensException th = getLensExceptionInRewrite(
       "select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
-    assertEquals(th.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.NO_CANDIDATE_FACT_AVAILABLE.getErrorCode());
+    assertEquals(th.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getValue());
     PruneCauses.BriefAndDetailedError pruneCauses = extractPruneCause(th);
     int endIndex = MISSING_PARTITIONS.errorFormat.length() - 3;
     assertEquals(
@@ -200,7 +200,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
   }
 
   @Test
-  public void testDerivedCube() throws SemanticException, ParseException, LensException {
+  public void testDerivedCube() throws ParseException, LensException, HiveException, ClassNotFoundException {
     CubeQueryContext rewrittenQuery =
       rewriteCtx("cube select" + " SUM(msr2) from derivedCube where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
     String expected =
@@ -210,9 +210,9 @@ public class TestCubeRewriter extends TestQueryRewrite {
     System.out.println("Non existing parts:" + rewrittenQuery.getNonExistingParts());
     assertNotNull(rewrittenQuery.getNonExistingParts());
 
-    SemanticException th = getSemanticExceptionInRewrite(
+    LensException th = getLensExceptionInRewrite(
       "select SUM(msr4) from derivedCube" + " where " + TWO_DAYS_RANGE, getConf());
-    assertEquals(th.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.COLUMN_NOT_FOUND.getErrorCode());
+    assertEquals(th.getErrorCode(), LensCubeErrorCode.COLUMN_NOT_FOUND.getValue());
 
     // test join
     Configuration conf = getConf();
@@ -627,10 +627,10 @@ public class TestCubeRewriter extends TestQueryRewrite {
         getWhereForMonthly2months("c2_testfactmonthly"));
     compareQueries(expected, hqlQuery);
 
-    SemanticException th = getSemanticExceptionInRewrite(
+    LensException th = getLensExceptionInRewrite(
       "select name, SUM(msr2) from testCube" + " join citydim" + " where " + TWO_DAYS_RANGE
         + " group by name", getConf());
-    assertEquals(th.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.NO_JOIN_CONDITION_AVAIABLE.getErrorCode());
+    assertEquals(th.getErrorCode(), LensCubeErrorCode.NO_JOIN_CONDITION_AVAIABLE.getValue());
   }
 
   @Test
@@ -894,7 +894,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
   @Test
   public void testSelectExprPromotionToGroupByWithSpacesInDimensionAliasAndWithAsKeywordBwColAndAlias()
-    throws SemanticException, ParseException, LensException {
+    throws ParseException, LensException, HiveException {
 
     String inputQuery = "cube select name as `Alias With Spaces`, SUM(msr2) as `TestMeasure` from testCube join citydim"
       + " on testCube.cityid = citydim.id where " + LAST_HOUR_TIME_RANGE;
@@ -911,7 +911,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
   @Test
   public void testSelectExprPromotionToGroupByWithSpacesInDimensionAliasAndWithoutAsKeywordBwColAndAlias()
-    throws SemanticException, ParseException, LensException {
+    throws ParseException, LensException, HiveException {
 
     String inputQuery = "cube select name `Alias With Spaces`, SUM(msr2) as `TestMeasure` from testCube join citydim"
       + " on testCube.cityid = citydim.id where " + LAST_HOUR_TIME_RANGE;
@@ -977,9 +977,9 @@ public class TestCubeRewriter extends TestQueryRewrite {
     Configuration conf = getConf();
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
 
-    SemanticException e = getSemanticExceptionInRewrite(
+    LensException e = getLensExceptionInRewrite(
       "select SUM(msr2) from testCube" + " where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
-    assertEquals(e.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.NO_CANDIDATE_FACT_AVAILABLE.getErrorCode());
+    assertEquals(e.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getValue());
     PruneCauses.BriefAndDetailedError pruneCauses = extractPruneCause(e);
 
     assertEquals(
@@ -1026,8 +1026,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     compareQueries(expected, hqlQuery);
 
     // state table is present on c1 with partition dumps and partitions added
-    SemanticException e = getSemanticExceptionInRewrite("select name, capital from statedim ", conf);
-    assertEquals(e.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.NO_CANDIDATE_DIM_AVAILABLE.getErrorCode());
+    LensException e = getLensExceptionInRewrite("select name, capital from statedim ", conf);
+    assertEquals(e.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_DIM_AVAILABLE.getValue());
     assertEquals(extractPruneCause(e), new PruneCauses.BriefAndDetailedError(
       NO_CANDIDATE_STORAGES.errorFormat,
       new HashMap<String, List<CandidateTablePruneCause>>() {
@@ -1167,14 +1167,14 @@ public class TestCubeRewriter extends TestQueryRewrite {
       "SELECT ambigdim1, sum(testCube.msr1) FROM testCube join" + " citydim on testcube.cityid = citydim.id where "
         + TWO_DAYS_RANGE;
 
-    SemanticException th = getSemanticExceptionInRewrite(query, getConf());
-    assertEquals(th.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.AMBIGOUS_CUBE_COLUMN.getErrorCode());
+    LensException th = getLensExceptionInRewrite(query, getConf());
+    assertEquals(th.getErrorCode(), LensCubeErrorCode.AMBIGOUS_CUBE_COLUMN.getValue());
 
     String q2 =
       "SELECT ambigdim2 from citydim join" + " statedim on citydim.stateid = statedim.id join countrydim on"
         + " statedim.countryid = countrydim.id";
-    th = getSemanticExceptionInRewrite(q2, getConf());
-    assertEquals(th.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.AMBIGOUS_DIM_COLUMN.getErrorCode());
+    th = getLensExceptionInRewrite(q2, getConf());
+    assertEquals(th.getErrorCode(), LensCubeErrorCode.AMBIGOUS_DIM_COLUMN.getValue());
   }
 
   @Test
@@ -1466,8 +1466,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
       CubeQueryContext context = rewriteCtx(query, testConf);
       System.out.println("TestJoinPathTimeRange: " + context.toHQL());
       fail("Expected query to fail because of invalid column life");
-    } catch (SemanticException exc) {
-      assertEquals(exc.getCanonicalErrorMsg(), ErrorMsg.NO_JOIN_PATH);
+    } catch (LensException exc) {
+      assertEquals(exc.getErrorCode(), LensCubeErrorCode.NO_JOIN_PATH.getValue());
     } finally {
       // Add old column back
       cube.alterDimension(col);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
index 6717b5b..89d210f 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
@@ -35,9 +35,9 @@ import java.util.Date;
 import java.util.Set;
 
 import org.apache.lens.cube.parse.DateUtil.*;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.time.DateUtils;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
@@ -250,7 +250,7 @@ public class TestDateUtil {
   }
 
   @Test
-  public void testTimeDiff() throws SemanticException {
+  public void testTimeDiff() throws LensException {
     ArrayList<String> minusFourDays =
       Lists.newArrayList("-4 days", "-4days", "-4day", "-4 day", "- 4days", "- 4 day");
     ArrayList<String> plusFourDays =
@@ -280,7 +280,7 @@ public class TestDateUtil {
   }
 
   @Test
-  public void testRelativeToAbsolute() throws SemanticException {
+  public void testRelativeToAbsolute() throws LensException {
     Date now = new Date();
     Date nowDay = DateUtils.truncate(now, DAY_OF_MONTH);
     Date nowDayMinus2Days = DateUtils.add(nowDay, DAY_OF_MONTH, -2);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
index 1bf1a5c..bde4edd 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
@@ -31,8 +31,8 @@ import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCo
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.testng.Assert;
 import org.testng.annotations.BeforeTest;
@@ -54,7 +54,7 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
   }
 
   @Test
-  public void testDenormsAsDirectFields() throws SemanticException, ParseException, LensException {
+  public void testDenormsAsDirectFields() throws ParseException, LensException, HiveException {
     // denorm fields directly available
     String twoDaysITRange =
       "time_range_in(it, '" + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','"
@@ -104,7 +104,7 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
   }
 
   @Test
-  public void testDenormsWithJoins() throws SemanticException, ParseException, LensException {
+  public void testDenormsWithJoins() throws ParseException, LensException, HiveException, ClassNotFoundException {
     // all following queries use joins to get denorm fields
     Configuration tconf = new Configuration(this.conf);
     tconf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
@@ -149,7 +149,7 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
         null, " group by testdim3.name, (testdim2.bigid1)", null,
         getWhereForDailyAndHourly2days(cubeName, "c1_summary2"));
     TestCubeRewriter.compareQueries(expected, hqlQuery);
-    SemanticException e = getSemanticExceptionInRewrite(
+    LensException e = getLensExceptionInRewrite(
       "select dim2big2, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE, tconf);
     Assert.assertEquals(extractPruneCause(e), new PruneCauses.BriefAndDetailedError(
       CandidateTablePruneCode.NO_CANDIDATE_STORAGES.errorFormat,
@@ -206,26 +206,23 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
   public void testDimensionQuery() throws Exception {
     String hqlQuery = rewrite("select citydim.name, citydim.statename from" + " citydim", conf);
 
-    String joinExpr =
-      " join " + getDbName() + "c1_statetable statedim on"
+    String joinExpr = " join " + getDbName() + "c1_statetable statedim on"
         + " citydim.stateid = statedim.id and (statedim.dt = 'latest')";
-    String expected =
-      getExpectedQuery("citydim", "SELECT citydim.name, statedim.name FROM ", joinExpr, null, null, "c1_citytable",
-        true);
+    String expected = getExpectedQuery("citydim", "SELECT citydim.name, statedim.name FROM ", joinExpr, null, null,
+        "c1_citytable", true);
     TestCubeRewriter.compareQueries(expected, hqlQuery);
 
     hqlQuery = rewrite("select citydim.statename, citydim.name  from" + " citydim", conf);
 
-    expected =
-      getExpectedQuery("citydim", "SELECT statedim.name, citydim.name FROM ", joinExpr, null, null, "c1_citytable",
-        true);
+    expected = getExpectedQuery("citydim", "SELECT statedim.name, citydim.name FROM ", joinExpr, null, null,
+        "c1_citytable", true);
     TestCubeRewriter.compareQueries(expected, hqlQuery);
 
-    // Query would fail because citydim.nocandidatecol does not exist in any candidate
-    SemanticException e = getSemanticExceptionInRewrite(
-      "select citydim.name, citydim.statename, citydim.nocandidatecol from citydim", conf);
-    Assert.assertEquals(e.getMessage(),
-      "No dimension table has the queried columns for citydim, columns: [name, statename, nocandidatecol]");
+    // Query would fail because citydim.nocandidatecol does not exist in any
+    // candidate
+    Assert.assertEquals(getLensExceptionErrorMessageInRewrite(
+        "select citydim.name, citydim.statename, citydim.nocandidatecol " + "from citydim", conf),
+        "No dimension table has the queried columns " + "for citydim, columns: [name, statename, nocandidatecol]");
   }
 
   @Test
@@ -314,8 +311,7 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
 
   @Test
   public void testNonExistingDimension() throws Exception {
-    SemanticException e = getSemanticExceptionInRewrite("select nonexist.name, msr2 from testCube where "
-      + TWO_DAYS_RANGE, conf);
-    Assert.assertEquals(e.getMessage(), "Neither cube nor dimensions accessed in the query");
+    Assert.assertEquals(getLensExceptionErrorMessageInRewrite("select nonexist.name, msr2 from testCube where "
+        + TWO_DAYS_RANGE, conf), "Neither cube nor dimensions accessed in the query");
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
index 7f872e9..ca07a0d 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
@@ -21,13 +21,13 @@ package org.apache.lens.cube.parse;
 
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.testng.Assert;
 import org.testng.annotations.BeforeTest;
@@ -50,15 +50,19 @@ public class TestExpressionResolver extends TestQueryRewrite {
 
   @Test
   public void testColumnErrors() throws Exception {
-    SemanticException th;
-    th = getSemanticExceptionInRewrite("select nocolexpr, SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
-    Assert.assertEquals(th.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.COLUMN_NOT_FOUND.getErrorCode());
-    Assert.assertTrue(th.getMessage().contains("nonexist"));
+    LensException th;
+    th = getLensExceptionInRewrite("select nocolexpr, SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
+    Assert.assertEquals(th.getErrorCode(), LensCubeErrorCode.COLUMN_NOT_FOUND.getValue());
 
-    th = getSemanticExceptionInRewrite("select invalidexpr, SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE,
-      conf);
-    Assert.assertEquals(th.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.COLUMN_NOT_FOUND.getErrorCode());
-    Assert.assertTrue(th.getMessage().contains("invalidexpr"));
+    Assert.assertTrue(getLensExceptionErrorMessageInRewrite(
+        "select nocolexpr, SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf).contains("nonexist"));
+
+    Assert.assertTrue(getLensExceptionErrorMessageInRewrite(
+        "select invalidexpr, SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf).contains("invalidexpr"));
+
+    th = getLensExceptionInRewrite("select invalidexpr, " + "SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE,
+        conf);
+    Assert.assertEquals(th.getErrorCode(), LensCubeErrorCode.COLUMN_NOT_FOUND.getValue());
   }
 
   @Test
@@ -330,10 +334,10 @@ public class TestExpressionResolver extends TestQueryRewrite {
   }
 
   @Test
-  public void testDerivedCube() throws SemanticException, ParseException, LensException {
-    SemanticException th =
-      getSemanticExceptionInRewrite("select avgmsr from derivedCube" + " where " + TWO_DAYS_RANGE, conf);
-    Assert.assertEquals(th.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.COLUMN_NOT_FOUND.getErrorCode());
+  public void testDerivedCube() throws ParseException, LensException, HiveException {
+    LensException th =
+      getLensExceptionInRewrite("select avgmsr from derivedCube" + " where " + TWO_DAYS_RANGE, conf);
+    Assert.assertEquals(th.getErrorCode(), LensCubeErrorCode.COLUMN_NOT_FOUND.getValue());
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
index ed08605..6150b14 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
@@ -25,6 +25,7 @@ import static org.testng.Assert.*;
 
 import java.util.*;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.SchemaGraph.TableRelationship;
 import org.apache.lens.server.api.error.LensException;
@@ -32,10 +33,8 @@ import org.apache.lens.server.api.error.LensException;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.testng.Assert;
 import org.testng.annotations.AfterTest;
@@ -161,7 +160,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     }
   }
 
-  private String getAutoResolvedFromString(CubeQueryContext query) throws SemanticException {
+  private String getAutoResolvedFromString(CubeQueryContext query) throws LensException {
     return query.getHqlContext().getFrom();
   }
 
@@ -219,7 +218,7 @@ public class TestJoinResolver extends TestQueryRewrite {
 
     // Test 3 Dim only query should throw error
     String errDimOnlyQuery = "select citydim.id, testDim4.name FROM citydim where " + TWO_DAYS_RANGE;
-    getSemanticExceptionInRewrite(errDimOnlyQuery, hconf);
+    getLensExceptionInRewrite(errDimOnlyQuery, hconf);
   }
 
   @Test
@@ -376,7 +375,7 @@ public class TestJoinResolver extends TestQueryRewrite {
   }
 
   @Test
-  public void testJoinChains() throws SemanticException, ParseException, LensException {
+  public void testJoinChains() throws ParseException, LensException, HiveException {
     String query, hqlQuery, expected;
 
     // Single joinchain with direct link
@@ -574,14 +573,14 @@ public class TestJoinResolver extends TestQueryRewrite {
   }
 
   @Test
-  public void testConflictingJoins() throws ParseException, LensException {
+  public void testConflictingJoins() throws ParseException, LensException, HiveException {
     // Single joinchain with two paths, intermediate dimension accessed separately by name.
     String query = "select cityState.name, citydim.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
     try {
       rewrite(query, hconf);
       Assert.fail("Should have failed. "
         + "The table citydim is getting accessed as both chain and without chain ");
-    } catch (SemanticException e) {
+    } catch (LensException e) {
       Assert.assertEquals(e.getMessage().toLowerCase(),
         "Table citydim is getting accessed via joinchain: citystate and no chain at all".toLowerCase());
     }
@@ -592,7 +591,7 @@ public class TestJoinResolver extends TestQueryRewrite {
       rewrite(query, hconf);
       Assert.fail("Should have failed. "
         + "The table citydim is getting accessed as both chain and without chain ");
-    } catch (SemanticException e) {
+    } catch (LensException e) {
       Assert.assertEquals(e.getMessage().toLowerCase(),
         "Table citydim is getting accessed via joinchain: citystate and no chain at all".toLowerCase());
     }
@@ -605,7 +604,7 @@ public class TestJoinResolver extends TestQueryRewrite {
       Assert.fail("Should have failed. "
         + "It's not possible to resolve which statedim is being asked for when cityState and cubeState both end at"
         + " statedim table.");
-    } catch (SemanticException e) {
+    } catch (LensException e) {
       Assert.assertEquals(
         e.getMessage().indexOf("Table statedim has 2 different paths through joinchains"), 0);
     }
@@ -617,7 +616,7 @@ public class TestJoinResolver extends TestQueryRewrite {
       rewrite(query, hconf);
       Assert.fail("Should have failed. "
         + "The table statedim is getting accessed as both cubeState and statedim ");
-    } catch (SemanticException e) {
+    } catch (LensException e) {
       Assert.assertEquals(e.getMessage().toLowerCase(),
         "Table statedim is getting accessed via two different names: [cubestate, statedim]".toLowerCase());
     }
@@ -627,7 +626,7 @@ public class TestJoinResolver extends TestQueryRewrite {
       rewrite(query, hconf);
       Assert.fail("Should have failed. "
         + "The table statedim is getting accessed as both cubeState and statedim ");
-    } catch (SemanticException e) {
+    } catch (LensException e) {
       Assert.assertEquals(e.getMessage().toLowerCase(),
         "Table statedim is getting accessed via two different names: [citystate, statedim]".toLowerCase());
     }
@@ -640,14 +639,14 @@ public class TestJoinResolver extends TestQueryRewrite {
       rewrite(failingQuery, conf);
       Assert.fail("Should have failed. "
         + "The table citydim is getting accessed as both chain and without chain ");
-    } catch (SemanticException e) {
+    } catch (LensException e) {
       Assert.assertEquals(e.getMessage().toLowerCase(),
         "Table citydim is getting accessed via joinchain: citystate and no chain at all".toLowerCase());
     }
   }
 
   @Test
-  public void testMultiPaths() throws SemanticException, ParseException, LensException {
+  public void testMultiPaths() throws ParseException, LensException, HiveException {
     String query, hqlQuery, expected;
 
     query = "select testdim3.name, sum(msr2) from testcube where " + TWO_DAYS_RANGE;
@@ -744,13 +743,13 @@ public class TestJoinResolver extends TestQueryRewrite {
   }
 
   @Test
-  public void testUnreachableDim() throws ParseException, LensException {
-    SemanticException e1 = getSemanticExceptionInRewrite("select urdimid from testdim2", hconf);
+  public void testUnreachableDim() throws ParseException, LensException, HiveException {
+    LensException e1 = getLensExceptionInRewrite("select urdimid from testdim2", hconf);
     assertNotNull(e1);
-    assertEquals(e1.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.NO_DIM_HAS_COLUMN.getErrorCode());
+    assertEquals(e1.getErrorCode(), LensCubeErrorCode.NO_DIM_HAS_COLUMN.getValue());
 
-    SemanticException e2 = getSemanticExceptionInRewrite("select urdimid from testcube where " + TWO_DAYS_RANGE, hconf);
+    LensException e2 = getLensExceptionInRewrite("select urdimid from testcube where " + TWO_DAYS_RANGE, hconf);
     assertNotNull(e2);
-    assertEquals(e2.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.NO_CANDIDATE_FACT_AVAILABLE.getErrorCode());
+    assertEquals(e2.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getValue());
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
index d16f6a5..d69635d 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
@@ -21,12 +21,14 @@ package org.apache.lens.cube.parse;
 
 import java.io.IOException;
 
+import org.apache.lens.api.error.ErrorCollection;
+import org.apache.lens.api.error.ErrorCollectionFactory;
+import org.apache.lens.api.error.LensError;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.codehaus.jackson.map.ObjectMapper;
@@ -73,39 +75,58 @@ public abstract class TestQueryRewrite {
     SessionState.get().setCurrentDatabase(TestQueryRewrite.class.getSimpleName());
   }
 
-  protected String rewrite(String query, Configuration conf) throws SemanticException, ParseException, LensException {
+  protected String rewrite(String query, Configuration conf) throws LensException, ParseException {
     String rewrittenQuery = rewriteCtx(query, conf).toHQL();
     log.info("Rewritten query: {}", rewrittenQuery);
     return rewrittenQuery;
   }
 
   protected CubeQueryContext rewriteCtx(String query, Configuration conf)
-    throws SemanticException, ParseException, LensException {
+    throws LensException, ParseException {
     log.info("User query: {}", query);
     CubeQueryRewriter driver = new CubeQueryRewriter(conf, hconf);
     return driver.rewrite(query);
   }
 
-  static PruneCauses.BriefAndDetailedError extractPruneCause(SemanticException e) {
+  static PruneCauses.BriefAndDetailedError extractPruneCause(LensException e) throws ClassNotFoundException {
     try {
+      ErrorCollection errorCollection = new ErrorCollectionFactory().createErrorCollection();
+      final LensError lensError = errorCollection.getLensError(e.getErrorCode());
       return new ObjectMapper().readValue(
-        e.getMessage().substring(e.getMessage().indexOf("{"), e.getMessage().length()),
+          e.getFormattedErrorMsg(lensError).substring(e.getFormattedErrorMsg(lensError)
+              .indexOf("{"),  e.getFormattedErrorMsg(lensError).length()),
         new TypeReference<PruneCauses.BriefAndDetailedError>() {});
     } catch (IOException e1) {
       throw new RuntimeException("!!!");
     }
   }
 
-  protected SemanticException getSemanticExceptionInRewrite(String query, Configuration conf)
-    throws ParseException, LensException {
+  protected LensException getLensExceptionInRewrite(String query, Configuration conf)
+    throws LensException, ParseException {
     try {
       String hql = rewrite(query, conf);
       Assert.fail("Should have thrown exception. But rewrote the query : " + hql);
       // unreachable
       return null;
-    } catch (SemanticException e) {
-      log.error("Semantic exception in Rewrite.", e);
+    } catch (LensException e) {
+      log.error("Lens exception in Rewrite.", e);
       return e;
     }
   }
+
+  protected String getLensExceptionErrorMessageInRewrite(String query, Configuration conf) throws LensException,
+      ParseException, ClassNotFoundException {
+    try {
+      String hql = rewrite(query, conf);
+      Assert.fail("Should have thrown exception. But rewrote the query : " + hql);
+      // unreachable
+      return null;
+    } catch (LensException e) {
+      ErrorCollection errorCollection = new ErrorCollectionFactory().createErrorCollection();
+      final LensError lensError = errorCollection.getLensError(e.getErrorCode());
+      log.error("Lens exception in Rewrite.", e);
+      return e.getFormattedErrorMsg(lensError);
+    }
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
index 0805fb5..4d3a3dc 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
@@ -28,8 +28,8 @@ import org.apache.lens.driver.cube.RewriterPlan;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.testng.Assert;
 import org.testng.annotations.Test;
@@ -108,7 +108,7 @@ public class TestRewriterPlan extends TestQueryRewrite {
   }
 
   @Test
-  public void testUnimplemented() throws SemanticException, ParseException, LensException {
+  public void testUnimplemented() throws ParseException, LensException, HiveException {
     CubeQueryContext ctx = rewriteCtx("cube select SUM(msr2) from testCube where " + TWO_DAYS_RANGE, conf);
     ctx.toHQL();
     RewriterPlan plan = new RewriterPlan(Collections.singleton(ctx));

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java
index 26836fe..3c3aa9c 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java
@@ -23,14 +23,14 @@ import static org.apache.lens.cube.parse.CubeTestSetup.*;
 
 import java.util.List;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.TestCubeMetastoreClient;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.testng.Assert;
 import org.testng.annotations.AfterTest;
@@ -54,7 +54,7 @@ public class TestTimeRangeExtractor extends TestQueryRewrite {
   }
 
   public static String rewrite(CubeQueryRewriter driver, String query)
-    throws SemanticException, ParseException, LensException {
+    throws ParseException, LensException, HiveException {
     CubeQueryContext rewrittenQuery = driver.rewrite(query);
     return rewrittenQuery.toHQL();
   }
@@ -66,9 +66,9 @@ public class TestTimeRangeExtractor extends TestQueryRewrite {
       // this should throw exception because from date is after to date
       driver.rewrite("SELECT cityid, testCube.msr2 from" + " testCube where " + timeRange2);
       Assert.fail("Should not reach here");
-    } catch (SemanticException exc) {
+    } catch (LensException exc) {
       Assert.assertNotNull(exc);
-      Assert.assertEquals(exc.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.FROM_AFTER_TO.getErrorCode());
+      Assert.assertEquals(exc.getErrorCode(), LensCubeErrorCode.FROM_AFTER_TO.getValue());
     }
   }
 
@@ -79,9 +79,9 @@ public class TestTimeRangeExtractor extends TestQueryRewrite {
       // this should throw exception because from date and to date are same
       driver.rewrite("SELECT cityid, testCube.msr2 from" + " testCube where " + equalTimeRange);
       Assert.fail("Should not reach here");
-    } catch (SemanticException exc) {
+    } catch (LensException exc) {
       Assert.assertNotNull(exc);
-      Assert.assertEquals(exc.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.INVALID_TIME_RANGE.getErrorCode());
+      Assert.assertEquals(exc.getErrorCode(), LensCubeErrorCode.INVALID_TIME_RANGE.getValue());
     }
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
index a30a114..cb27d50 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
@@ -30,7 +30,6 @@ import org.apache.lens.server.api.error.LensException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
@@ -57,9 +56,9 @@ public class TestTimeRangeResolver extends TestQueryRewrite {
   }
 
   @Test
-  public void testFactValidity() throws ParseException, SemanticException, LensException {
-    SemanticException e =
-      getSemanticExceptionInRewrite("cube select msr2 from " + cubeName + " where " + LAST_YEAR_RANGE,
+  public void testFactValidity() throws ParseException, LensException, HiveException, ClassNotFoundException {
+    LensException e =
+      getLensExceptionInRewrite("cube select msr2 from " + cubeName + " where " + LAST_YEAR_RANGE,
         getConf());
     PruneCauses.BriefAndDetailedError causes = extractPruneCause(e);
     assertTrue(causes.getBrief().contains("Columns [msr2] are not present in any table"));

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
index 453a102..e5540d2 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
@@ -24,11 +24,10 @@ import java.text.SimpleDateFormat;
 import java.util.LinkedHashSet;
 import java.util.Set;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.FactPartition;
 import org.apache.lens.cube.metadata.UpdatePeriod;
-
-import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.lens.server.api.error.LensException;
 
 import org.testng.Assert;
 import org.testng.annotations.Test;
@@ -57,11 +56,11 @@ public abstract class TestTimeRangeWriter {
     answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null));
     answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.HOURLY, null, null));
 
-    SemanticException th = null;
+    LensException th = null;
     String whereClause = null;
     try {
       whereClause = getTimerangeWriter().getTimeRangeWhereClause(null, "test", answeringParts);
-    } catch (SemanticException e) {
+    } catch (LensException e) {
       log.error("Semantic exception while testing disjoint parts.", e);
       th = e;
     }
@@ -69,7 +68,7 @@ public abstract class TestTimeRangeWriter {
     if (failDisjoint()) {
       Assert.assertNotNull(th);
       Assert
-        .assertEquals(th.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.CANNOT_USE_TIMERANGE_WRITER.getErrorCode());
+        .assertEquals(th.getErrorCode(), LensCubeErrorCode.CANNOT_USE_TIMERANGE_WRITER.getValue());
     } else {
       Assert.assertNull(th);
       validateDisjoint(whereClause, null);
@@ -84,7 +83,7 @@ public abstract class TestTimeRangeWriter {
     th = null;
     try {
       whereClause = getTimerangeWriter().getTimeRangeWhereClause(null, "test", answeringParts);
-    } catch (SemanticException e) {
+    } catch (LensException e) {
       th = e;
     }
 
@@ -98,7 +97,7 @@ public abstract class TestTimeRangeWriter {
   }
 
   @Test
-  public void testConsecutiveDayParts() throws SemanticException {
+  public void testConsecutiveDayParts() throws LensException {
     Set<FactPartition> answeringParts = new LinkedHashSet<FactPartition>();
     answeringParts.add(new FactPartition("dt", CubeTestSetup.ONE_DAY_BACK, UpdatePeriod.DAILY, null, null));
     answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null));
@@ -117,7 +116,7 @@ public abstract class TestTimeRangeWriter {
   }
 
   @Test
-  public void testSinglePart() throws SemanticException {
+  public void testSinglePart() throws LensException {
     Set<FactPartition> answeringParts = new LinkedHashSet<FactPartition>();
     answeringParts.add(new FactPartition("dt", CubeTestSetup.ONE_DAY_BACK, UpdatePeriod.DAILY, null, null));
     String whereClause = getTimerangeWriter().getTimeRangeWhereClause(null, "test", answeringParts);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
index 8da740b..2083ef9 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
@@ -28,11 +28,11 @@ import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.UpdatePeriod;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.testng.Assert;
 import org.testng.annotations.BeforeTest;
@@ -77,17 +77,17 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
 
   @Test
   public void testCubeQueryContinuousUpdatePeriod() throws Exception {
-    SemanticException th = null;
+    LensException th = null;
     try {
       rewrite("cube select" + " SUM(msr2) from testCube where " + TWO_DAYS_RANGE, conf);
-    } catch (SemanticException e) {
+    } catch (LensException e) {
       th = e;
       log.error("Semantic exception while testing cube query.", e);
     }
     if (!CubeTestSetup.isZerothHour()) {
       Assert.assertNotNull(th);
       Assert
-        .assertEquals(th.getCanonicalErrorMsg().getErrorCode(), ErrorMsg.CANNOT_USE_TIMERANGE_WRITER.getErrorCode());
+      .assertEquals(th.getErrorCode(), LensCubeErrorCode.CANNOT_USE_TIMERANGE_WRITER.getValue());
     }
     // hourly partitions for two days
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
index 9222d87..3415a1e 100644
--- a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
+++ b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
@@ -37,8 +37,6 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.testng.Assert;
@@ -214,13 +212,11 @@ public class TestColumnarSQLRewriter {
   /**
    * Test no rewrite.
    *
-   * @throws ParseException    the parse exception
-   * @throws SemanticException the semantic exception
    * @throws LensException     the lens exception
    */
   @Test
   // Testing multiple queries in one instance
-  public void testNoRewrite() throws ParseException, SemanticException, LensException {
+  public void testNoRewrite() throws LensException {
 
     SessionState.start(hconf);
 
@@ -256,12 +252,10 @@ public class TestColumnarSQLRewriter {
   /**
    * Test join cond.
    *
-   * @throws ParseException    the parse exception
-   * @throws SemanticException the semantic exception
    * @throws LensException     the lens exception
    */
   @Test
-  public void testJoinCond() throws ParseException, SemanticException, LensException {
+  public void testJoinCond() throws LensException {
 
     String query =
 
@@ -290,12 +284,10 @@ public class TestColumnarSQLRewriter {
   /**
    * Test all filter cond.
    *
-   * @throws ParseException    the parse exception
-   * @throws SemanticException the semantic exception
    * @throws LensException     the lens exception
    */
   @Test
-  public void testAllFilterCond() throws ParseException, SemanticException, LensException {
+  public void testAllFilterCond() throws LensException {
 
     String query =
 
@@ -322,12 +314,10 @@ public class TestColumnarSQLRewriter {
   /**
    * Test all agg column.
    *
-   * @throws ParseException    the parse exception
-   * @throws SemanticException the semantic exception
    * @throws LensException     the lens exception
    */
   @Test
-  public void testAllAggColumn() throws ParseException, SemanticException, LensException {
+  public void testAllAggColumn() throws LensException {
 
     String query =
 
@@ -355,12 +345,10 @@ public class TestColumnarSQLRewriter {
   /**
    * Test all fact keys.
    *
-   * @throws ParseException    the parse exception
-   * @throws SemanticException the semantic exception
    * @throws LensException     the lens exception
    */
   @Test
-  public void testAllFactKeys() throws ParseException, SemanticException, LensException {
+  public void testAllFactKeys() throws LensException {
 
     String query =
 
@@ -385,12 +373,10 @@ public class TestColumnarSQLRewriter {
   /**
    * Test fact sub queries.
    *
-   * @throws ParseException    the parse exception
-   * @throws SemanticException the semantic exception
    * @throws LensException     the lens exception
    */
   @Test
-  public void testFactSubQueries() throws ParseException, SemanticException, LensException {
+  public void testFactSubQueries() throws LensException {
 
     String query =
 
@@ -420,12 +406,10 @@ public class TestColumnarSQLRewriter {
   /**
    * Test rewritten query.
    *
-   * @throws ParseException    the parse exception
-   * @throws SemanticException the semantic exception
    * @throws LensException     the lens exception
    */
   @Test
-  public void testRewrittenQuery() throws ParseException, SemanticException, LensException {
+  public void testRewrittenQuery() throws LensException {
 
     String query =
 
@@ -472,12 +456,10 @@ public class TestColumnarSQLRewriter {
   /**
    * Test union query.
    *
-   * @throws ParseException    the parse exception
-   * @throws SemanticException the semantic exception
    * @throws LensException     the lens exception
    */
   @Test
-  public void testUnionQuery() throws ParseException, SemanticException, LensException {
+  public void testUnionQuery() throws LensException {
 
     String query =
 
@@ -544,7 +526,7 @@ public class TestColumnarSQLRewriter {
   }
 
   @Test
-  public void testNoAggCol() throws ParseException, SemanticException, LensException {
+  public void testNoAggCol() throws LensException {
 
     String query = "SELECT  distinct ( location_dim . id ) FROM location_dim "
       + "location_dim join time_dim time_dim on location_dim.time_id = time_dim.id "
@@ -564,7 +546,7 @@ public class TestColumnarSQLRewriter {
   }
 
   @Test
-  public void testSkipExpression() throws ParseException, SemanticException, LensException {
+  public void testSkipExpression() throws LensException {
 
     String query = "select fact.time_key,time_dim.day_of_week,time_dim.day,item_dim.item_key, "
         + "sum(case when fact.dollars_sold = 0 then 0.0 else fact.dollars_sold end) dollars_sold, "
@@ -602,7 +584,7 @@ public class TestColumnarSQLRewriter {
   }
 
   @Test
-  public void testAlias() throws ParseException, SemanticException, LensException {
+  public void testAlias() throws LensException {
 
     String query = "select fact.time_key,time_dim.day_of_week,time_dim.day,item_dim.item_key, "
         + "sum(case when fact.dollars_sold = 0 then 0.0 end) as dollars_sold, "
@@ -654,7 +636,7 @@ public class TestColumnarSQLRewriter {
   }
 
   @Test
-  public void testFilter() throws ParseException, SemanticException, LensException {
+  public void testFilter() throws LensException {
 
     String query = "select max(fact.dollars_sold) from sales_fact fact "
         + "inner join time_dim time_dim on fact.time_key = time_dim.time_key "
@@ -696,7 +678,7 @@ public class TestColumnarSQLRewriter {
   }
 
   @Test
-  public void testCountReplace() throws ParseException, SemanticException, LensException {
+  public void testCountReplace() throws LensException {
 
     String query = "SELECT  count(location_dim.name) FROM location_dim "
         + "location_dim join time_dim time_dim on location_dim.time_id = time_dim.id "
@@ -717,7 +699,7 @@ public class TestColumnarSQLRewriter {
   }
 
   @Test
-  public void testReplaceAlias() throws ParseException, SemanticException, LensException {
+  public void testReplaceAlias() throws LensException {
 
     String query = "select fact.time_key,time_dim.day_of_week,time_dim.day,"
         + "case when sum(fact.dollars_sold) = 0 then 0.0 else sum(fact.dollars_sold) end dollars_sold "
@@ -747,7 +729,7 @@ public class TestColumnarSQLRewriter {
 
 
   @Test
-  public void testSkipSnowflakeJoinFact() throws ParseException, SemanticException, LensException {
+  public void testSkipSnowflakeJoinFact() throws LensException {
 
     String query = "SELECT (dim1 . date) date , sum((f . msr1)) msr1 , (dim2 . name) dim2_name, "
         + "(dim3 . name) dim3_name , (dim4 . name) dim4_name " + "FROM fact f "
@@ -779,7 +761,7 @@ public class TestColumnarSQLRewriter {
 
 
   @Test
-  public void testFactFilterPushDown() throws ParseException, SemanticException, LensException {
+  public void testFactFilterPushDown() throws LensException {
 
     String query = "SELECT (dim1 . date) date , sum((f . msr1)) msr1 , (dim2 . name) dim2_name  "
         + "FROM fact f  INNER JOIN dim1 dim1 ON f.dim1_id = dim1.id  and f.m2 = '1234' "
@@ -807,7 +789,7 @@ public class TestColumnarSQLRewriter {
   }
 
   @Test
-  public void testOrderByAlias() throws ParseException, SemanticException, LensException {
+  public void testOrderByAlias() throws LensException {
 
     String query = "SELECT (dim1 . date) dim1_date , sum((f . msr1)) msr1 , (dim2 . name) dim2_name  "
         + "FROM fact f  INNER JOIN dim1 dim1 ON f.dim1_id = dim1.id  and f.m2 = '1234' "
@@ -836,7 +818,7 @@ public class TestColumnarSQLRewriter {
   }
 
   @Test
-  public void testExcludeJoinFilterFromFactQuery() throws ParseException, SemanticException, LensException {
+  public void testExcludeJoinFilterFromFactQuery() throws LensException {
 
     String query = "SELECT (dim1 . date) dim1_date , sum((f . msr1)) msr1 , (dim2 . name) dim2_name  "
         + "FROM fact f  INNER JOIN dim1 dim1 ON f.dim1_id = dim1.id  and f.m2 = '1234' "

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-server-api/src/main/java/org/apache/lens/server/api/error/LensException.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/error/LensException.java b/lens-server-api/src/main/java/org/apache/lens/server/api/error/LensException.java
index 123b6ee..603d7cb 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/error/LensException.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/error/LensException.java
@@ -119,6 +119,16 @@ public class LensException extends Exception {
   }
 
   /**
+   * Constructs a new Lens Exception with error code and error msg formatting arguments.
+   *
+   * @see Exception#Exception(Throwable)
+   */
+  public LensException(final int errorCode, @NonNull final Object... errorMsgFormattingArgs) {
+    this(null, errorCode, null, errorMsgFormattingArgs);
+  }
+
+
+  /**
    * Constructs a new Lens Exception with exception error message, error code, cause and error msg formatting arguments.
    *
    * @see Exception#Exception(Throwable)
@@ -179,7 +189,7 @@ public class LensException extends Exception {
     return false;
   }
 
-  protected String getFormattedErrorMsg(LensError lensError) {
+  public String getFormattedErrorMsg(LensError lensError) {
 
     return lensError.getFormattedErrorMsg(errorMsgFormattingArgs);
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-server/src/main/java/org/apache/lens/server/error/UnSupportedQuerySubmitOpException.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/error/UnSupportedQuerySubmitOpException.java b/lens-server/src/main/java/org/apache/lens/server/error/UnSupportedQuerySubmitOpException.java
index 4ab26ec..366b306 100644
--- a/lens-server/src/main/java/org/apache/lens/server/error/UnSupportedQuerySubmitOpException.java
+++ b/lens-server/src/main/java/org/apache/lens/server/error/UnSupportedQuerySubmitOpException.java
@@ -39,7 +39,7 @@ public class UnSupportedQuerySubmitOpException extends LensException {
   }
 
   @Override
-  protected String getFormattedErrorMsg(LensError lensError) {
+  public String getFormattedErrorMsg(LensError lensError) {
 
     final String supportedOpsStr = supportedOps.getSupportedOperationsAsString();
     return lensError.getFormattedErrorMsg(supportedOpsStr);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java b/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java
index 5844010..6c464fb 100644
--- a/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java
+++ b/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java
@@ -40,7 +40,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
@@ -84,11 +83,10 @@ public final class RewriteUtil {
    *
    * @param query the query
    * @return the list
-   * @throws SemanticException the semantic exception
    * @throws LensException     the lensexception
    */
   static List<CubeQueryInfo> findCubePositions(String query, HiveConf conf)
-    throws SemanticException, LensException {
+    throws LensException {
 
     ASTNode ast = HQLParser.parseHQL(query, conf);
     if (log.isDebugEnabled()) {
@@ -108,10 +106,10 @@ public final class RewriteUtil {
    * @param ast           the ast
    * @param cubeQueries   the cube queries
    * @param originalQuery the original query
-   * @throws SemanticException the semantic exception
+   * @throws LensException the lens exception
    */
   private static void findCubePositions(ASTNode ast, List<CubeQueryInfo> cubeQueries, String originalQuery)
-    throws SemanticException {
+    throws LensException {
     int childCount = ast.getChildCount();
     if (ast.getToken() != null) {
       if (log.isDebugEnabled() && ast.getChild(0) != null) {
@@ -140,7 +138,7 @@ public final class RewriteUtil {
             } else {
               // Not expected to reach here
               log.warn("Unknown query pattern found with AST:{}", ast.dump());
-              throw new SemanticException("Unknown query pattern");
+              throw new LensException("Unknown query pattern");
             }
           } else {
             // last child of union all query
@@ -195,9 +193,9 @@ public final class RewriteUtil {
    *
    * @param queryConf the query conf
    * @return the rewriter
-   * @throws SemanticException the semantic exception
+   * @throws LensException the lens exception
    */
-  static CubeQueryRewriter getCubeRewriter(Configuration queryConf, HiveConf hconf) throws SemanticException {
+  static CubeQueryRewriter getCubeRewriter(Configuration queryConf, HiveConf hconf) throws LensException {
     return new CubeQueryRewriter(queryConf, hconf);
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index 131f008..b3f5d93 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -18,7 +18,7 @@
  */
 package org.apache.lens.server.query;
 
-import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
+import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
 
 import static org.apache.lens.server.common.RestAPITestUtil.execute;
 import static org.apache.lens.server.common.RestAPITestUtil.waitForQueryToFinish;
@@ -36,11 +36,11 @@ import javax.ws.rs.core.*;
 import org.apache.lens.api.APIResult;
 import org.apache.lens.api.LensConf;
 import org.apache.lens.api.LensSessionHandle;
-import org.apache.lens.api.error.LensCommonErrorCode;
 import org.apache.lens.api.jaxb.LensJAXBContextResolver;
 import org.apache.lens.api.query.*;
 import org.apache.lens.api.query.QueryStatus.Status;
 import org.apache.lens.api.result.*;
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.driver.hive.HiveDriver;
 import org.apache.lens.server.LensJerseyTest;
 import org.apache.lens.server.LensServices;
@@ -1425,9 +1425,9 @@ public class TestQueryService extends LensJerseyTest {
     final Response response = target.request()
       .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
 
-    LensErrorTO expectedLensErrorTO = LensErrorTO.composedOf(LensCommonErrorCode.INTERNAL_SERVER_ERROR.getValue(),
-      "Internal Server Error.", TestDataUtils.MOCK_STACK_TRACE);
-    ErrorResponseExpectedData expectedData = new ErrorResponseExpectedData(INTERNAL_SERVER_ERROR, expectedLensErrorTO);
+    LensErrorTO expectedLensErrorTO = LensErrorTO.composedOf(LensCubeErrorCode.NEITHER_CUBE_NOR_DIMENSION.getValue(),
+      "Neither cube nor dimensions accessed in the query", TestDataUtils.MOCK_STACK_TRACE);
+    ErrorResponseExpectedData expectedData = new ErrorResponseExpectedData(BAD_REQUEST, expectedLensErrorTO);
 
     expectedData.verify(response);
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java b/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java
index affefe1..7be9793 100644
--- a/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java
+++ b/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java
@@ -34,6 +34,7 @@ import org.apache.lens.server.api.query.QueryContext;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.*;
 
 import org.mockito.Matchers;
@@ -76,7 +77,7 @@ public class TestRewriting {
   // change the number, if more tests for success needs to be added
   static final int NUM_SUCCESS = 36;
 
-  private CubeQueryRewriter getMockedRewriter() throws SemanticException, ParseException, LensException {
+  private CubeQueryRewriter getMockedRewriter() throws ParseException, LensException, HiveException {
     CubeQueryRewriter mockwriter = Mockito.mock(CubeQueryRewriter.class);
     Mockito.when(mockwriter.rewrite(Matchers.any(String.class))).thenAnswer(new Answer<CubeQueryContext>() {
       @Override
@@ -106,11 +107,11 @@ public class TestRewriting {
    *
    * @param query the query
    * @return the mocked cube context
-   * @throws SemanticException the semantic exception
+   * @throws LensException the lens exception
    * @throws ParseException    the parse exception
    */
   private CubeQueryContext getMockedCubeContext(String query)
-    throws SemanticException, ParseException, LensException {
+    throws ParseException, LensException {
     CubeQueryContext context = Mockito.mock(CubeQueryContext.class);
     Mockito.when(context.toHQL()).thenReturn(query.substring(4));
     Mockito.when(context.toAST(Matchers.any(Context.class))).thenReturn(HQLParser.parseHQL(query.substring(4), hconf));
@@ -122,10 +123,10 @@ public class TestRewriting {
    *
    * @param ast the ast
    * @return the mocked cube context
-   * @throws SemanticException the semantic exception
    * @throws ParseException    the parse exception
+   * @throws LensException  the lens exception
    */
-  private CubeQueryContext getMockedCubeContext(ASTNode ast) throws SemanticException, ParseException {
+  private CubeQueryContext getMockedCubeContext(ASTNode ast) throws ParseException, LensException {
     CubeQueryContext context = Mockito.mock(CubeQueryContext.class);
     if (ast.getToken().getType() == HiveParser.TOK_QUERY) {
       if (((ASTNode) ast.getChild(0)).getToken().getType() == HiveParser.KW_CUBE) {
@@ -167,11 +168,11 @@ public class TestRewriting {
    * Test cube query.
    *
    * @throws ParseException    the parse exception
-   * @throws SemanticException the semantic exception
    * @throws LensException     the lens exception
+   * @throws HiveException
    */
   @Test
-  public void testCubeQuery() throws ParseException, SemanticException, LensException {
+  public void testCubeQuery() throws ParseException, LensException, HiveException {
     List<LensDriver> drivers = new ArrayList<LensDriver>();
     MockDriver driver = new MockDriver();
     LensConf lensConf = new LensConf();


[2/3] incubator-lens git commit: LENS-187 : Move cube specific error message codes from Hive code to Lens

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
index 5bdb412..1a347b2 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
@@ -35,12 +35,11 @@ import org.apache.lens.cube.metadata.ExprColumn.ExprSpec;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.cube.parse.HQLParser.ASTNodeVisitor;
 import org.apache.lens.cube.parse.HQLParser.TreeNode;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.antlr.runtime.CommonToken;
 
@@ -79,7 +78,7 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     ExpressionContext(CubeQueryContext cubeql, ExprColumn exprCol, AbstractBaseTable srcTable, String srcAlias)
-      throws SemanticException {
+      throws LensException {
       this.srcTable = srcTable;
       this.exprCol = exprCol;
       this.srcAlias = srcAlias;
@@ -89,19 +88,16 @@ class ExpressionResolver implements ContextRewriter {
       resolveColumnsAndAlias(cubeql);
       log.debug("All exprs for {} are {}", exprCol.getName(), allExprs);
     }
-    private void resolveColumnsAndAlias(CubeQueryContext cubeql) throws SemanticException {
+
+    private void resolveColumnsAndAlias(CubeQueryContext cubeql) throws LensException {
       for (ExprSpecContext esc : allExprs) {
         esc.resolveColumns(cubeql);
         esc.replaceAliasInAST(cubeql);
         for (String table : esc.getTblAliasToColumns().keySet()) {
-          try {
-            if (!CubeQueryContext.DEFAULT_TABLE.equalsIgnoreCase(table) && !srcAlias.equals(table)) {
-              cubeql.addOptionalDimTable(table, null,
-                false, null, false, esc.getTblAliasToColumns().get(table).toArray(new String[0]));
-              esc.exprDims.add((Dimension) cubeql.getCubeTableForAlias(table));
-            }
-          } catch (HiveException e) {
-            throw new SemanticException(e);
+          if (!CubeQueryContext.DEFAULT_TABLE.equalsIgnoreCase(table) && !srcAlias.equals(table)) {
+            cubeql.addOptionalDimTable(table, null, false, null, false,
+                esc.getTblAliasToColumns().get(table).toArray(new String[0]));
+            esc.exprDims.add((Dimension) cubeql.getCubeTableForAlias(table));
           }
         }
       }
@@ -109,14 +105,14 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     private void resolveColumnsAndReplaceAlias(CubeQueryContext cubeql, Set<ExprSpecContext> exprs)
-      throws SemanticException {
+      throws LensException {
       Set<ExprSpecContext> nestedExpressions = new LinkedHashSet<ExprSpecContext>();
       for (ExprSpecContext esc : exprs) {
         for (Map.Entry<String, Set<String>> entry : esc.getTblAliasToColumns().entrySet()) {
           if (entry.getKey().equals(CubeQueryContext.DEFAULT_TABLE)) {
             continue;
           }
-          AbstractBaseTable baseTable = (AbstractBaseTable)cubeql.getCubeTableForAlias(entry.getKey());
+          AbstractBaseTable baseTable = (AbstractBaseTable) cubeql.getCubeTableForAlias(entry.getKey());
           Set<String> exprCols = new HashSet<String>();
           for (String col : entry.getValue()) {
             // col is an expression
@@ -132,14 +128,10 @@ class ExpressionResolver implements ContextRewriter {
         esc.resolveColumns(cubeql);
         esc.replaceAliasInAST(cubeql);
         for (String table : esc.getTblAliasToColumns().keySet()) {
-          try {
-            if (!CubeQueryContext.DEFAULT_TABLE.equalsIgnoreCase(table) && !srcAlias.equals(table)) {
-              cubeql.addOptionalDimTable(table, null, false, null, false,
+          if (!CubeQueryContext.DEFAULT_TABLE.equalsIgnoreCase(table) && !srcAlias.equals(table)) {
+            cubeql.addOptionalDimTable(table, null, false, null, false,
                 esc.getTblAliasToColumns().get(table).toArray(new String[0]));
-              esc.exprDims.add((Dimension) cubeql.getCubeTableForAlias(table));
-            }
-          } catch (HiveException e) {
-            throw new SemanticException(e);
+            esc.exprDims.add((Dimension) cubeql.getCubeTableForAlias(table));
           }
         }
       }
@@ -147,7 +139,7 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     private void addAllNestedExpressions(CubeQueryContext cubeql, ExprSpecContext baseEsc, AbstractBaseTable baseTable,
-      Set<ExprSpecContext> nestedExpressions, Set<String> exprCols) throws SemanticException {
+      Set<ExprSpecContext> nestedExpressions, Set<String> exprCols) throws LensException {
       for (String col : exprCols) {
         Set<ExprSpecContext> replacedExpressions = new LinkedHashSet<ExprSpecContext>();
         for (ExprSpec es : baseTable.getExpressionByName(col).getExpressionSpecs()) {
@@ -168,7 +160,8 @@ class ExpressionResolver implements ContextRewriter {
     void addDirectlyAvailable(CandidateTable cTable) {
       directlyAvailableIn.add(cTable);
     }
-    void addEvaluable(CubeQueryContext cubeql, CandidateTable cTable, ExprSpecContext esc) throws SemanticException {
+
+    void addEvaluable(CubeQueryContext cubeql, CandidateTable cTable, ExprSpecContext esc) throws LensException {
       Set<ExprSpecContext> evalSet = evaluableExpressions.get(cTable);
       if (evalSet == null) {
         evalSet = new LinkedHashSet<ExprSpecContext>();
@@ -176,14 +169,10 @@ class ExpressionResolver implements ContextRewriter {
       }
       // add optional dimensions involved in expressions
       for (String table : esc.getTblAliasToColumns().keySet()) {
-        try {
-          if (!CubeQueryContext.DEFAULT_TABLE.equalsIgnoreCase(table) && !srcAlias.equals(table)) {
-            cubeql.addOptionalExprDimTable(table, exprCol.getName(), srcAlias, cTable,
+        if (!CubeQueryContext.DEFAULT_TABLE.equalsIgnoreCase(table) && !srcAlias.equals(table)) {
+          cubeql.addOptionalExprDimTable(table, exprCol.getName(), srcAlias, cTable,
               esc.getTblAliasToColumns().get(table).toArray(new String[0]));
-            esc.exprDims.add((Dimension) cubeql.getCubeTableForAlias(table));
-          }
-        } catch (HiveException e) {
-          throw new SemanticException(e);
+          esc.exprDims.add((Dimension) cubeql.getCubeTableForAlias(table));
         }
       }
       evalSet.add(esc);
@@ -228,19 +217,19 @@ class ExpressionResolver implements ContextRewriter {
     @Getter
     private Map<String, Set<String>> tblAliasToColumns = new HashMap<String, Set<String>>();
 
-    ExprSpecContext(ExprSpec exprSpec, CubeQueryContext cubeql) throws SemanticException {
+    ExprSpecContext(ExprSpec exprSpec, CubeQueryContext cubeql) throws LensException {
       // replaces table names in expression with aliases in the query
       finalAST = replaceAlias(exprSpec.getASTNode(), cubeql);
       exprSpecs.add(exprSpec);
     }
     public ExprSpecContext(ExprSpecContext nested, ExprSpec current, ASTNode node,
-      CubeQueryContext cubeql) throws SemanticException {
+      CubeQueryContext cubeql) throws LensException {
       exprSpecs.addAll(nested.exprSpecs);
       exprSpecs.add(current);
       finalAST = replaceAlias(node, cubeql);
     }
     public void replaceAliasInAST(CubeQueryContext cubeql)
-      throws SemanticException {
+      throws LensException {
       AliasReplacer.extractTabAliasForCol(cubeql, this);
       AliasReplacer.replaceAliases(finalAST, 0, cubeql.getColToTableAlias());
     }
@@ -253,7 +242,7 @@ class ExpressionResolver implements ContextRewriter {
       cols.add(column);
     }
 
-    void resolveColumns(CubeQueryContext cubeql) throws SemanticException {
+    void resolveColumns(CubeQueryContext cubeql) throws LensException {
       // finds all columns and table aliases in the expression
       ColumnResolver.getColsForTree(cubeql, finalAST, this);
     }
@@ -371,7 +360,7 @@ class ExpressionResolver implements ContextRewriter {
 
     //updates all expression specs which are evaluable
     public void updateEvaluables(String expr, CandidateTable cTable)
-      throws SemanticException {
+      throws LensException {
       String alias = cubeql.getAliasForTableName(cTable.getBaseTable().getName());
       ExpressionContext ec = getExpressionContext(expr, alias);
       if (cTable.getColumns().contains(expr)) {
@@ -450,7 +439,7 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     public Set<Dimension> rewriteExprCtx(CandidateFact cfact, Map<Dimension, CandidateDim> dimsToQuery,
-      boolean replaceFact) throws SemanticException {
+      boolean replaceFact) throws LensException {
       Set<Dimension> exprDims = new HashSet<Dimension>();
       if (!allExprsQueried.isEmpty()) {
         // pick expressions for fact
@@ -476,7 +465,7 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     private void replacePickedExpressions(CandidateFact cfact, boolean replaceFact)
-      throws SemanticException {
+      throws LensException {
       if (replaceFact) {
         replaceAST(cubeql, cfact.getSelectAST());
         replaceAST(cubeql, cfact.getWhereAST());
@@ -493,14 +482,14 @@ class ExpressionResolver implements ContextRewriter {
       replaceAST(cubeql, cubeql.getOrderByAST());
     }
 
-    private void replaceAST(final CubeQueryContext cubeql, ASTNode node) throws SemanticException {
+    private void replaceAST(final CubeQueryContext cubeql, ASTNode node) throws LensException {
       if (node == null) {
         return;
       }
       // Traverse the tree and resolve expression columns
       HQLParser.bft(node, new ASTNodeVisitor() {
         @Override
-        public void visit(TreeNode visited) throws SemanticException {
+        public void visit(TreeNode visited) throws LensException {
           ASTNode node = visited.getNode();
           int childcount = node.getChildCount();
           for (int i = 0; i < childcount; i++) {
@@ -637,7 +626,7 @@ class ExpressionResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     ExpressionResolverContext exprCtx = cubeql.getExprCtx();
     if (exprCtx == null) {
       exprCtx = new ExpressionResolverContext(cubeql);
@@ -735,7 +724,7 @@ class ExpressionResolver implements ContextRewriter {
     }
   }
 
-  private static ASTNode replaceAlias(final ASTNode expr, final CubeQueryContext cubeql) throws SemanticException {
+  private static ASTNode replaceAlias(final ASTNode expr, final CubeQueryContext cubeql) throws LensException {
     ASTNode finalAST = HQLParser.copyAST(expr);
     HQLParser.bft(finalAST, new ASTNodeVisitor() {
       @Override
@@ -762,14 +751,14 @@ class ExpressionResolver implements ContextRewriter {
   }
 
   private static void replaceColumnInAST(ASTNode expr, final String toReplace, final ASTNode columnAST)
-    throws SemanticException {
+    throws LensException {
     if (expr == null) {
       return;
     }
     // Traverse the tree and resolve expression columns
     HQLParser.bft(expr, new ASTNodeVisitor() {
       @Override
-      public void visit(TreeNode visited) throws SemanticException {
+      public void visit(TreeNode visited) throws LensException {
         ASTNode node = visited.getNode();
         int childcount = node.getChildCount();
         for (int i = 0; i < childcount; i++) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java
index 623c58b..6c44233 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java
@@ -22,8 +22,7 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.lens.cube.metadata.Dimension;
-
-import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.lens.server.api.error.LensException;
 
 import lombok.extern.slf4j.Slf4j;
 
@@ -37,7 +36,7 @@ public class FactHQLContext extends DimHQLContext {
   private final Set<Dimension> factDims;
 
   FactHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, Set<Dimension> factDims,
-    CubeQueryContext query) throws SemanticException {
+    CubeQueryContext query) throws LensException {
     super(query, dimsToQuery, factDims, fact.getSelectTree(), fact.getWhereTree(), fact.getGroupByTree(), null, fact
       .getHavingTree(), null);
     this.fact = fact;
@@ -55,7 +54,7 @@ public class FactHQLContext extends DimHQLContext {
     return fact;
   }
 
-  protected String getFromTable() throws SemanticException {
+  protected String getFromTable() throws LensException {
     return query.getQBFromString(fact, getDimsToQuery());
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
index 1a1232b..ab7a6d8 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
@@ -27,11 +27,11 @@ import org.apache.lens.cube.metadata.DerivedCube;
 import org.apache.lens.cube.metadata.ReferencedDimAtrribute;
 import org.apache.lens.cube.metadata.ReferencedDimAtrribute.ChainRefCol;
 import org.apache.lens.cube.parse.ExpressionResolver.ExprSpecContext;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import com.google.common.collect.ImmutableSet;
 
@@ -41,11 +41,11 @@ import com.google.common.collect.ImmutableSet;
 public class FieldValidator implements ContextRewriter {
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws FieldsCannotBeQueriedTogetherException, SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     validateFields(cubeql);
   }
 
-  public void validateFields(CubeQueryContext cubeql) throws FieldsCannotBeQueriedTogetherException, SemanticException {
+  public void validateFields(CubeQueryContext cubeql) throws LensException {
     CubeInterface cube = cubeql.getCube();
     if (cube == null) {
       return;
@@ -57,7 +57,7 @@ public class FieldValidator implements ContextRewriter {
       try {
         dcubes = cubeql.getMetastoreClient().getAllDerivedQueryableCubes(cube);
       } catch (HiveException e) {
-        throw new SemanticException(e);
+        throw new LensException(e);
       }
 
       ImmutableSet<String> queriedTimeDimCols = cubeql.getQueriedTimeDimCols();
@@ -135,7 +135,7 @@ public class FieldValidator implements ContextRewriter {
                                                  final ASTNode tree,
                                                  final Set<String> dimAttributes,
                                                  final Set<String> chainSourceColumns,
-                                                 final Set<String> nonQueryableColumns) throws SemanticException {
+                                                 final Set<String> nonQueryableColumns) throws LensException {
     if (tree == null || !cubeql.hasCubeInQuery()) {
       return;
     }
@@ -144,7 +144,7 @@ public class FieldValidator implements ContextRewriter {
 
     HQLParser.bft(tree, new HQLParser.ASTNodeVisitor() {
       @Override
-      public void visit(HQLParser.TreeNode treeNode) throws SemanticException {
+      public void visit(HQLParser.TreeNode treeNode) throws LensException {
         ASTNode astNode = treeNode.getNode();
         if (astNode.getToken().getType() == HiveParser.DOT) {
           // At this point alias replacer has run, so all columns are of the type table.column name

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
index 4d2692b..97088a1 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
@@ -25,13 +25,13 @@ import java.util.LinkedList;
 import java.util.List;
 
 import org.apache.lens.cube.metadata.AbstractBaseTable;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.antlr.runtime.CommonToken;
 import org.antlr.runtime.tree.Tree;
@@ -56,7 +56,7 @@ class GroupbyResolver implements ContextRewriter {
   }
 
   private void promoteSelect(CubeQueryContext cubeql, List<String> nonMsrNonAggSelExprsWithoutAlias,
-    List<String> groupByExprs) throws SemanticException {
+    List<String> groupByExprs) throws LensException {
     if (!selectPromotionEnabled) {
       return;
     }
@@ -77,7 +77,7 @@ class GroupbyResolver implements ContextRewriter {
             try {
               exprAST = HQLParser.parseExpr(expr);
             } catch (ParseException e) {
-              throw new SemanticException(e);
+              throw new LensException(e);
             }
             ASTNode groupbyAST = cubeql.getGroupByAST();
             if (!isConstantsUsed(exprAST)) {
@@ -124,7 +124,7 @@ class GroupbyResolver implements ContextRewriter {
   }
 
   private void promoteGroupby(CubeQueryContext cubeql, List<String> selectExprs, List<String> groupByExprs)
-    throws SemanticException {
+    throws LensException {
     if (!groupbyPromotionEnabled) {
       return;
     }
@@ -144,7 +144,7 @@ class GroupbyResolver implements ContextRewriter {
         try {
           exprAST = HQLParser.parseExpr(expr);
         } catch (ParseException e) {
-          throw new SemanticException(e);
+          throw new LensException(e);
         }
         addChildAtIndex(index, cubeql.getSelectAST(), exprAST);
         index++;
@@ -168,7 +168,7 @@ class GroupbyResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     // Process Aggregations by making sure that all group by keys are projected;
     // and all projection fields are added to group by keylist;
     List<String> selectExprs = new ArrayList<String>();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLContextInterface.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLContextInterface.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLContextInterface.java
index 35011e8..78d448a 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLContextInterface.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLContextInterface.java
@@ -18,7 +18,8 @@
  */
 package org.apache.lens.cube.parse;
 
-import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.lens.server.api.error.LensException;
+
 
 /**
  * HQL context holding the ql expressions
@@ -29,9 +30,9 @@ public interface HQLContextInterface {
    * Get the HQL query.
    *
    * @return query string
-   * @throws SemanticException
+   * @throws LensException
    */
-  String toHQL() throws SemanticException;
+  String toHQL() throws LensException;
 
   /**
    * Get select expression.

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
index 7b99310..586629f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
@@ -52,7 +52,7 @@ public final class HQLParser {
   public static final Pattern P_WSPACE = Pattern.compile("\\s+");
 
   public interface ASTNodeVisitor {
-    void visit(TreeNode node) throws SemanticException;
+    void visit(TreeNode node) throws LensException;
   }
 
   public static class TreeNode {
@@ -293,9 +293,9 @@ public final class HQLParser {
    *
    * @param root
    * @param visitor
-   * @throws SemanticException
+   * @throws LensException
    */
-  public static void bft(ASTNode root, ASTNodeVisitor visitor) throws SemanticException {
+  public static void bft(ASTNode root, ASTNodeVisitor visitor) throws LensException {
     if (root == null) {
       throw new NullPointerException("Root cannot be null");
     }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
index a6e9340..826a59d 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
@@ -22,14 +22,15 @@ import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 
 import java.util.*;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.SchemaGraph.TableRelationship;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.cube.parse.CubeQueryContext.OptionalDimCtx;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.*;
 
@@ -421,7 +422,7 @@ class JoinResolver implements ContextRewriter {
     }
 
     public String getFromString(String fromTable, CandidateFact fact, Set<Dimension> qdims,
-      Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext cubeql) throws SemanticException {
+      Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext cubeql) throws LensException {
       String fromString = fromTable;
       log.info("All paths dump:{}", cubeql.getAutoJoinCtx().getAllPaths());
       if (qdims == null || qdims.isEmpty()) {
@@ -829,7 +830,7 @@ class JoinResolver implements ContextRewriter {
     }
 
     public Set<Dimension> pickOptionalTables(final CandidateFact fact,
-      Set<Dimension> qdims, CubeQueryContext cubeql) throws SemanticException {
+      Set<Dimension> qdims, CubeQueryContext cubeql) throws LensException {
       // Find the min cost join clause and add dimensions in the clause as optional dimensions
       Set<Dimension> joiningOptionalTables = new HashSet<Dimension>();
       if (qdims == null) {
@@ -846,7 +847,7 @@ class JoinResolver implements ContextRewriter {
       }
 
       if (minCostClause == null) {
-        throw new SemanticException(ErrorMsg.NO_JOIN_PATH, qdims.toString(), autoJoinTarget.getName());
+        throw new LensException(LensCubeErrorCode.NO_JOIN_PATH.getValue(), qdims.toString(), autoJoinTarget.getName());
       }
 
       log.info("Fact: {} minCostClause:{}", fact, minCostClause);
@@ -876,7 +877,7 @@ class JoinResolver implements ContextRewriter {
           }
         }
         if (cubeql.getCandidateDimTables().get(dim).size() == 0) {
-          throw new SemanticException(ErrorMsg.NO_DIM_HAS_COLUMN, dim.getName(),
+          throw new LensException(LensCubeErrorCode.NO_DIM_HAS_COLUMN.getValue(), dim.getName(),
             minCostClause.chainColumns.get(dim).toString());
         }
       }
@@ -936,28 +937,26 @@ class JoinResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     partialJoinConditions = new HashMap<AbstractCubeTable, String>();
     tableJoinTypeMap = new HashMap<AbstractCubeTable, JoinType>();
-    resolveJoins(cubeql);
+    try {
+      resolveJoins(cubeql);
+    } catch (HiveException e) {
+      throw new LensException(e);
+    }
   }
 
-  private void resolveJoins(CubeQueryContext cubeql) throws SemanticException {
+  private void resolveJoins(CubeQueryContext cubeql) throws LensException, HiveException {
     QB cubeQB = cubeql.getQb();
-    boolean joinResolverDisabled =
-      cubeql.getConf().getBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, CubeQueryConfUtil.DEFAULT_DISABLE_AUTO_JOINS);
+    boolean joinResolverDisabled = cubeql.getConf().getBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS,
+        CubeQueryConfUtil.DEFAULT_DISABLE_AUTO_JOINS);
     if (joinResolverDisabled) {
       if (cubeql.getJoinTree() != null) {
         cubeQB.setQbJoinTree(genJoinTree(cubeQB, cubeql.getJoinTree(), cubeql));
       }
     } else {
-      try {
-        autoResolveJoins(cubeql);
-      } catch (SemanticException e) {
-        throw e;
-      } catch (HiveException e) {
-        throw new SemanticException(e);
-      }
+      autoResolveJoins(cubeql);
     }
   }
 
@@ -980,9 +979,10 @@ class JoinResolver implements ContextRewriter {
    * Resolve joins automatically for the given query.
    *
    * @param cubeql
-   * @throws SemanticException
+   * @throws LensException
+   * @throws HiveException
    */
-  private void autoResolveJoins(CubeQueryContext cubeql) throws HiveException {
+  private void autoResolveJoins(CubeQueryContext cubeql) throws LensException, HiveException {
     // Check if this query needs a join -
     // A join is needed if there is a cube and at least one dimension, or, 0
     // cubes and more than one
@@ -1052,7 +1052,7 @@ class JoinResolver implements ContextRewriter {
           }
           log.warn("No join path between {} and {}", joinee.getName(), target.getName());
           if (cubeql.getDimensions().contains(joinee)) {
-            throw new SemanticException(ErrorMsg.NO_JOIN_PATH, joinee.getName(), target.getName());
+            throw new LensException(LensCubeErrorCode.NO_JOIN_PATH.getValue(), joinee.getName(), target.getName());
           } else {
             // if joinee is optional dim table, remove those candidate facts
             Set<CandidateTable> candidates = cubeql.getOptionalDimensionMap().get(joinee).requiredForCandidates;
@@ -1076,19 +1076,19 @@ class JoinResolver implements ContextRewriter {
           }
         }
       } else if (dimensionInJoinChain.get(joinee).size() > 1) {
-        throw new SemanticException("Table " + joinee.getName() + " has "
+        throw new LensException("Table " + joinee.getName() + " has "
           +dimensionInJoinChain.get(joinee).size() + " different paths through joinchains "
           +"(" + dimensionInJoinChain.get(joinee) + ")"
           +" used in query. Couldn't determine which one to use");
       } else {
         // the case when dimension is used only once in all joinchains.
         if (isJoinchainDestination(cubeql, joinee)) {
-          throw new SemanticException("Table " + joinee.getName() + " is getting accessed via two different names: "
+          throw new LensException("Table " + joinee.getName() + " is getting accessed via two different names: "
             + "[" + dimensionInJoinChain.get(joinee).get(0).getName() + ", " + joinee.getName() + "]");
         }
         // table is accessed with chain and no chain
         if (cubeql.getNonChainedDimensions().contains(joinee)) {
-          throw new SemanticException("Table " + joinee.getName() + " is getting accessed via joinchain: "
+          throw new LensException("Table " + joinee.getName() + " is getting accessed via joinchain: "
             + dimensionInJoinChain.get(joinee).get(0).getName() + " and no chain at all");
         }
       }
@@ -1119,7 +1119,7 @@ class JoinResolver implements ContextRewriter {
   }
 
   private void addOptionalTables(CubeQueryContext cubeql, List<SchemaGraph.JoinPath> joinPathList, boolean required)
-    throws SemanticException {
+    throws LensException {
     for (SchemaGraph.JoinPath joinPath : joinPathList) {
       for (TableRelationship rel : joinPath.getEdges()) {
         // Add the joined tables to the queries table sets so that they are
@@ -1129,18 +1129,18 @@ class JoinResolver implements ContextRewriter {
     }
   }
 
-  private void setTarget(CubeMetastoreClient metastore, ASTNode node) throws HiveException {
-  private void setTarget(CubeMetastoreClient metastore, ASTNode node) throws HiveException {
+  private void setTarget(CubeMetastoreClient metastore, ASTNode node) throws HiveException, LensException {
     String targetTableName = HQLParser.getString(HQLParser.findNodeByPath(node, TOK_TABNAME, Identifier));
     if (metastore.isDimension(targetTableName)) {
       target = metastore.getDimension(targetTableName);
     } else if (metastore.isCube(targetTableName)) {
       target = (AbstractCubeTable) metastore.getCube(targetTableName);
     } else {
-      throw new SemanticException(ErrorMsg.JOIN_TARGET_NOT_CUBE_TABLE, targetTableName);
+      throw new LensException(LensCubeErrorCode.JOIN_TARGET_NOT_CUBE_TABLE.getValue(), targetTableName);
     }
   }
 
-  private void searchDimensionTables(CubeMetastoreClient metastore, ASTNode node) throws HiveException {
+  private void searchDimensionTables(CubeMetastoreClient metastore, ASTNode node) throws HiveException, LensException {
     if (node == null) {
       return;
     }
@@ -1195,7 +1195,7 @@ class JoinResolver implements ContextRewriter {
   }
 
   // Recursively find out join conditions
-  private QBJoinTree genJoinTree(QB qb, ASTNode joinParseTree, CubeQueryContext cubeql) throws SemanticException {
+  private QBJoinTree genJoinTree(QB qb, ASTNode joinParseTree, CubeQueryContext cubeql) throws LensException {
     QBJoinTree joinTree = new QBJoinTree();
     JoinCond[] condn = new JoinCond[1];
 
@@ -1290,7 +1290,7 @@ class JoinResolver implements ContextRewriter {
       cubeql.setJoinCond(joinTree, HQLParser.getString(joinCond));
     } else {
       // No join condition specified. this should be an error
-      throw new SemanticException(ErrorMsg.NO_JOIN_CONDITION_AVAIABLE);
+      throw new LensException(LensCubeErrorCode.NO_JOIN_CONDITION_AVAIABLE.getValue());
     }
     return joinTree;
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
index 7f02ae8..a53e994 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
@@ -21,9 +21,9 @@ package org.apache.lens.cube.parse;
 import java.util.*;
 
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import lombok.extern.slf4j.Slf4j;
 
@@ -36,7 +36,7 @@ class LeastPartitionResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     if (cubeql.getCube() != null && !cubeql.getCandidateFactSets().isEmpty()) {
       Map<Set<CandidateFact>, Integer> factPartCount = new HashMap<Set<CandidateFact>, Integer>();
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestDimensionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestDimensionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestDimensionResolver.java
index 4ae6226..82410d3 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestDimensionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestDimensionResolver.java
@@ -22,9 +22,9 @@ import java.util.*;
 
 import org.apache.lens.cube.metadata.Dimension;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import lombok.extern.slf4j.Slf4j;
 
@@ -38,7 +38,7 @@ class LightestDimensionResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     if (!cubeql.getCandidateDimTables().isEmpty()) {
       for (Map.Entry<Dimension, Set<CandidateDim>> entry : cubeql.getCandidateDimTables().entrySet()) {
         if (entry.getValue().isEmpty()) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestFactResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestFactResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestFactResolver.java
index fba682d..97accbb 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestFactResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestFactResolver.java
@@ -22,9 +22,9 @@ package org.apache.lens.cube.parse;
 import java.util.*;
 
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import lombok.extern.slf4j.Slf4j;
 
@@ -37,7 +37,7 @@ public class LightestFactResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     if (cubeql.getCube() != null && !cubeql.getCandidateFactSets().isEmpty()) {
       Map<Set<CandidateFact>, Double> factWeightMap = new HashMap<Set<CandidateFact>, Double>();
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
index 4d8cbf3..13f1aa4 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
@@ -29,7 +29,6 @@ import org.apache.lens.cube.metadata.timeline.RangesPartitionTimeline;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import com.google.common.collect.Maps;
 
@@ -47,7 +46,7 @@ class MaxCoveringFactResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) {
     if (failOnPartialData) {
       // if fail on partial data is true, by the time this resolver starts,
       // all candidate fact sets are covering full time range. We can avoid

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
index b5f5adc..d8515d8 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
@@ -20,11 +20,11 @@ package org.apache.lens.cube.parse;
 
 import java.util.*;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.Dimension;
+import org.apache.lens.server.api.error.LensException;
 
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import com.google.common.collect.Lists;
 
@@ -40,7 +40,7 @@ class MultiFactHQLContext extends SimpleHQLContext {
   private Map<CandidateFact, Set<Dimension>> factDimMap;
 
   MultiFactHQLContext(Set<CandidateFact> facts, Map<Dimension, CandidateDim> dimsToQuery,
-    Map<CandidateFact, Set<Dimension>> factDimMap, CubeQueryContext query) throws SemanticException {
+    Map<CandidateFact, Set<Dimension>> factDimMap, CubeQueryContext query) throws LensException {
     super();
     this.query = query;
     this.facts = facts;
@@ -48,7 +48,7 @@ class MultiFactHQLContext extends SimpleHQLContext {
     this.factDimMap = factDimMap;
   }
 
-  protected void setMissingExpressions() throws SemanticException {
+  protected void setMissingExpressions() throws LensException {
     setSelect(getSelectString());
     setFrom(getFromString());
     setWhere(getWhereString());
@@ -73,11 +73,11 @@ class MultiFactHQLContext extends SimpleHQLContext {
     return null;
   }
 
-  public String toHQL() throws SemanticException {
+  public String toHQL() throws LensException {
     return query.getInsertClause() + super.toHQL();
   }
 
-  private String getSelectString() throws SemanticException {
+  private String getSelectString() throws LensException {
     Map<Integer, List<Integer>> selectToFactIndex =
       new HashMap<Integer, List<Integer>>(query.getSelectAST().getChildCount());
     int fi = 1;
@@ -93,8 +93,8 @@ class MultiFactHQLContext extends SimpleHQLContext {
     StringBuilder select = new StringBuilder();
     for (int i = 0; i < query.getSelectAST().getChildCount(); i++) {
       if (selectToFactIndex.get(i) == null) {
-        throw new SemanticException(ErrorMsg.EXPRESSION_NOT_IN_ANY_FACT, HQLParser.getString((ASTNode) query
-          .getSelectAST().getChild(i)));
+        throw new LensException(LensCubeErrorCode.EXPRESSION_NOT_IN_ANY_FACT.getValue(),
+            HQLParser.getString((ASTNode) query.getSelectAST().getChild(i)));
       }
       if (selectToFactIndex.get(i).size() == 1) {
         select.append("mq").append(selectToFactIndex.get(i).get(0)).append(".")
@@ -124,7 +124,7 @@ class MultiFactHQLContext extends SimpleHQLContext {
     return facts;
   }
 
-  private String getFromString() throws SemanticException {
+  private String getFromString() throws LensException {
     StringBuilder fromBuilder = new StringBuilder();
     int aliasCount = 1;
     Iterator<CandidateFact> iter = facts.iterator();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
index f3f3f78..067a37a 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
@@ -22,8 +22,9 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
+import org.apache.lens.server.api.error.LensException;
+
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import lombok.extern.slf4j.Slf4j;
 
@@ -70,11 +71,12 @@ public abstract class SimpleHQLContext implements HQLContextInterface {
    * <p></p>
    * Leaving this empty implementation for the case of all expressions being passed in constructor. If other
    * constructors are used the missing expressions should be set here
+   * @throws LensException
    */
-  protected void setMissingExpressions() throws SemanticException {
+  protected void setMissingExpressions() throws LensException {
   }
 
-  public String toHQL() throws SemanticException {
+  public String toHQL() throws LensException {
     setMissingExpressions();
     String qfmt = getQueryFormat();
     Object[] queryTreeStrings = getQueryTreeStrings();
@@ -85,7 +87,7 @@ public abstract class SimpleHQLContext implements HQLContextInterface {
     return baseQuery;
   }
 
-  private String[] getQueryTreeStrings() throws SemanticException {
+  private String[] getQueryTreeStrings() throws LensException {
     List<String> qstrs = new ArrayList<String>();
     qstrs.add(select);
     qstrs.add(from);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
index b63111b..60b2dde 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
@@ -22,10 +22,11 @@ import java.util.Map;
 
 import org.apache.lens.cube.metadata.Dimension;
 
+import org.apache.lens.server.api.error.LensException;
+
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 /**
  * HQL context class which passes down all query strings to come from DimOnlyHQLContext and works with fact being
@@ -39,13 +40,13 @@ class SingleFactHQLContext extends DimOnlyHQLContext {
   private String storageAlias;
 
   SingleFactHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query)
-    throws SemanticException {
+    throws LensException {
     super(dimsToQuery, query);
     this.fact = fact;
   }
 
   SingleFactHQLContext(CandidateFact fact, String storageAlias, Map<Dimension, CandidateDim> dimsToQuery,
-      CubeQueryContext query, String whereClause) throws SemanticException {
+      CubeQueryContext query, String whereClause) throws LensException {
     super(dimsToQuery, query, whereClause);
     this.fact = fact;
     this.storageAlias = storageAlias;
@@ -56,7 +57,7 @@ class SingleFactHQLContext extends DimOnlyHQLContext {
     return fact;
   }
 
-  static void addRangeClauses(CubeQueryContext query, CandidateFact fact) throws SemanticException {
+  static void addRangeClauses(CubeQueryContext query, CandidateFact fact) throws LensException {
     if (fact != null) {
       // resolve timerange positions and replace it by corresponding where
       // clause
@@ -70,7 +71,7 @@ class SingleFactHQLContext extends DimOnlyHQLContext {
             try {
               rangeAST = HQLParser.parseExpr(rangeWhere);
             } catch (ParseException e) {
-              throw new SemanticException(e);
+              throw new LensException(e);
             }
             rangeAST.setParent(range.getParent());
             range.getParent().setChild(range.getChildIndex(), rangeAST);
@@ -83,7 +84,7 @@ class SingleFactHQLContext extends DimOnlyHQLContext {
 
 
   @Override
-  protected String getFromTable() throws SemanticException {
+  protected String getFromTable() throws LensException {
     if (getQuery().getAutoJoinCtx() != null && getQuery().getAutoJoinCtx().isJoinsResolved()) {
       if (storageAlias != null) {
         return storageAlias;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
index 4ad2f1f..15a98dd 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
@@ -23,8 +23,7 @@ import java.util.ArrayList;
 import java.util.Map;
 
 import org.apache.lens.cube.metadata.Dimension;
-
-import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.lens.server.api.error.LensException;
 
 import lombok.Getter;
 
@@ -36,14 +35,14 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
   private CandidateFact fact = null;
 
   SingleFactMultiStorageHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query)
-    throws SemanticException {
+    throws LensException {
     this.query = query;
     this.fact = fact;
     setUnionContexts(fact, dimsToQuery, query);
   }
 
   private void setUnionContexts(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query)
-    throws SemanticException {
+    throws LensException {
     hqlContexts = new ArrayList<HQLContextInterface>();
     String alias = getQuery().getAliasForTableName(getQuery().getCube().getName());
     for (String storageTable : fact.getStorageTables()) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index aa76c0c..58d0fa7 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -42,7 +42,6 @@ import org.apache.lens.server.api.error.LensException;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.util.ReflectionUtils;
 
 import com.google.common.collect.Lists;
@@ -126,7 +125,7 @@ class StorageTableResolver implements ContextRewriter {
   Map<String, List<String>> storagePartMap = new HashMap<String, List<String>>();
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     client = cubeql.getMetastoreClient();
 
     switch (phase) {
@@ -159,7 +158,7 @@ class StorageTableResolver implements ContextRewriter {
     phase = phase.next();
   }
 
-  private void resolveDimStorageTablesAndPartitions(CubeQueryContext cubeql) throws SemanticException {
+  private void resolveDimStorageTablesAndPartitions(CubeQueryContext cubeql) throws LensException {
     Set<Dimension> allDims = new HashSet<Dimension>(cubeql.getDimensions());
     allDims.addAll(cubeql.getOptionalDimensions());
     for (Dimension dim : allDims) {
@@ -234,7 +233,7 @@ class StorageTableResolver implements ContextRewriter {
   }
 
   // Resolves all the storage table names, which are valid for each updatePeriod
-  private void resolveFactStorageTableNames(CubeQueryContext cubeql) throws SemanticException {
+  private void resolveFactStorageTableNames(CubeQueryContext cubeql) throws LensException {
     Iterator<CandidateFact> i = cubeql.getCandidateFacts().iterator();
     while (i.hasNext()) {
       CubeFactTable fact = i.next().fact;
@@ -317,7 +316,7 @@ class StorageTableResolver implements ContextRewriter {
   }
 
   private TimeRange getFallbackRange(TimeRange range, CandidateFact cfact, CubeQueryContext cubeql)
-    throws SemanticException {
+    throws LensException {
     Cube baseCube = cubeql.getBaseCube();
     try {
       ArrayList<String> tableNames = Lists.newArrayList(cfact.fact.getName(), cubeql.getCube().getName());
@@ -349,11 +348,11 @@ class StorageTableResolver implements ContextRewriter {
         .toDate(diff1.negativeOffsetFrom(range.getToDate()))
         .partitionColumn(fallbackPartCol).build();
     } catch (HiveException e) {
-      throw new SemanticException(e);
+      throw new LensException(e);
     }
   }
 
-  private void resolveFactStoragePartitions(CubeQueryContext cubeql) throws SemanticException {
+  private void resolveFactStoragePartitions(CubeQueryContext cubeql) throws LensException {
     // Find candidate tables wrt supported storages
     Iterator<CandidateFact> i = cubeql.getCandidateFacts().iterator();
     Map<TimeRange, String> whereClauseForFallback = new LinkedHashMap<TimeRange, String>();
@@ -505,12 +504,12 @@ class StorageTableResolver implements ContextRewriter {
 
   private Set<FactPartition> getPartitions(CubeFactTable fact, TimeRange range,
     HashMap<String, SkipStorageCause> skipStorageCauses,
-    PartitionRangesForPartitionColumns missingPartitions) throws SemanticException {
+    PartitionRangesForPartitionColumns missingPartitions) throws LensException {
     try {
       return getPartitions(fact, range, getValidUpdatePeriods(fact), true, skipStorageCauses,
         missingPartitions);
     } catch (Exception e) {
-      throw new SemanticException(e);
+      throw new LensException(e);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
index 03732cb..5444e71 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
@@ -24,12 +24,12 @@ import java.util.Calendar;
 import java.util.Date;
 import java.util.TreeSet;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.UpdatePeriod;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.codehaus.jackson.annotate.JsonIgnoreProperties;
 
@@ -104,13 +104,13 @@ public class TimeRange {
 
   }
 
-  public void validate() throws SemanticException {
+  public void validate() throws LensException {
     if (partitionColumn == null || fromDate == null || toDate == null || fromDate.equals(toDate)) {
-      throw new SemanticException(ErrorMsg.INVALID_TIME_RANGE);
+      throw new LensException(LensCubeErrorCode.INVALID_TIME_RANGE.getValue());
     }
 
     if (fromDate.after(toDate)) {
-      throw new SemanticException(ErrorMsg.FROM_AFTER_TO, fromDate.toString(), toDate.toString());
+      throw new LensException(LensCubeErrorCode.FROM_AFTER_TO.getValue(), fromDate.toString(), toDate.toString());
     }
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeWriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeWriter.java
index 12acf98..08f957e 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeWriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeWriter.java
@@ -21,10 +21,9 @@ package org.apache.lens.cube.parse;
 import java.util.Set;
 
 import org.apache.lens.cube.metadata.FactPartition;
-
-import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.lens.server.api.error.LensException;
 
 public interface TimeRangeWriter {
   String getTimeRangeWhereClause(CubeQueryContext cubeQueryContext, String tableName, Set<FactPartition> parts)
-    throws SemanticException;
+    throws LensException;
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
index 91c0c75..fb1c89e 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
@@ -26,6 +26,7 @@ import java.util.*;
 
 import org.apache.lens.cube.error.ColUnAvailableInTimeRange;
 import org.apache.lens.cube.error.ColUnAvailableInTimeRangeException;
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.AbstractCubeTable;
 import org.apache.lens.cube.metadata.CubeColumn;
 import org.apache.lens.cube.metadata.Dimension;
@@ -35,9 +36,7 @@ import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 
 import com.google.common.collect.Lists;
@@ -52,7 +51,7 @@ class TimerangeResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException, LensException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     if (cubeql.getCube() == null) {
       return;
     }
@@ -62,19 +61,19 @@ class TimerangeResolver implements ContextRewriter {
   }
 
 
-  private void extractTimeRange(CubeQueryContext cubeql) throws SemanticException {
+  private void extractTimeRange(CubeQueryContext cubeql) throws LensException {
     // get time range -
     // Time range should be direct child of where condition
     // TOK_WHERE.TOK_FUNCTION.Identifier Or, it should be right hand child of
     // AND condition TOK_WHERE.KW_AND.TOK_FUNCTION.Identifier
     if (cubeql.getWhereAST() == null || cubeql.getWhereAST().getChildCount() < 1) {
-      throw new SemanticException(ErrorMsg.NO_TIMERANGE_FILTER);
+      throw new LensException(LensCubeErrorCode.NO_TIMERANGE_FILTER.getValue());
     }
     searchTimeRanges(cubeql.getWhereAST(), cubeql, null, 0);
   }
 
   private void searchTimeRanges(ASTNode root, CubeQueryContext cubeql, ASTNode parent, int childIndex)
-    throws SemanticException {
+    throws LensException {
     if (root == null) {
       return;
     } else if (root.getToken().getType() == TOK_FUNCTION) {
@@ -104,7 +103,7 @@ class TimerangeResolver implements ContextRewriter {
   }
 
   private void processTimeRangeFunction(CubeQueryContext cubeql, ASTNode timenode, ASTNode parent, int childIndex)
-    throws SemanticException {
+    throws LensException {
     TimeRange.TimeRangeBuilder builder = TimeRange.getBuilder();
     builder.astNode(timenode);
     builder.parent(parent);
@@ -113,7 +112,7 @@ class TimerangeResolver implements ContextRewriter {
     String timeDimName = getColumnName((ASTNode) timenode.getChild(1));
 
     if (!cubeql.getCube().getTimedDimensions().contains(timeDimName)) {
-      throw new SemanticException(ErrorMsg.NOT_A_TIMED_DIMENSION, timeDimName);
+      throw new LensException(LensCubeErrorCode.NOT_A_TIMED_DIMENSION.getValue(), timeDimName);
     }
     // Replace timeDimName with column which is used for partitioning. Assume
     // the same column
@@ -143,7 +142,7 @@ class TimerangeResolver implements ContextRewriter {
     cubeql.getTimeRanges().add(range);
   }
 
-  private void doColLifeValidation(CubeQueryContext cubeql) throws SemanticException,
+  private void doColLifeValidation(CubeQueryContext cubeql) throws LensException,
     ColUnAvailableInTimeRangeException {
     Set<String> cubeColumns = cubeql.getColumnsQueried(cubeql.getCube().getName());
     if (cubeColumns == null || cubeColumns.isEmpty()) {
@@ -156,7 +155,7 @@ class TimerangeResolver implements ContextRewriter {
       for (TimeRange range : cubeql.getTimeRanges()) {
         if (column == null) {
           if (!cubeql.getCube().getTimedDimensions().contains(col)) {
-            throw new SemanticException(ErrorMsg.NOT_A_CUBE_COLUMN, col);
+            throw new LensException(LensCubeErrorCode.NOT_A_CUBE_COLUMN.getValue(), col);
           }
           continue;
         }
@@ -214,8 +213,9 @@ class TimerangeResolver implements ContextRewriter {
                 joinPathIterator.remove();
                 if (joinPaths.isEmpty()) {
                   // This dimension doesn't have any paths left
-                  throw new SemanticException(ErrorMsg.NO_JOIN_PATH, "No valid join path available for dimension "
-                    + dimension + " which would satisfy time range " + range.getFromDate() + "-" + range.getToDate());
+                  throw new LensException(LensCubeErrorCode.NO_JOIN_PATH.getValue(),
+                      "No valid join path available for dimension " + dimension + " which would satisfy time range "
+                          + range.getFromDate() + "-" + range.getToDate());
                 }
               }
             } // End loop to remove path

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
index e6ed86b..9005826 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
@@ -24,9 +24,10 @@ import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Set;
 
+import org.apache.lens.server.api.error.LensException;
+
 import org.apache.commons.lang.NotImplementedException;
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import lombok.AllArgsConstructor;
 import lombok.Getter;
@@ -42,7 +43,7 @@ public abstract class UnionHQLContext implements HQLContextInterface {
   List<HQLContextInterface> hqlContexts = new ArrayList<HQLContextInterface>();
 
   @Override
-  public String toHQL() throws SemanticException {
+  public String toHQL() throws LensException {
     Set<String> queryParts = new LinkedHashSet<String>();
     for (HQLContextInterface ctx : hqlContexts) {
       queryParts.add(ctx.toHQL());

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/ValidationRule.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ValidationRule.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ValidationRule.java
index 9567845..558e411 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ValidationRule.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ValidationRule.java
@@ -18,8 +18,9 @@
  */
 package org.apache.lens.cube.parse;
 
+import org.apache.lens.server.api.error.LensException;
+
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 public abstract class ValidationRule {
   Configuration conf;
@@ -29,7 +30,7 @@ public abstract class ValidationRule {
     this.conf = conf;
   }
 
-  public abstract boolean validate(CubeQueryContext ctx) throws SemanticException;
+  public abstract boolean validate(CubeQueryContext ctx) throws  LensException;
 
   public String getErrorMessage() {
     return error;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index ae8984f..13eca27 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -746,7 +746,8 @@ public class CubeTestSetup {
       .createDerivedCube(TEST_CUBE_NAME, DERIVED_CUBE_NAME, measures, dimensions, new HashMap<String, String>(), 5L);
   }
 
-  private void createBaseAndDerivedCubes(CubeMetastoreClient client) throws HiveException, ParseException {
+  private void createBaseAndDerivedCubes(CubeMetastoreClient client)
+    throws HiveException, ParseException, LensException {
     Set<CubeMeasure> cubeMeasures2 = new HashSet<CubeMeasure>(cubeMeasures);
     Set<CubeDimAttribute> cubeDimensions2 = new HashSet<CubeDimAttribute>(cubeDimensions);
     cubeMeasures2.add(new ColumnMeasure(new FieldSchema("msr11", "int", "first measure")));
@@ -906,7 +907,7 @@ public class CubeTestSetup {
     createBaseCubeFacts(client);
   }
 
-  private void createBaseCubeFacts(CubeMetastoreClient client) throws HiveException {
+  private void createBaseCubeFacts(CubeMetastoreClient client) throws HiveException, LensException {
 
     Map<String, Set<UpdatePeriod>> storageAggregatePeriods = new HashMap<String, Set<UpdatePeriod>>();
     Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
index 501a4b0..0fea9f1 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
@@ -34,7 +34,6 @@ import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
@@ -50,7 +49,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithDimensionAndMeasure() throws SemanticException, ParseException, LensException {
+  public void testQueryWithDimensionAndMeasure() throws ParseException, LensException {
 
     /* If all the queried dimensions are present in a derived cube, and one of the queried measure is not present in
     the same derived cube, then query shall be disallowed.
@@ -63,7 +62,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithDimensionAndMeasureInExpression() throws SemanticException, ParseException, LensException {
+  public void testQueryWithDimensionAndMeasureInExpression() throws ParseException, LensException {
 
     /* If all the queried dimensions are present in a derived cube, and one of the queried measure is not present in
     the same derived cube, then query shall be disallowed.
@@ -76,7 +75,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithDimensionInExpressionAndMeasure() throws SemanticException, ParseException, LensException {
+  public void testQueryWithDimensionInExpressionAndMeasure() throws ParseException, LensException {
 
     /* If all the queried dimensions are present in a derived cube, and one of the queried measure is not present in
     the same derived cube, then query shall be disallowed.
@@ -89,7 +88,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithDimensionAndMeasureInExpressions() throws SemanticException, ParseException, LensException {
+  public void testQueryWithDimensionAndMeasureInExpressions() throws ParseException, LensException {
 
     /* If all the queried dimensions are present in a derived cube, and one of the queried measure is not present in
     the same derived cube, then query shall be disallowed.
@@ -102,7 +101,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithChainReferencedDimensionAttributeAndMeasure() throws SemanticException, ParseException,
+  public void testQueryWithChainReferencedDimensionAttributeAndMeasure() throws ParseException,
       LensException {
 
     /* In this query a dimension attribute referenced through join chain name is used in select. If the
@@ -118,7 +117,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithChainReferencedDimensionAttributeAndExprMeasure() throws SemanticException, ParseException,
+  public void testQueryWithChainReferencedDimensionAttributeAndExprMeasure() throws ParseException,
       LensException {
 
     /* In this query a dimension attribute referenced through join chain name is used in select. If the
@@ -134,7 +133,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithDimExprWithChainRefAndExprMeasure() throws SemanticException, ParseException,
+  public void testQueryWithDimExprWithChainRefAndExprMeasure() throws ParseException,
       LensException {
 
     /* In this query a dimension attribute referenced through join chain name is used in select. If the
@@ -150,7 +149,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithMeasureAndChainReferencedDimAttributeInFilter() throws SemanticException, ParseException,
+  public void testQueryWithMeasureAndChainReferencedDimAttributeInFilter() throws ParseException,
       LensException {
 
     /* In this query a dimension attribute referenced through join chain name is used in filter. If the
@@ -166,7 +165,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithExprMeasureAndChainReferencedDimAttributeInFilter() throws SemanticException, ParseException,
+  public void testQueryWithExprMeasureAndChainReferencedDimAttributeInFilter() throws ParseException,
       LensException {
 
     /* In this query a dimension attribute referenced through join chain name is used in filter. If the
@@ -182,7 +181,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithExprMeasureAndDimExprWithChainRefInFilter() throws SemanticException, ParseException,
+  public void testQueryWithExprMeasureAndDimExprWithChainRefInFilter() throws ParseException,
       LensException {
 
     /* In this query a dimension attribute referenced through join chain name is used in filter. If the
@@ -199,7 +198,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithOnlyMeasure() throws ParseException, SemanticException, LensException {
+  public void testQueryWithOnlyMeasure() throws ParseException, LensException {
 
     /* A query which contains only measure should pass, if the measure is present in some derived cube.
     msr1 is present in one of the derived cubes, hence query shall pass without any exception. */
@@ -208,7 +207,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithOnlyExprMeasure() throws ParseException, SemanticException, LensException {
+  public void testQueryWithOnlyExprMeasure() throws ParseException, LensException {
 
     /* A query which contains only measure should pass, if the measure is present in some derived cube.
     roundedmsr1 ( an expression over msr1) is present in one of the derived cubes, hence query shall pass without
@@ -219,7 +218,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
 
   @Test
   public void testQueryWithMeasureAndChainReferencedDimAttributeInCaseStatement() throws ParseException,
-      SemanticException, LensException {
+      LensException {
 
     /* In this query a dimension attribute referenced through join chain name is used in case statement.
     A query which contains such a dim attribute and a measure is allowed even if the source column of the used dim
@@ -233,7 +232,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithDimAttributesNotInSameDerviedCube() throws ParseException, SemanticException, LensException {
+  public void testQueryWithDimAttributesNotInSameDerviedCube() throws ParseException, LensException {
 
     /* dim2 and countryid are not present in the same derived cube, hence query should be disallowed */
 
@@ -243,7 +242,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
 
   @Test
   public void testQueryWithDimExpressionssNotInSameDerviedCube()
-    throws ParseException, SemanticException, LensException {
+    throws ParseException, LensException {
 
     /* dim2, source columns of cubestate and countryid are not present in the same derived cube, hence query should be
      *  disallowed */
@@ -253,7 +252,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithMeasureNotInAnyDerviedCube() throws ParseException, SemanticException, LensException {
+  public void testQueryWithMeasureNotInAnyDerviedCube() throws ParseException, LensException {
 
     /* newmeasure is not present in any derived cube, hence the query should be disallowed. */
 
@@ -262,7 +261,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithExprMeasureNotInAnyDerviedCube() throws ParseException, SemanticException, LensException {
+  public void testQueryWithExprMeasureNotInAnyDerviedCube() throws ParseException, LensException {
 
     /* newexpr : expression over newmeasure is not present in any derived cube, hence the query should be disallowed. */
 
@@ -271,7 +270,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWithReferencedDimAttributeAndMeasure() throws SemanticException, ParseException,
+  public void testQueryWithReferencedDimAttributeAndMeasure() throws ParseException,
       LensException {
 
     /* In this query a referenced dimension attribute is used in select statement. If the source column for such a
@@ -288,8 +287,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWtihTimeDimAndReplaceTimeDimSwitchTrue() throws ParseException, SemanticException,
-      LensException {
+  public void testQueryWtihTimeDimAndReplaceTimeDimSwitchTrue() throws ParseException, LensException {
 
     /* If a time dimension and measure are not present in the same derived cube, then query shall be disallowed.
 
@@ -309,8 +307,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   @Test
-  public void testQueryWtihTimeDimAndReplaceTimeDimSwitchFalse() throws ParseException, SemanticException,
-      LensException {
+  public void testQueryWtihTimeDimAndReplaceTimeDimSwitchFalse() throws ParseException, LensException {
 
     /* If a time dimension and measure are not present in the same derived cube, then query shall be disallowed.
 
@@ -330,13 +327,13 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   }
 
   private void testFieldsCannotBeQueriedTogetherError(final String testQuery, final List<String> conflictingFields)
-    throws ParseException, SemanticException, LensException {
+    throws ParseException, LensException {
     testFieldsCannotBeQueriedTogetherError(testQuery, conflictingFields, conf);
   }
 
   private void testFieldsCannotBeQueriedTogetherError(final String testQuery, final List<String> conflictingFields,
       final Configuration queryConf)
-    throws ParseException, SemanticException, LensException {
+    throws ParseException, LensException {
 
     try {
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
index 13058e2..9791502 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
@@ -26,7 +26,6 @@ import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.testng.Assert;
 import org.testng.annotations.BeforeTest;
@@ -147,7 +146,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
   }
 
   @Test
-  public void testDimOnlyDistinctQuery() throws SemanticException, ParseException, LensException {
+  public void testDimOnlyDistinctQuery() throws ParseException, LensException {
 
     conf.setBoolean(CubeQueryConfUtil.DISABLE_AGGREGATE_RESOLVER, false);
 
@@ -196,7 +195,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
   }
 
   @Test
-  public void testAggregateResolverOff() throws SemanticException, ParseException, LensException {
+  public void testAggregateResolverOff() throws ParseException, LensException {
     Configuration conf2 = getConfWithStorages("C1,C2");
     conf2.setBoolean(CubeQueryConfUtil.DISABLE_AGGREGATE_RESOLVER, true);
 
@@ -218,7 +217,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     rawFactSelectionTests(conf2);
   }
 
-  private void aggregateFactSelectionTests(Configuration conf) throws SemanticException, ParseException, LensException {
+  private void aggregateFactSelectionTests(Configuration conf) throws ParseException, LensException {
     String query = "SELECT count(distinct cityid) from testcube where " + TWO_DAYS_RANGE;
     CubeQueryContext cubeql = rewriteCtx(query, conf);
     String hQL = cubeql.toHQL();
@@ -262,7 +261,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     compareQueries(expectedQL, hQL);
   }
 
-  private void rawFactSelectionTests(Configuration conf) throws SemanticException, ParseException, LensException {
+  private void rawFactSelectionTests(Configuration conf) throws ParseException, LensException {
     // Check a query with non default aggregate function
     String query = "SELECT cityid, avg(testCube.msr2) FROM testCube WHERE " + TWO_DAYS_RANGE;
     CubeQueryContext cubeql = rewriteCtx(query, conf);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index 0d0b927..9120a70 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -32,15 +32,14 @@ import java.util.*;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.time.DateUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.antlr.runtime.CommonToken;
 import org.testng.Assert;
@@ -66,16 +65,16 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
 
   @Test
   public void testColumnErrors() throws Exception {
-    SemanticException e;
+    LensException e;
 
-    e = getSemanticExceptionInRewrite("select msr11 + msr2 from basecube" + " where " + TWO_DAYS_RANGE, conf);
-    assertEquals(e.getCanonicalErrorMsg().getErrorCode(),
-      ErrorMsg.EXPRESSION_NOT_IN_ANY_FACT.getErrorCode());
+    e = getLensExceptionInRewrite("select msr11 + msr2 from basecube" + " where " + TWO_DAYS_RANGE, conf);
+    assertEquals(e.getErrorCode(),
+        LensCubeErrorCode.EXPRESSION_NOT_IN_ANY_FACT.getValue());
     // no fact has the all the dimensions queried
-    e = getSemanticExceptionInRewrite("select dim1, test_time_dim, msr3, msr13 from basecube where "
+    e = getLensExceptionInRewrite("select dim1, test_time_dim, msr3, msr13 from basecube where "
       + TWO_DAYS_RANGE, conf);
-    assertEquals(e.getCanonicalErrorMsg().getErrorCode(),
-      ErrorMsg.NO_CANDIDATE_FACT_AVAILABLE.getErrorCode());
+    assertEquals(e.getErrorCode(),
+        LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getValue());
     PruneCauses.BriefAndDetailedError pruneCauses = extractPruneCause(e);
     String regexp = String.format(CandidateTablePruneCause.CandidateTablePruneCode.COLUMN_NOT_FOUND.errorFormat,
       "Column Sets: (.*?)", "queriable together");
@@ -438,8 +437,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     // If going to fallback timedim, and partitions are missing, then error should be missing partition on that
     conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C4");
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
-    SemanticException exc =
-      getSemanticExceptionInRewrite("cube select msr12 from basecube where " + TWO_DAYS_RANGE, conf);
+    LensException exc =
+      getLensExceptionInRewrite("cube select msr12 from basecube where " + TWO_DAYS_RANGE, conf);
     PruneCauses.BriefAndDetailedError pruneCause = extractPruneCause(exc);
     assertTrue(pruneCause.getBrief().contains("Missing partitions"));
     assertEquals(pruneCause.getDetails().get("testfact2_base").iterator().next().getCause(), MISSING_PARTITIONS);


[3/3] incubator-lens git commit: LENS-187 : Move cube specific error message codes from Hive code to Lens

Posted by am...@apache.org.
LENS-187 : Move cube specific error message codes from Hive code to Lens


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/3dc348ac
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/3dc348ac
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/3dc348ac

Branch: refs/heads/master
Commit: 3dc348ac2793b5394f0ef58bba6da6f9933c5e1d
Parents: c368595
Author: Sushil Mohanty <su...@gmail.com>
Authored: Wed Aug 19 14:30:35 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Wed Aug 19 14:30:35 2015 +0530

----------------------------------------------------------------------
 lens-api/src/main/resources/lens-errors.conf    | 162 +++++++++++++++++++
 .../lens/cube/error/LensCubeErrorCode.java      |  29 +++-
 .../cube/parse/AbridgedTimeRangeWriter.java     |   8 +-
 .../lens/cube/parse/AggregateResolver.java      |  14 +-
 .../apache/lens/cube/parse/AliasReplacer.java   |  21 +--
 .../lens/cube/parse/BetweenTimeRangeWriter.java |  13 +-
 .../apache/lens/cube/parse/CandidateFact.java   |  20 +--
 .../lens/cube/parse/CandidateTableResolver.java |  41 ++---
 .../lens/cube/parse/CheckColumnMapping.java     |   5 +-
 .../apache/lens/cube/parse/CheckTableNames.java |   5 +-
 .../apache/lens/cube/parse/ColumnResolver.java  |  18 +--
 .../apache/lens/cube/parse/ContextRewriter.java |   6 +-
 .../lens/cube/parse/CubeQueryContext.java       |  73 ++++-----
 .../lens/cube/parse/CubeQueryRewriter.java      |  30 ++--
 .../lens/cube/parse/CubeSemanticAnalyzer.java   |  41 +----
 .../org/apache/lens/cube/parse/DateUtil.java    |  24 +--
 .../cube/parse/DenormalizationResolver.java     |  26 +--
 .../apache/lens/cube/parse/DimHQLContext.java   |  12 +-
 .../lens/cube/parse/DimOnlyHQLContext.java      |  11 +-
 .../lens/cube/parse/ExpressionResolver.java     |  75 ++++-----
 .../apache/lens/cube/parse/FactHQLContext.java  |   7 +-
 .../apache/lens/cube/parse/FieldValidator.java  |  12 +-
 .../apache/lens/cube/parse/GroupbyResolver.java |  12 +-
 .../lens/cube/parse/HQLContextInterface.java    |   7 +-
 .../org/apache/lens/cube/parse/HQLParser.java   |   6 +-
 .../apache/lens/cube/parse/JoinResolver.java    |  58 +++----
 .../lens/cube/parse/LeastPartitionResolver.java |   4 +-
 .../cube/parse/LightestDimensionResolver.java   |   4 +-
 .../lens/cube/parse/LightestFactResolver.java   |   4 +-
 .../cube/parse/MaxCoveringFactResolver.java     |   3 +-
 .../lens/cube/parse/MultiFactHQLContext.java    |  18 +--
 .../lens/cube/parse/SimpleHQLContext.java       |  10 +-
 .../lens/cube/parse/SingleFactHQLContext.java   |  13 +-
 .../parse/SingleFactMultiStorageHQLContext.java |   7 +-
 .../lens/cube/parse/StorageTableResolver.java   |  17 +-
 .../org/apache/lens/cube/parse/TimeRange.java   |  10 +-
 .../apache/lens/cube/parse/TimeRangeWriter.java |   5 +-
 .../lens/cube/parse/TimerangeResolver.java      |  24 +--
 .../apache/lens/cube/parse/UnionHQLContext.java |   5 +-
 .../apache/lens/cube/parse/ValidationRule.java  |   5 +-
 .../apache/lens/cube/parse/CubeTestSetup.java   |   5 +-
 .../FieldsCannotBeQueriedTogetherTest.java      |  47 +++---
 .../lens/cube/parse/TestAggregateResolver.java  |   9 +-
 .../lens/cube/parse/TestBaseCubeQueries.java    |  21 ++-
 .../lens/cube/parse/TestCubeRewriter.java       |  58 +++----
 .../apache/lens/cube/parse/TestDateUtil.java    |   6 +-
 .../cube/parse/TestDenormalizationResolver.java |  36 ++---
 .../lens/cube/parse/TestExpressionResolver.java |  32 ++--
 .../lens/cube/parse/TestJoinResolver.java       |  35 ++--
 .../lens/cube/parse/TestQueryRewrite.java       |  39 +++--
 .../lens/cube/parse/TestRewriterPlan.java       |   4 +-
 .../lens/cube/parse/TestTimeRangeExtractor.java |  14 +-
 .../lens/cube/parse/TestTimeRangeResolver.java  |   7 +-
 .../lens/cube/parse/TestTimeRangeWriter.java    |  17 +-
 .../parse/TestTimeRangeWriterWithQuery.java     |  10 +-
 .../driver/jdbc/TestColumnarSQLRewriter.java    |  54 +++----
 .../lens/server/api/error/LensException.java    |  12 +-
 .../UnSupportedQuerySubmitOpException.java      |   2 +-
 .../apache/lens/server/rewrite/RewriteUtil.java |  14 +-
 .../lens/server/query/TestQueryService.java     |  10 +-
 .../lens/server/rewrite/TestRewriting.java      |  15 +-
 61 files changed, 741 insertions(+), 571 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-api/src/main/resources/lens-errors.conf
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/lens-errors.conf b/lens-api/src/main/resources/lens-errors.conf
index e6715b6..3fb191e 100644
--- a/lens-api/src/main/resources/lens-errors.conf
+++ b/lens-api/src/main/resources/lens-errors.conf
@@ -94,6 +94,168 @@ lensCubeErrors = [
     payloadClass = org.apache.lens.cube.error.ConflictingFields
   }
 
+  {
+   errorCode = 3004
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No reference column available for : %s "
+  }
+
+ {
+   errorCode = 3005
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "More than one cube accessed in query : %s and %s"
+ }
+
+ {
+   errorCode = 3006
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Neither cube nor dimensions accessed in the query"
+ }
+
+ {
+   errorCode = 3007
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No timerange filter specified"
+ }
+
+ {
+   errorCode = 3008
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "%s is not timed dimension"
+ }
+
+ {
+   errorCode = 3009
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Error in parsing input date format. Expected format %s, date provided %s"
+ }
+
+ {
+   errorCode = 3010
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Date value cannot be null or empty"
+ }
+
+ {
+   errorCode = 3011
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Invalid time unit %s"
+ }
+
+ {
+   errorCode = 3012
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Selecting all columns is not supported"
+ }
+
+ {
+   errorCode = 3013
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Ambiguous column %s, in dimensions %s and %s"
+ }
+
+ {
+   errorCode = 3014
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Ambiguous column %s, in cube: %s and in dimension: %s"
+ }
+
+ {
+   errorCode = 3015
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Could not find the table containing column: %s"
+ }
+
+ {
+   errorCode = 3016
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "%s : Not a cube column"
+ }
+
+ {
+   errorCode = 3017
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No candidate fact table available to answer the query, because %s"
+ }
+
+ {
+   errorCode = 3018
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No join condition available"
+ }
+
+ {
+   errorCode = 3019
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No storage table available for candidate fact: %s"
+ }
+
+ {
+   errorCode = 3020
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Default aggregate is not set for measure: %s"
+ }
+
+ {
+   errorCode = 3021
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Invalid time range"
+ }
+
+ {
+   errorCode = 3022
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "From date: %s  should be smaller than to date: %s"
+ }
+
+ {
+   errorCode = 3023
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No join path defined from %s to %s"
+ }
+
+ {
+   errorCode = 3024
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Join target table: %s is neither dimension nor cube"
+ }
+
+ {
+   errorCode = 3025
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No fact table has the queried columns : %s"
+ }
+
+ {
+   errorCode = 3026
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No candidate dimension storage tables for dimension because %s"
+ }
+
+ {
+   errorCode = 3027
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No dimension table has the queried columns for %s, columns: %s"
+ }
+
+ {
+   errorCode = 3028
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "No candidate dimension table available for %s to answer the query, because %s"
+ }
+
+ {
+   errorCode = 3029
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Configured timerange writer cannot be used. Reason %s"
+ }
+
+ {
+   errorCode = 3030
+   httpStatusCode = ${BAD_REQUEST}
+   errorMsg = "Expression %s is not available in any fact"
+ }
+
 ]
 
 # Overriding errors in lens-errors.conf via lens-errors-override.conf:

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
index 0006b22..1fe74e2 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
@@ -22,7 +22,34 @@ public enum LensCubeErrorCode {
 
   SYNTAX_ERROR(3001),
   COLUMN_UNAVAILABLE_IN_TIME_RANGE(3002),
-  FIELDS_CANNOT_BE_QUERIED_TOGETHER(3003);
+  FIELDS_CANNOT_BE_QUERIED_TOGETHER(3003),
+  NO_REF_COL_AVAILABLE(3004),
+  MORE_THAN_ONE_CUBE(3005),
+  NEITHER_CUBE_NOR_DIMENSION(3006),
+  NO_TIMERANGE_FILTER(3007),
+  NOT_A_TIMED_DIMENSION(3008),
+  WRONG_TIME_RANGE_FORMAT(3009),
+  NULL_DATE_VALUE(3010),
+  INVALID_TIME_UNIT(3011),
+  ALL_COLUMNS_NOT_SUPPORTED(3012),
+  AMBIGOUS_DIM_COLUMN(3013),
+  AMBIGOUS_CUBE_COLUMN(3014),
+  COLUMN_NOT_FOUND(3015),
+  NOT_A_CUBE_COLUMN(3016),
+  NO_CANDIDATE_FACT_AVAILABLE(3017),
+  NO_JOIN_CONDITION_AVAIABLE(3018),
+  NO_STORAGE_TABLE_AVAIABLE(3019),
+  NO_DEFAULT_AGGREGATE(3020),
+  INVALID_TIME_RANGE(3021),
+  FROM_AFTER_TO(3022),
+  NO_JOIN_PATH(3023),
+  JOIN_TARGET_NOT_CUBE_TABLE(3024),
+  NO_FACT_HAS_COLUMN(3025),
+  NO_CANDIDATE_DIM_STORAGE_TABLES(3026),
+  NO_DIM_HAS_COLUMN(3027),
+  NO_CANDIDATE_DIM_AVAILABLE(3028),
+  CANNOT_USE_TIMERANGE_WRITER(3029),
+  EXPRESSION_NOT_IN_ANY_FACT(3030);
 
   public int getValue() {
     return this.errorCode;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/AbridgedTimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AbridgedTimeRangeWriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AbridgedTimeRangeWriter.java
index d5276e0..2caea56 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AbridgedTimeRangeWriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AbridgedTimeRangeWriter.java
@@ -22,9 +22,9 @@ package org.apache.lens.cube.parse;
 import java.util.*;
 
 import org.apache.lens.cube.metadata.FactPartition;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
@@ -42,12 +42,12 @@ public class AbridgedTimeRangeWriter implements TimeRangeWriter {
    * @param tableName
    * @param parts
    * @return
-   * @throws SemanticException
+   * @throws LensException
    */
   @Override
   public String getTimeRangeWhereClause(CubeQueryContext cubeQueryContext,
     String tableName,
-    Set<FactPartition> parts) throws SemanticException {
+    Set<FactPartition> parts) throws LensException {
     if (parts == null || parts.isEmpty()) {
       return "";
     }
@@ -73,7 +73,7 @@ public class AbridgedTimeRangeWriter implements TimeRangeWriter {
 
   private String getClause(CubeQueryContext cubeQueryContext,
     String tableName,
-    Set<FactPartition> parts) throws SemanticException {
+    Set<FactPartition> parts) throws LensException {
     Map<String, List<String>> partFilterMap = new HashMap<String, List<String>>();
     List<String> allTimeRangeFilters = new ArrayList<String>();
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
index f880495..9c0f936 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
@@ -24,16 +24,16 @@ import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_TABLE_OR_COL;
 
 import java.util.Iterator;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.CubeMeasure;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.antlr.runtime.CommonToken;
 
@@ -51,7 +51,7 @@ class AggregateResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     if (cubeql.getCube() == null) {
       return;
     }
@@ -110,7 +110,7 @@ class AggregateResolver implements ContextRewriter {
   // We need to traverse the clause looking for eligible measures which can be
   // wrapped inside aggregates
   // We have to skip any columns that are already inside an aggregate UDAF
-  private String resolveClause(CubeQueryContext cubeql, ASTNode clause) throws SemanticException {
+  private String resolveClause(CubeQueryContext cubeql, ASTNode clause) throws LensException {
 
     if (clause == null) {
       return null;
@@ -123,7 +123,7 @@ class AggregateResolver implements ContextRewriter {
     return HQLParser.getString(clause);
   }
 
-  private void transform(CubeQueryContext cubeql, ASTNode parent, ASTNode node, int nodePos) throws SemanticException {
+  private void transform(CubeQueryContext cubeql, ASTNode parent, ASTNode node, int nodePos) throws LensException {
     if (node == null) {
       return;
     }
@@ -158,7 +158,7 @@ class AggregateResolver implements ContextRewriter {
 
   // Wrap an aggregate function around the node if its a measure, leave it
   // unchanged otherwise
-  private ASTNode wrapAggregate(CubeQueryContext cubeql, ASTNode node) throws SemanticException {
+  private ASTNode wrapAggregate(CubeQueryContext cubeql, ASTNode node) throws LensException {
 
     String tabname = null;
     String colname;
@@ -188,7 +188,7 @@ class AggregateResolver implements ContextRewriter {
         String aggregateFn = measure.getAggregate();
 
         if (StringUtils.isBlank(aggregateFn)) {
-          throw new SemanticException(ErrorMsg.NO_DEFAULT_AGGREGATE, colname);
+          throw new LensException(LensCubeErrorCode.NO_DEFAULT_AGGREGATE.getValue(), colname);
         }
         ASTNode fnroot = new ASTNode(new CommonToken(HiveParser.TOK_FUNCTION));
         fnroot.setParent(node.getParent());

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
index 9309307..98e38d5 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
@@ -25,15 +25,15 @@ import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.CubeInterface;
 import org.apache.lens.cube.metadata.Dimension;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.antlr.runtime.CommonToken;
 
@@ -50,7 +50,7 @@ class AliasReplacer implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     Map<String, String> colToTableAlias = cubeql.getColToTableAlias();
 
     extractTabAliasForCol(cubeql);
@@ -93,9 +93,9 @@ class AliasReplacer implements ContextRewriter {
   /**
    * Figure out queried dim attributes and measures from the cube query context
    * @param cubeql
-   * @throws SemanticException
+   * @throws LensException
    */
-  private void findDimAttributesAndMeasures(CubeQueryContext cubeql) throws SemanticException {
+  private void findDimAttributesAndMeasures(CubeQueryContext cubeql) throws LensException {
     CubeInterface cube = cubeql.getCube();
     if (cube != null) {
       Set<String> cubeColsQueried = cubeql.getColumnsQueried(cube.getName());
@@ -119,11 +119,11 @@ class AliasReplacer implements ContextRewriter {
     }
   }
 
-  private void extractTabAliasForCol(CubeQueryContext cubeql) throws SemanticException {
+  private void extractTabAliasForCol(CubeQueryContext cubeql) throws LensException {
     extractTabAliasForCol(cubeql, cubeql);
   }
 
-  static void extractTabAliasForCol(CubeQueryContext cubeql, TrackQueriedColumns tqc) throws SemanticException {
+  static void extractTabAliasForCol(CubeQueryContext cubeql, TrackQueriedColumns tqc) throws LensException {
     Map<String, String> colToTableAlias = cubeql.getColToTableAlias();
     Set<String> columns = tqc.getTblAliasToColumns().get(CubeQueryContext.DEFAULT_TABLE);
     if (columns == null) {
@@ -145,19 +145,20 @@ class AliasReplacer implements ContextRewriter {
           if (!inCube) {
             String prevDim = colToTableAlias.get(col.toLowerCase());
             if (prevDim != null && !prevDim.equals(dim.getName())) {
-              throw new SemanticException(ErrorMsg.AMBIGOUS_DIM_COLUMN, col, prevDim, dim.getName());
+              throw new LensException(LensCubeErrorCode.AMBIGOUS_DIM_COLUMN.getValue(), col, prevDim, dim.getName());
             }
             String dimAlias = cubeql.getAliasForTableName(dim.getName());
             colToTableAlias.put(col.toLowerCase(), dimAlias);
             tqc.addColumnsQueried(dimAlias, col.toLowerCase());
           } else {
             // throw error because column is in both cube and dimension table
-            throw new SemanticException(ErrorMsg.AMBIGOUS_CUBE_COLUMN, col, cubeql.getCube().getName(), dim.getName());
+            throw new LensException(LensCubeErrorCode.AMBIGOUS_CUBE_COLUMN.getValue(), col,
+                cubeql.getCube().getName(), dim.getName());
           }
         }
       }
       if (colToTableAlias.get(col.toLowerCase()) == null) {
-        throw new SemanticException(ErrorMsg.COLUMN_NOT_FOUND, col);
+        throw new LensException(LensCubeErrorCode.COLUMN_NOT_FOUND.getValue(), col);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java
index 6c85c2d..4bd7cc8 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/BetweenTimeRangeWriter.java
@@ -22,10 +22,10 @@ import java.util.Iterator;
 import java.util.Set;
 import java.util.TreeSet;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.FactPartition;
+import org.apache.lens.server.api.error.LensException;
 
-import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 /**
  * Writes partitions queried in timerange as between clause.
@@ -34,7 +34,7 @@ public class BetweenTimeRangeWriter implements TimeRangeWriter {
 
   @Override
   public String getTimeRangeWhereClause(CubeQueryContext cubeQueryContext, String tableName,
-    Set<FactPartition> rangeParts) throws SemanticException {
+    Set<FactPartition> rangeParts) throws LensException {
     if (rangeParts.size() == 0) {
       return "";
     }
@@ -52,18 +52,19 @@ public class BetweenTimeRangeWriter implements TimeRangeWriter {
       while (it.hasNext()) {
         FactPartition part = it.next();
         if (part.hasContainingPart()) {
-          throw new SemanticException(ErrorMsg.CANNOT_USE_TIMERANGE_WRITER, "Partition has containing part");
+          throw new LensException(LensCubeErrorCode.CANNOT_USE_TIMERANGE_WRITER.getValue(),
+              "Partition has containing part");
         }
         if (first == null) {
           first = part;
         } else {
           // validate partcol, update period are same for both
           if (!first.getPartCol().equalsIgnoreCase(part.getPartCol())) {
-            throw new SemanticException(ErrorMsg.CANNOT_USE_TIMERANGE_WRITER,
+            throw new LensException(LensCubeErrorCode.CANNOT_USE_TIMERANGE_WRITER.getValue(),
               "Part columns are different in partitions");
           }
           if (!first.getPeriod().equals(part.getPeriod())) {
-            throw new SemanticException(ErrorMsg.CANNOT_USE_TIMERANGE_WRITER,
+            throw new LensException(LensCubeErrorCode.CANNOT_USE_TIMERANGE_WRITER.getValue(),
               "Partitions are in different update periods");
           }
         }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
index 6c88fb3..8a6aa00 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
@@ -28,6 +28,7 @@ import org.apache.lens.cube.metadata.CubeInterface;
 import org.apache.lens.cube.metadata.FactPartition;
 import org.apache.lens.cube.parse.HQLParser.ASTNodeVisitor;
 import org.apache.lens.cube.parse.HQLParser.TreeNode;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -35,7 +36,6 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.antlr.runtime.CommonToken;
@@ -118,7 +118,7 @@ public class CandidateFact implements CandidateTable {
     numQueriedParts += incr;
   }
 
-  private void updateTimeRanges(ASTNode root, ASTNode parent, int childIndex) throws SemanticException {
+  private void updateTimeRanges(ASTNode root, ASTNode parent, int childIndex) throws LensException {
     if (root == null) {
       return;
     } else if (root.getToken().getType() == TOK_FUNCTION) {
@@ -135,7 +135,7 @@ public class CandidateFact implements CandidateTable {
   }
 
   // copy ASTs from CubeQueryContext
-  public void copyASTs(CubeQueryContext cubeql) throws SemanticException {
+  public void copyASTs(CubeQueryContext cubeql) throws LensException {
     this.selectAST = HQLParser.copyAST(cubeql.getSelectAST());
     this.whereAST = HQLParser.copyAST(cubeql.getWhereAST());
     if (cubeql.getJoinTree() != null) {
@@ -155,7 +155,7 @@ public class CandidateFact implements CandidateTable {
     return getStorgeWhereClauseMap().get(storageTable);
   }
 
-  public void updateTimeranges(CubeQueryContext cubeql) throws SemanticException {
+  public void updateTimeranges(CubeQueryContext cubeql) throws LensException {
     // Update WhereAST with range clause
     // resolve timerange positions and replace it by corresponding where clause
     for (int i = 0; i < cubeql.getTimeRanges().size(); i++) {
@@ -166,7 +166,7 @@ public class CandidateFact implements CandidateTable {
         try {
           rangeAST = HQLParser.parseExpr(rangeWhere);
         } catch (ParseException e) {
-          throw new SemanticException(e);
+          throw new LensException(e);
         }
         rangeAST.setParent(timenodes.get(i).parent);
         timenodes.get(i).parent.setChild(timenodes.get(i).childIndex, rangeAST);
@@ -178,9 +178,9 @@ public class CandidateFact implements CandidateTable {
    * Update the ASTs to include only the fields queried from this fact, in all the expressions
    *
    * @param cubeql
-   * @throws SemanticException
+   * @throws LensException
    */
-  public void updateASTs(CubeQueryContext cubeql) throws SemanticException {
+  public void updateASTs(CubeQueryContext cubeql) throws LensException {
     Set<String> cubeCols = cubeql.getCube().getAllFieldNames();
 
     // update select AST with selected fields
@@ -223,7 +223,7 @@ public class CandidateFact implements CandidateTable {
   }
 
   private Set<String> getColsInExpr(final CubeQueryContext cubeql, final Set<String> cubeCols,
-    ASTNode expr) throws SemanticException {
+    ASTNode expr) throws LensException {
     final Set<String> cubeColsInExpr = new HashSet<String>();
     HQLParser.bft(expr, new ASTNodeVisitor() {
       @Override
@@ -405,7 +405,7 @@ public class CandidateFact implements CandidateTable {
     return null;
   }
 
-  public Set<String> getTimePartCols(CubeQueryContext query) throws SemanticException {
+  public Set<String> getTimePartCols(CubeQueryContext query) throws LensException {
     Set<String> cubeTimeDimensions = baseTable.getTimedDimensions();
     Set<String> timePartDimensions = new HashSet<String>();
     String singleStorageTable = storageTables.iterator().next();
@@ -413,7 +413,7 @@ public class CandidateFact implements CandidateTable {
     try {
       partitionKeys = query.getMetastoreClient().getTable(singleStorageTable).getPartitionKeys();
     } catch (HiveException e) {
-      throw new SemanticException(e);
+      throw new LensException(e);
     }
     for (FieldSchema fs : partitionKeys) {
       if (cubeTimeDimensions.contains(CubeQueryContext.getTimeDimOfPartitionColumn(baseTable, fs.getName()))) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
index 3e73d02..69fbcc5 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
@@ -20,18 +20,18 @@ package org.apache.lens.cube.parse;
 
 import java.util.*;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.cube.parse.CubeQueryContext.OptionalDimCtx;
 import org.apache.lens.cube.parse.CubeQueryContext.QueriedExprColumn;
 import org.apache.lens.cube.parse.ExpressionResolver.ExprSpecContext;
 import org.apache.lens.cube.parse.ExpressionResolver.ExpressionContext;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import com.google.common.collect.Sets;
 
@@ -56,7 +56,7 @@ class CandidateTableResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     if (checkForQueriedColumns) {
       log.debug("Dump queried columns:{}", cubeql.getTblAliasToColumns());
       populateCandidateTables(cubeql);
@@ -87,12 +87,12 @@ class CandidateTableResolver implements ContextRewriter {
     }
   }
 
-  private void populateCandidateTables(CubeQueryContext cubeql) throws SemanticException {
+  private void populateCandidateTables(CubeQueryContext cubeql) throws LensException {
     try {
       if (cubeql.getCube() != null) {
         List<CubeFactTable> factTables = cubeql.getMetastoreClient().getAllFacts(cubeql.getCube());
         if (factTables.isEmpty()) {
-          throw new SemanticException(ErrorMsg.NO_CANDIDATE_FACT_AVAILABLE, cubeql.getCube().getName()
+          throw new LensException(LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getValue(), cubeql.getCube().getName()
             + " does not have any facts");
         }
         for (CubeFactTable fact : factTables) {
@@ -108,11 +108,11 @@ class CandidateTableResolver implements ContextRewriter {
         }
       }
     } catch (HiveException e) {
-      throw new SemanticException(e);
+      throw new LensException(e);
     }
   }
 
-  private void populateDimTables(Dimension dim, CubeQueryContext cubeql, boolean optional) throws SemanticException {
+  private void populateDimTables(Dimension dim, CubeQueryContext cubeql, boolean optional) throws LensException {
     if (cubeql.getCandidateDimTables().get(dim) != null) {
       return;
     }
@@ -122,7 +122,7 @@ class CandidateTableResolver implements ContextRewriter {
       List<CubeDimensionTable> dimtables = cubeql.getMetastoreClient().getAllDimensionTables(dim);
       if (dimtables.isEmpty()) {
         if (!optional) {
-          throw new SemanticException(ErrorMsg.NO_CANDIDATE_DIM_AVAILABLE, dim.getName(),
+          throw new LensException(LensCubeErrorCode.NO_CANDIDATE_DIM_AVAILABLE.getValue(), dim.getName(),
             "Dimension tables do not exist");
         } else {
           log.info("Not considering optional dimension {}  as, No dimension tables exist", dim);
@@ -135,7 +135,7 @@ class CandidateTableResolver implements ContextRewriter {
       }
       log.info("Populated candidate dims: {} for {}", cubeql.getCandidateDimTables().get(dim), dim);
     } catch (HiveException e) {
-      throw new SemanticException(e);
+      throw new LensException(e);
     }
   }
 
@@ -198,7 +198,7 @@ class CandidateTableResolver implements ContextRewriter {
     }
   }
 
-  private void resolveCandidateFactTables(CubeQueryContext cubeql) throws SemanticException {
+  private void resolveCandidateFactTables(CubeQueryContext cubeql) throws LensException {
     if (cubeql.getCube() != null) {
       String str = cubeql.getConf().get(CubeQueryConfUtil.getValidFactTablesKey(cubeql.getCube().getName()));
       List<String> validFactTables =
@@ -284,7 +284,7 @@ class CandidateTableResolver implements ContextRewriter {
       Set<String> dimExprs = new HashSet<String>(cubeql.getQueriedExprs());
       dimExprs.removeAll(cubeql.getQueriedExprsWithMeasures());
       if (cubeql.getCandidateFacts().size() == 0) {
-        throw new SemanticException(ErrorMsg.NO_FACT_HAS_COLUMN,
+        throw new LensException(LensCubeErrorCode.NO_FACT_HAS_COLUMN.getValue(),
           (!queriedDimAttrs.isEmpty() ? queriedDimAttrs.toString() : "")
           +  (!dimExprs.isEmpty() ? dimExprs.toString() : ""));
       }
@@ -308,14 +308,14 @@ class CandidateTableResolver implements ContextRewriter {
         String msrString = (!queriedMsrs.isEmpty() ? queriedMsrs.toString() : "")
           + (!cubeql.getQueriedExprsWithMeasures().isEmpty() ? cubeql.getQueriedExprsWithMeasures().toString() : "");
         if (cfactset.isEmpty()) {
-          throw new SemanticException(ErrorMsg.NO_FACT_HAS_COLUMN, msrString);
+          throw new LensException(LensCubeErrorCode.NO_FACT_HAS_COLUMN.getValue(), msrString);
         }
         cubeql.getCandidateFactSets().addAll(cfactset);
         cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCause.columnNotFound(queriedMsrs,
           cubeql.getQueriedExprsWithMeasures()));
 
         if (cubeql.getCandidateFacts().size() == 0) {
-          throw new SemanticException(ErrorMsg.NO_FACT_HAS_COLUMN, msrString);
+          throw new LensException(LensCubeErrorCode.NO_FACT_HAS_COLUMN.getValue(), msrString);
         }
       }
     }
@@ -361,7 +361,7 @@ class CandidateTableResolver implements ContextRewriter {
     return cfactset;
   }
 
-  private void resolveCandidateDimTablesForJoinsAndDenorms(CubeQueryContext cubeql) throws SemanticException {
+  private void resolveCandidateDimTablesForJoinsAndDenorms(CubeQueryContext cubeql) throws LensException {
     if (cubeql.getAutoJoinCtx() == null) {
       return;
     }
@@ -431,8 +431,8 @@ class CandidateTableResolver implements ContextRewriter {
           OptionalDimCtx optdim = cubeql.getOptionalDimensionMap().get(dim);
           if ((cubeql.getDimensions() != null && cubeql.getDimensions().contains(dim))
             || (optdim != null && optdim.isRequiredInJoinChain)) {
-            throw new SemanticException(ErrorMsg.NO_DIM_HAS_COLUMN, dim.getName(), cubeql.getAutoJoinCtx()
-              .getAllJoinPathColumnsOfTable(dim).toString());
+            throw new LensException(LensCubeErrorCode.NO_DIM_HAS_COLUMN.getValue(), dim.getName(),
+                cubeql.getAutoJoinCtx().getAllJoinPathColumnsOfTable(dim).toString());
           } else {
             // remove it from optional tables
             log.info("Not considering optional dimension {} as, No dimension table has the queried columns:{}"
@@ -444,7 +444,7 @@ class CandidateTableResolver implements ContextRewriter {
     }
   }
 
-  private void resolveCandidateFactTablesForJoins(CubeQueryContext cubeql) throws SemanticException {
+  private void resolveCandidateFactTablesForJoins(CubeQueryContext cubeql) throws LensException {
     if (cubeql.getAutoJoinCtx() == null) {
       return;
     }
@@ -474,7 +474,8 @@ class CandidateTableResolver implements ContextRewriter {
         }
       }
       if (cubeql.getCandidateFacts().size() == 0) {
-        throw new SemanticException(ErrorMsg.NO_FACT_HAS_COLUMN, colSet == null ? "NULL" : colSet.toString());
+        throw new LensException(LensCubeErrorCode.NO_FACT_HAS_COLUMN.getValue(),
+            colSet == null ? "NULL" : colSet.toString());
       }
     }
   }
@@ -633,7 +634,7 @@ class CandidateTableResolver implements ContextRewriter {
     }
   }
 
-  private void resolveCandidateDimTables(CubeQueryContext cubeql) throws SemanticException {
+  private void resolveCandidateDimTables(CubeQueryContext cubeql) throws LensException {
     if (cubeql.getDimensions().size() != 0) {
       for (Dimension dim : cubeql.getDimensions()) {
         // go over the columns accessed in the query and find out which tables
@@ -668,7 +669,7 @@ class CandidateTableResolver implements ContextRewriter {
         }
 
         if (cubeql.getCandidateDimTables().get(dim).size() == 0) {
-          throw new SemanticException(ErrorMsg.NO_DIM_HAS_COLUMN, dim.getName(), cubeql
+          throw new LensException(LensCubeErrorCode.NO_DIM_HAS_COLUMN.getValue(), dim.getName(), cubeql
             .getColumnsQueried(dim.getName()).toString());
         }
       }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckColumnMapping.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckColumnMapping.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckColumnMapping.java
index 59e4fd3..33a5dda 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckColumnMapping.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckColumnMapping.java
@@ -18,8 +18,9 @@
  */
 package org.apache.lens.cube.parse;
 
+import org.apache.lens.server.api.error.LensException;
+
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 public class CheckColumnMapping extends ValidationRule {
 
@@ -28,7 +29,7 @@ public class CheckColumnMapping extends ValidationRule {
   }
 
   @Override
-  public boolean validate(CubeQueryContext ctx) throws SemanticException {
+  public boolean validate(CubeQueryContext ctx) throws LensException {
     // TODO
     return true;
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckTableNames.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckTableNames.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckTableNames.java
index fce1a04..8586262 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckTableNames.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CheckTableNames.java
@@ -18,8 +18,9 @@
  */
 package org.apache.lens.cube.parse;
 
+import org.apache.lens.server.api.error.LensException;
+
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 public class CheckTableNames extends ValidationRule {
 
@@ -28,7 +29,7 @@ public class CheckTableNames extends ValidationRule {
   }
 
   @Override
-  public boolean validate(CubeQueryContext ctx) throws SemanticException {
+  public boolean validate(CubeQueryContext ctx) throws LensException {
     // TODO
     return true;
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
index 2ff5959..dfe0c32 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
@@ -23,13 +23,13 @@ import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 import java.util.HashSet;
 import java.util.Set;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.parse.HQLParser.ASTNodeVisitor;
 import org.apache.lens.cube.parse.HQLParser.TreeNode;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import com.google.common.base.Optional;
 
@@ -39,11 +39,11 @@ class ColumnResolver implements ContextRewriter {
   }
 
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     extractColumns(cubeql);
   }
 
-  private void extractColumns(CubeQueryContext cubeql) throws SemanticException {
+  private void extractColumns(CubeQueryContext cubeql) throws LensException {
     // Check if its 'select * from...'
     ASTNode selTree = cubeql.getSelectAST();
     if (selTree.getChildCount() == 1) {
@@ -55,7 +55,7 @@ class ColumnResolver implements ContextRewriter {
       if (star != null) {
         int starType = star.getToken().getType();
         if (TOK_FUNCTIONSTAR == starType || TOK_ALLCOLREF == starType) {
-          throw new SemanticException(ErrorMsg.ALL_COLUMNS_NOT_SUPPORTED);
+          throw new LensException(LensCubeErrorCode.ALL_COLUMNS_NOT_SUPPORTED.getValue());
         }
       }
     }
@@ -70,7 +70,7 @@ class ColumnResolver implements ContextRewriter {
     for (String table : cubeql.getTblAliasToColumns().keySet()) {
       if (!CubeQueryContext.DEFAULT_TABLE.equalsIgnoreCase(table)) {
         if (!cubeql.addQueriedTable(table)) {
-          throw new SemanticException(ErrorMsg.NEITHER_CUBE_NOR_DIMENSION);
+          throw new LensException(LensCubeErrorCode.NEITHER_CUBE_NOR_DIMENSION.getValue());
         }
       }
     }
@@ -78,7 +78,7 @@ class ColumnResolver implements ContextRewriter {
 
   // finds columns in AST passed.
   static void getColsForTree(final CubeQueryContext cubeql, ASTNode tree, final TrackQueriedColumns tqc)
-    throws SemanticException {
+    throws LensException {
     if (tree == null) {
       return;
     }
@@ -124,7 +124,7 @@ class ColumnResolver implements ContextRewriter {
   // added
   // only if timerange clause shouldn't be replaced with its corresponding
   // partition column
-  private void getColsForWhereTree(final CubeQueryContext cubeql) throws SemanticException {
+  private void getColsForWhereTree(final CubeQueryContext cubeql) throws LensException {
     if (cubeql.getWhereAST() == null) {
       return;
     }
@@ -152,7 +152,7 @@ class ColumnResolver implements ContextRewriter {
   // and user given alias is the final alias of the expression.
   private static final String SELECT_ALIAS_PREFIX = "expr";
 
-  private void getColsForSelectTree(final CubeQueryContext cubeql) throws SemanticException {
+  private void getColsForSelectTree(final CubeQueryContext cubeql) throws LensException {
     int exprInd = 1;
     for (int i = 0; i < cubeql.getSelectAST().getChildCount(); i++) {
       ASTNode selectExpr = (ASTNode) cubeql.getSelectAST().getChild(i);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/ContextRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ContextRewriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ContextRewriter.java
index feb26d7..073bc02 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ContextRewriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ContextRewriter.java
@@ -20,14 +20,12 @@ package org.apache.lens.cube.parse;
 
 import org.apache.lens.server.api.error.LensException;
 
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-
 interface ContextRewriter {
   /**
    * Rewrites and updates {@link CubeQueryContext}
    *
    * @param cubeql CubeQueryContext
-   * @throws SemanticException
+   * @throws LensException
    */
-  void rewriteContext(CubeQueryContext cubeql) throws SemanticException, LensException;
+  void rewriteContext(CubeQueryContext cubeql) throws LensException;
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index d753e3f..16429f0 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -29,14 +29,15 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.util.*;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.*;
 
@@ -166,7 +167,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
     new HashMap<Dimension, PruneCauses<CubeDimensionTable>>();
 
   public CubeQueryContext(ASTNode ast, QB qb, Configuration queryConf, HiveConf metastoreConf)
-    throws SemanticException {
+    throws LensException {
     this.ast = ast;
     this.qb = qb;
     this.conf = queryConf;
@@ -175,7 +176,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
     try {
       metastoreClient = CubeMetastoreClient.getInstance(metastoreConf);
     } catch (HiveException e) {
-      throw new SemanticException(e);
+      throw new LensException(e);
     }
     if (qb.getParseInfo().getWhrForClause(clauseName) != null) {
       this.whereAST = qb.getParseInfo().getWhrForClause(clauseName);
@@ -209,7 +210,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
     return dimensions != null && !dimensions.isEmpty();
   }
 
-  private void extractMetaTables() throws SemanticException {
+  private void extractMetaTables() throws LensException {
     List<String> tabAliases = new ArrayList<String>(qb.getTabAliases());
     Set<String> missing = new HashSet<String>();
     for (String alias : tabAliases) {
@@ -223,12 +224,12 @@ public class CubeQueryContext implements TrackQueriedColumns {
       boolean added = addJoinChain(alias, false);
       if (!added) {
         log.info("Queried tables do not exist. Missing table:{}", alias);
-        throw new SemanticException(ErrorMsg.NEITHER_CUBE_NOR_DIMENSION);
+        throw new LensException(LensCubeErrorCode.NEITHER_CUBE_NOR_DIMENSION.getValue());
       }
     }
   }
 
-  private boolean addJoinChain(String alias, boolean isOptional) throws SemanticException {
+  private boolean addJoinChain(String alias, boolean isOptional) throws LensException {
     boolean retVal = false;
     String aliasLowerCaseStr = alias.toLowerCase();
     JoinChain joinchain = null;
@@ -258,7 +259,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
       boolean added = addQueriedTable(alias, destTable, isOptional, true);
       if (!added) {
         log.info("Queried tables do not exist. Missing tables:{}", destTable);
-        throw new SemanticException(ErrorMsg.NEITHER_CUBE_NOR_DIMENSION);
+        throw new LensException(LensCubeErrorCode.NEITHER_CUBE_NOR_DIMENSION.getValue());
       }
       log.info("Added join chain for {}", destTable);
       return true;
@@ -267,11 +268,11 @@ public class CubeQueryContext implements TrackQueriedColumns {
     return retVal;
   }
 
-  public boolean addQueriedTable(String alias) throws SemanticException {
+  public boolean addQueriedTable(String alias) throws LensException {
     return addQueriedTable(alias, false);
   }
 
-  private boolean addQueriedTable(String alias, boolean isOptional) throws SemanticException {
+  private boolean addQueriedTable(String alias, boolean isOptional) throws LensException {
     String tblName = qb.getTabNameForAlias(alias);
     if (tblName == null) {
       tblName = alias;
@@ -295,10 +296,10 @@ public class CubeQueryContext implements TrackQueriedColumns {
    * @param isChainedDimension pass true when you're adding the dimension as a joinchain destination, pass false when
    *                           this table is mentioned by name in the user query
    * @return true if added
-   * @throws SemanticException
+   * @throws LensException
    */
   private boolean addQueriedTable(String alias, String tblName, boolean isOptional, boolean isChainedDimension)
-    throws SemanticException {
+    throws LensException {
     alias = alias.toLowerCase();
     if (cubeTbls.containsKey(alias)) {
       return true;
@@ -307,7 +308,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
       if (metastoreClient.isCube(tblName)) {
         if (cube != null) {
           if (!cube.getName().equalsIgnoreCase(tblName)) {
-            throw new SemanticException(ErrorMsg.MORE_THAN_ONE_CUBE, cube.getName(), tblName);
+            throw new LensException(LensCubeErrorCode.MORE_THAN_ONE_CUBE.getValue(), cube.getName(), tblName);
           }
         }
         cube = metastoreClient.getCube(tblName);
@@ -398,22 +399,22 @@ public class CubeQueryContext implements TrackQueriedColumns {
     boolean isRequiredInJoinChain = false;
   }
 
-  public void addOptionalJoinDimTable(String alias, boolean isRequired) throws SemanticException {
+  public void addOptionalJoinDimTable(String alias, boolean isRequired) throws LensException {
     addOptionalDimTable(alias, null, isRequired, null, false, (String[])null);
   }
 
   public void addOptionalExprDimTable(String dimAlias, String queriedExpr, String srcTableAlias,
-    CandidateTable candidate, String... cols) throws SemanticException {
+    CandidateTable candidate, String... cols) throws LensException {
     addOptionalDimTable(dimAlias, candidate, false, queriedExpr, false, srcTableAlias, cols);
   }
 
   public void addOptionalDimTable(String alias, CandidateTable candidate, boolean isRequiredInJoin, String cubeCol,
-    boolean isRef, String... cols) throws SemanticException {
+    boolean isRef, String... cols) throws LensException {
     addOptionalDimTable(alias, candidate, isRequiredInJoin, cubeCol, true, null, cols);
   }
 
   private void addOptionalDimTable(String alias, CandidateTable candidate, boolean isRequiredInJoin, String cubeCol,
-    boolean isRef, String tableAlias, String... cols) throws SemanticException {
+    boolean isRef, String tableAlias, String... cols) throws LensException {
     alias = alias.toLowerCase();
     try {
       if (!addQueriedTable(alias, true)) {
@@ -446,7 +447,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
           (cubeCol == null ? "" : " for column:" + cubeCol),  isRef);
       }
     } catch (HiveException e) {
-      throw new SemanticException(e);
+      throw new LensException(e);
     }
   }
 
@@ -679,14 +680,14 @@ public class CubeQueryContext implements TrackQueriedColumns {
     return StorageUtil.getWhereClause(dimsToQuery.get(cubeTbls.get(alias)), alias);
   }
 
-  String getQBFromString(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery) throws SemanticException {
+  String getQBFromString(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
     String fromString = null;
     if (getJoinTree() == null) {
       if (cube != null) {
         fromString = fact.getStorageString(getAliasForTableName(cube.getName()));
       } else {
         if (dimensions.size() != 1) {
-          throw new SemanticException(ErrorMsg.NO_JOIN_CONDITION_AVAIABLE);
+          throw new LensException(LensCubeErrorCode.NO_JOIN_CONDITION_AVAIABLE.getValue());
         }
         Dimension dim = dimensions.iterator().next();
         fromString = dimsToQuery.get(dim).getStorageString(getAliasForTableName(dim.getName()));
@@ -700,7 +701,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
   }
 
   private void getQLString(QBJoinTree joinTree, StringBuilder builder, CandidateFact fact,
-    Map<Dimension, CandidateDim> dimsToQuery) throws SemanticException {
+    Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
     String joiningTable = null;
     if (joinTree.getBaseSrc()[0] == null) {
       if (joinTree.getJoinSrc() != null) {
@@ -739,11 +740,11 @@ public class CubeQueryContext implements TrackQueriedColumns {
         dimsToQuery.get(cubeTbls.get(joiningTable)).setWhereClauseAdded();
       }
     } else {
-      throw new SemanticException(ErrorMsg.NO_JOIN_CONDITION_AVAIABLE);
+      throw new LensException(LensCubeErrorCode.NO_JOIN_CONDITION_AVAIABLE.getValue());
     }
   }
 
-  void setNonexistingParts(Map<String, Set<String>> nonExistingParts) throws SemanticException {
+  void setNonexistingParts(Map<String, Set<String>> nonExistingParts) throws LensException {
     if (!nonExistingParts.isEmpty()) {
       ByteArrayOutputStream out = null;
       String partsStr;
@@ -753,13 +754,13 @@ public class CubeQueryContext implements TrackQueriedColumns {
         mapper.writeValue(out, nonExistingParts);
         partsStr = out.toString("UTF-8");
       } catch (Exception e) {
-        throw new SemanticException("Error writing non existing parts", e);
+        throw new LensException("Error writing non existing parts", e);
       } finally {
         if (out != null) {
           try {
             out.close();
           } catch (IOException e) {
-            throw new SemanticException(e);
+            throw new LensException(e);
           }
         }
       }
@@ -773,7 +774,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
     return conf.get(CubeQueryConfUtil.NON_EXISTING_PARTITIONS);
   }
 
-  private Map<Dimension, CandidateDim> pickCandidateDimsToQuery(Set<Dimension> dimensions) throws SemanticException {
+  private Map<Dimension, CandidateDim> pickCandidateDimsToQuery(Set<Dimension> dimensions) throws LensException {
     Map<Dimension, CandidateDim> dimsToQuery = new HashMap<Dimension, CandidateDim>();
     if (!dimensions.isEmpty()) {
       for (Dimension dim : dimensions) {
@@ -792,18 +793,18 @@ public class CubeQueryContext implements TrackQueriedColumns {
               mapper.writeValue(out, dimPruningMsgs.get(dim).getJsonObject());
               reason = out.toString("UTF-8");
             } catch (Exception e) {
-              throw new SemanticException("Error writing dim pruning messages", e);
+              throw new LensException("Error writing dim pruning messages", e);
             } finally {
               if (out != null) {
                 try {
                   out.close();
                 } catch (IOException e) {
-                  throw new SemanticException(e);
+                  throw new LensException(e);
                 }
               }
             }
           }
-          throw new SemanticException(ErrorMsg.NO_CANDIDATE_DIM_AVAILABLE, dim.getName(), reason);
+          throw new LensException(LensCubeErrorCode.NO_CANDIDATE_DIM_AVAILABLE.getValue(), dim.getName(), reason);
         }
       }
     }
@@ -811,7 +812,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
     return dimsToQuery;
   }
 
-  private Set<CandidateFact> pickCandidateFactToQuery() throws SemanticException {
+  private Set<CandidateFact> pickCandidateFactToQuery() throws LensException {
     Set<CandidateFact> facts = null;
     if (hasCubeInQuery()) {
       if (candidateFactSets.size() > 0) {
@@ -827,18 +828,18 @@ public class CubeQueryContext implements TrackQueriedColumns {
             mapper.writeValue(out, factPruningMsgs.getJsonObject());
             reason = out.toString("UTF-8");
           } catch (Exception e) {
-            throw new SemanticException("Error writing fact pruning messages", e);
+            throw new LensException("Error writing fact pruning messages", e);
           } finally {
             if (out != null) {
               try {
                 out.close();
               } catch (IOException e) {
-                throw new SemanticException(e);
+                throw new LensException(e);
               }
             }
           }
         }
-        throw new SemanticException(ErrorMsg.NO_CANDIDATE_FACT_AVAILABLE, reason);
+        throw new LensException(LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getValue(), reason);
       }
     }
     return facts;
@@ -848,7 +849,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
   @Getter private Collection<CandidateFact> pickedFacts;
   @Getter private Collection<CandidateDim> pickedDimTables;
 
-  public String toHQL() throws SemanticException {
+  public String toHQL() throws LensException {
     Set<CandidateFact> cfacts = pickCandidateFactToQuery();
     Map<Dimension, CandidateDim> dimsToQuery = pickCandidateDimsToQuery(dimensions);
     if (autoJoinCtx != null) {
@@ -935,7 +936,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
   }
 
   private HQLContextInterface createHQLContext(Set<CandidateFact> facts, Map<Dimension, CandidateDim> dimsToQuery,
-    Map<CandidateFact, Set<Dimension>> factDimMap, CubeQueryContext query) throws SemanticException {
+    Map<CandidateFact, Set<Dimension>> factDimMap, CubeQueryContext query) throws LensException {
     if (facts == null || facts.size() == 0) {
       return new DimOnlyHQLContext(dimsToQuery, query);
     } else if (facts.size() == 1 && facts.iterator().next().getStorageTables().size() > 1) {
@@ -949,7 +950,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
     }
   }
 
-  public ASTNode toAST(Context ctx) throws SemanticException {
+  public ASTNode toAST(Context ctx) throws LensException {
     String hql = toHQL();
     ParseDriver pd = new ParseDriver();
     ASTNode tree;
@@ -957,7 +958,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
       log.info("HQL:{}", hql);
       tree = pd.parse(hql, ctx);
     } catch (ParseException e) {
-      throw new SemanticException(e);
+      throw new LensException(e);
     }
     return ParseUtils.findRootNonNullToken(tree);
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
index 72dc64b..0dfd7da 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
@@ -18,6 +18,8 @@
  */
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.error.LensCubeErrorCode.SYNTAX_ERROR;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
@@ -185,27 +187,37 @@ public class CubeQueryRewriter {
     rewriters.add(new LightestDimensionResolver(conf));
   }
 
-  public CubeQueryContext rewrite(ASTNode astnode) throws SemanticException, LensException {
-    CubeSemanticAnalyzer analyzer = new CubeSemanticAnalyzer(conf, hconf);
-    analyzer.analyze(astnode, qlCtx);
-    CubeQueryContext ctx = analyzer.getQueryContext();
+  public CubeQueryContext rewrite(ASTNode astnode) throws LensException {
+    CubeSemanticAnalyzer analyzer;
+    try {
+      analyzer = new CubeSemanticAnalyzer(conf, hconf);
+      analyzer.analyze(astnode, qlCtx);
+    } catch (SemanticException e) {
+      throw new LensException(SYNTAX_ERROR.getValue(), e, e.getMessage());
+    }
+    CubeQueryContext ctx = new CubeQueryContext(astnode, analyzer.getCubeQB(), conf, hconf);
     rewrite(rewriters, ctx);
     return ctx;
   }
 
-  public CubeQueryContext rewrite(String command) throws ParseException, SemanticException, LensException {
+  public CubeQueryContext rewrite(String command) throws LensException {
     if (command != null) {
       command = command.replace("\n", "");
     }
-    ParseDriver pd = new ParseDriver();
-    ASTNode tree = pd.parse(command, qlCtx, false);
-    tree = ParseUtils.findRootNonNullToken(tree);
+    ASTNode tree;
+    try {
+      ParseDriver pd = new ParseDriver();
+      tree = pd.parse(command, qlCtx, false);
+      tree = ParseUtils.findRootNonNullToken(tree);
+    } catch (ParseException e) {
+      throw new LensException(SYNTAX_ERROR.getValue(), e, e.getMessage());
+    }
     return rewrite(tree);
   }
 
   private static final String ITER_STR = "-ITER-";
 
-  private void rewrite(List<ContextRewriter> rewriters, CubeQueryContext ctx) throws SemanticException, LensException {
+  private void rewrite(List<ContextRewriter> rewriters, CubeQueryContext ctx) throws LensException {
     int i = 0;
     for (ContextRewriter rewriter : rewriters) {
       /*

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
index 68bffed..ebbe404 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
@@ -24,9 +24,10 @@ import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.*;
 
+import lombok.Getter;
+
 /**
  * Accepts cube query AST and rewrites into storage table query
  */
@@ -34,7 +35,8 @@ public class CubeSemanticAnalyzer extends SemanticAnalyzer {
   private final Configuration queryConf;
   private final HiveConf hiveConf;
   private final List<ValidationRule> validationRules = new ArrayList<ValidationRule>();
-  private CubeQueryContext cubeQl;
+  @Getter
+  private QB cubeQB;
 
   public CubeSemanticAnalyzer(Configuration queryConf, HiveConf hiveConf) throws SemanticException {
     super(hiveConf);
@@ -51,13 +53,7 @@ public class CubeSemanticAnalyzer extends SemanticAnalyzer {
   @Override
   public void analyzeInternal(ASTNode ast) throws SemanticException {
     reset();
-    QB qb = new QB(null, null, false);
-    // do not allow create table/view commands
-    // TODO Move this to a validation rule
-    if (ast.getToken().getType() == HiveParser.TOK_CREATETABLE
-      || ast.getToken().getType() == HiveParser.TOK_CREATEVIEW) {
-      throw new SemanticException(ErrorMsg.CREATE_NOT_ALLOWED);
-    }
+    cubeQB = new QB(null, null, false);
 
     if (ast.getToken().getType() == HiveParser.TOK_QUERY) {
       if (((ASTNode) ast.getChild(0)).getToken().getType() == HiveParser.KW_CUBE) {
@@ -69,34 +65,9 @@ public class CubeSemanticAnalyzer extends SemanticAnalyzer {
       }
     }
     // analyzing from the ASTNode.
-    if (!doPhase1(ast, qb, initPhase1Ctx())) {
+    if (!doPhase1(ast, cubeQB, initPhase1Ctx())) {
       // if phase1Result false return
       return;
     }
-    cubeQl = new CubeQueryContext(ast, qb, queryConf, hiveConf);
-    // cubeQl.init();
-    // validate();
-
-    // TODO Move this to a validation Rule
-    // QBParseInfo qbp = qb.getParseInfo();
-    // TreeSet<String> ks = new TreeSet<String>(qbp.getClauseNames());
-    // if (ks.size() > 1) {
-    // throw new SemanticException("nested/sub queries not allowed yet");
-    // }
-    // Operator sinkOp = genPlan(qb);
-    // System.out.println(sinkOp.toString());
-  }
-
-  @Override
-  public void validate() throws SemanticException {
-    for (ValidationRule rule : validationRules) {
-      if (!rule.validate(cubeQl)) {
-        break;
-      }
-    }
-  }
-
-  public CubeQueryContext getQueryContext() {
-    return cubeQl;
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
index 5c77548..486c6b3 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
@@ -29,12 +29,12 @@ import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.UpdatePeriod;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang.time.DateUtils;
-import org.apache.hadoop.hive.ql.ErrorMsg;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import lombok.Data;
 import lombok.EqualsAndHashCode;
@@ -120,17 +120,17 @@ public final class DateUtil {
     throw new IllegalArgumentException("Unsupported formatting for date" + str);
   }
 
-  public static Date resolveDate(String str, Date now) throws SemanticException {
+  public static Date resolveDate(String str, Date now) throws LensException {
     if (RELDATE_VALIDATOR.matcher(str).matches()) {
       return resolveRelativeDate(str, now);
     } else {
       return resolveAbsoluteDate(str);
     }
   }
-  public static String relativeToAbsolute(String relative) throws SemanticException {
+  public static String relativeToAbsolute(String relative) throws LensException {
     return relativeToAbsolute(relative, new Date());
   }
-  public static String relativeToAbsolute(String relative, Date now) throws SemanticException {
+  public static String relativeToAbsolute(String relative, Date now) throws LensException {
     if (RELDATE_VALIDATOR.matcher(relative).matches()) {
       return ABSDATE_PARSER.get().format(resolveRelativeDate(relative, now));
     } else {
@@ -138,18 +138,18 @@ public final class DateUtil {
     }
   }
 
-  public static Date resolveAbsoluteDate(String str) throws SemanticException {
+  public static Date resolveAbsoluteDate(String str) throws LensException {
     try {
       return ABSDATE_PARSER.get().parse(getAbsDateFormatString(str));
     } catch (ParseException e) {
       log.error("Invalid date format. expected only {} date provided:{}", ABSDATE_FMT, str, e);
-      throw new SemanticException(e, ErrorMsg.WRONG_TIME_RANGE_FORMAT, ABSDATE_FMT, str);
+      throw new LensException(LensCubeErrorCode.WRONG_TIME_RANGE_FORMAT.getValue(), ABSDATE_FMT, str);
     }
   }
 
-  public static Date resolveRelativeDate(String str, Date now) throws SemanticException {
+  public static Date resolveRelativeDate(String str, Date now) throws LensException {
     if (StringUtils.isBlank(str)) {
-      throw new SemanticException(ErrorMsg.NULL_DATE_VALUE);
+      throw new LensException(LensCubeErrorCode.NULL_DATE_VALUE.getValue());
     }
 
     // Resolve NOW with proper granularity
@@ -182,7 +182,7 @@ public final class DateUtil {
         } else if ("second".equals(unit)) {
           calendar = DateUtils.truncate(calendar, Calendar.SECOND);
         } else {
-          throw new SemanticException(ErrorMsg.INVALID_TIME_UNIT, unit);
+          throw new LensException(LensCubeErrorCode.INVALID_TIME_UNIT.getValue(), unit);
         }
       }
     }
@@ -443,7 +443,7 @@ public final class DateUtil {
       this.calendarField = calendarField;
     }
 
-    static TimeDiff parseFrom(String diffStr) throws SemanticException {
+    static TimeDiff parseFrom(String diffStr) throws LensException {
       // Get the relative diff part to get eventual date based on now.
       Matcher qtyMatcher = P_QUANTITY.matcher(diffStr);
       int qty = 1;
@@ -477,7 +477,7 @@ public final class DateUtil {
         } else if ("second".equals(unit)) {
           return new TimeDiff(qty, SECOND);
         } else {
-          throw new SemanticException(ErrorMsg.INVALID_TIME_UNIT, unit);
+          throw new LensException(LensCubeErrorCode.INVALID_TIME_UNIT.getValue(), unit);
         }
       }
       return new TimeDiff(0, SECOND);

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
index 517e8fc..3ef9652 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
@@ -23,17 +23,17 @@ import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_TABLE_OR_COL;
 
 import java.util.*;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.ReferencedDimAtrribute.ChainRefCol;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.cube.parse.ExpressionResolver.ExprSpecContext;
 import org.apache.lens.cube.parse.ExpressionResolver.ExpressionContext;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 import org.antlr.runtime.CommonToken;
 
@@ -134,7 +134,7 @@ public class DenormalizationResolver implements ContextRewriter {
     // When candidate table does not have the field, this method checks
     // if the field can be reached through reference,
     // if yes adds the ref usage and returns to true, if not returns false.
-    boolean addRefUsage(CandidateTable table, String col, String srcTbl) throws SemanticException {
+    boolean addRefUsage(CandidateTable table, String col, String srcTbl) throws LensException {
       // available as referenced col
       if (referencedCols.containsKey(col)) {
         for (ReferencedQueriedColumn refer : referencedCols.get(col)) {
@@ -195,7 +195,7 @@ public class DenormalizationResolver implements ContextRewriter {
     }
 
     public Set<Dimension> rewriteDenormctx(CandidateFact cfact, Map<Dimension, CandidateDim> dimsToQuery,
-      boolean replaceFact) throws SemanticException {
+      boolean replaceFact) throws LensException {
       Set<Dimension> refTbls = new HashSet<Dimension>();
 
       if (!tableToRefCols.isEmpty()) {
@@ -237,7 +237,7 @@ public class DenormalizationResolver implements ContextRewriter {
       return false;
     }
 
-    private void pickColumnsForTable(String tbl) throws SemanticException {
+    private void pickColumnsForTable(String tbl) throws LensException {
       if (tableToRefCols.containsKey(tbl)) {
         for (ReferencedQueriedColumn refered : tableToRefCols.get(tbl)) {
           if (!refered.col.isChainedColumn()) {
@@ -251,7 +251,7 @@ public class DenormalizationResolver implements ContextRewriter {
               }
             }
             if (refered.references.isEmpty()) {
-              throw new SemanticException("No reference column available for " + refered);
+              throw new LensException(LensCubeErrorCode.NO_REF_COL_AVAILABLE.getValue(), refered);
             }
             PickedReference picked = new PickedReference(refered.references.iterator().next(),
               cubeql.getAliasForTableName(refered.srcTable.getName()), tbl);
@@ -268,7 +268,7 @@ public class DenormalizationResolver implements ContextRewriter {
               }
             }
             if (refered.chainRefCols.isEmpty()) {
-              throw new SemanticException("No chain reference column available for " + refered);
+              throw new LensException("No chain reference column available for " + refered);
             }
             PickedReference picked =
               new PickedReference(refered.chainRefCols.iterator().next(),
@@ -280,7 +280,7 @@ public class DenormalizationResolver implements ContextRewriter {
       }
     }
 
-    private void replaceReferencedColumns(CandidateFact cfact, boolean replaceFact) throws SemanticException {
+    private void replaceReferencedColumns(CandidateFact cfact, boolean replaceFact) throws LensException {
       if (replaceFact
         && (tableToRefCols.get(cfact.getName()) != null && !tableToRefCols.get(cfact.getName()).isEmpty())) {
         resolveClause(cubeql, cfact.getSelectAST());
@@ -297,7 +297,7 @@ public class DenormalizationResolver implements ContextRewriter {
       resolveClause(cubeql, cubeql.getOrderByAST());
     }
 
-    private void resolveClause(CubeQueryContext query, ASTNode node) throws SemanticException {
+    private void resolveClause(CubeQueryContext query, ASTNode node) throws LensException {
       if (node == null) {
         return;
       }
@@ -366,7 +366,7 @@ public class DenormalizationResolver implements ContextRewriter {
    * replaced with the corresponding table reference
    */
   @Override
-  public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     DenormalizationContext denormCtx = cubeql.getDeNormCtx();
     if (denormCtx == null) {
       // Adds all the reference dimensions as eligible for denorm fields
@@ -400,8 +400,8 @@ public class DenormalizationResolver implements ContextRewriter {
           }
         }
         if (cubeql.getCandidateFacts().size() == 0) {
-          throw new SemanticException(ErrorMsg.NO_FACT_HAS_COLUMN, cubeql.getColumnsQueried(cubeql.getCube().getName())
-            .toString());
+          throw new LensException(LensCubeErrorCode.NO_FACT_HAS_COLUMN.getValue(),
+              cubeql.getColumnsQueried(cubeql.getCube().getName()).toString());
         }
         cubeql.pruneCandidateFactSet(CandidateTablePruneCode.COLUMN_NOT_FOUND);
       }
@@ -422,7 +422,7 @@ public class DenormalizationResolver implements ContextRewriter {
           }
 
           if (cubeql.getCandidateDimTables().get(dim).size() == 0) {
-            throw new SemanticException(ErrorMsg.NO_DIM_HAS_COLUMN,
+            throw new LensException(LensCubeErrorCode.NO_DIM_HAS_COLUMN.getValue(),
               dim.toString(), cubeql.getColumnsQueried(dim.getName()).toString());
           }
         }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
index 3814cf6..bcfc1f6 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
@@ -24,9 +24,9 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.lens.cube.metadata.Dimension;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 
 /**
  * Dimension HQLContext.
@@ -47,7 +47,7 @@ abstract class DimHQLContext extends SimpleHQLContext {
 
   DimHQLContext(CubeQueryContext query, Map<Dimension, CandidateDim> dimsToQuery,
     Set<Dimension> queriedDims, String select, String where,
-    String groupby, String orderby, String having, Integer limit) throws SemanticException {
+    String groupby, String orderby, String having, Integer limit) throws LensException {
     super(select, groupby, orderby, having, limit);
     this.query = query;
     this.dimsToQuery = dimsToQuery;
@@ -55,7 +55,7 @@ abstract class DimHQLContext extends SimpleHQLContext {
     this.queriedDims = queriedDims;
   }
 
-  protected void setMissingExpressions() throws SemanticException {
+  protected void setMissingExpressions() throws LensException {
     setFrom(getFromString());
     setWhere(joinWithAnd(
       genWhereClauseWithDimPartitions(where), getQuery().getConf().getBoolean(
@@ -63,13 +63,13 @@ abstract class DimHQLContext extends SimpleHQLContext {
         ? getPostSelectionWhereClause() : null));
   }
 
-  protected String getPostSelectionWhereClause() throws SemanticException {
+  protected String getPostSelectionWhereClause() throws LensException {
     return null;
   }
 
 
 
-  protected String getFromString() throws SemanticException {
+  protected String getFromString() throws LensException {
     String fromString = getFromTable();
     if (query.isAutoJoinResolved()) {
       fromString =
@@ -82,7 +82,7 @@ abstract class DimHQLContext extends SimpleHQLContext {
 
   protected abstract CandidateFact getQueriedFact();
 
-  protected abstract String getFromTable() throws SemanticException;
+  protected abstract String getFromTable() throws LensException;
 
   public Map<Dimension, CandidateDim> getDimsToQuery() {
     return dimsToQuery;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/3dc348ac/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
index 922501d..0c43d98 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
@@ -22,8 +22,7 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.lens.cube.metadata.Dimension;
-
-import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.lens.server.api.error.LensException;
 
 /**
  * HQL context class which passes all query strings from {@link CubeQueryContext} and works with all dimensions to be
@@ -33,23 +32,23 @@ import org.apache.hadoop.hive.ql.parse.SemanticException;
  */
 class DimOnlyHQLContext extends DimHQLContext {
 
-  DimOnlyHQLContext(Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query) throws SemanticException {
+  DimOnlyHQLContext(Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query) throws LensException {
     super(query, dimsToQuery, dimsToQuery.keySet(), query.getSelectTree(),
       query.getWhereTree(), query.getGroupByTree(), query.getOrderByTree(),
       query.getHavingTree(), query.getLimitValue());
   }
 
   DimOnlyHQLContext(Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query, String whereClause)
-    throws SemanticException {
+    throws LensException {
     super(query, dimsToQuery, dimsToQuery.keySet(), query.getSelectTree(), whereClause, query.getGroupByTree(), query
         .getOrderByTree(), query.getHavingTree(), query.getLimitValue());
   }
 
-  public String toHQL() throws SemanticException {
+  public String toHQL() throws LensException {
     return query.getInsertClause() + super.toHQL();
   }
 
-  protected String getFromTable() throws SemanticException {
+  protected String getFromTable() throws LensException {
     if (query.getAutoJoinCtx() != null && query.getAutoJoinCtx().isJoinsResolved()) {
       return getDimsToQuery().get(query.getAutoJoinCtx().getAutoJoinTarget()).getStorageString(
         query.getAliasForTableName(query.getAutoJoinCtx().getAutoJoinTarget().getName()));