You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lens.apache.org by de...@apache.org on 2016/02/03 13:10:21 UTC

[01/51] [abbrv] lens git commit: LENS-885: Cleanup of Cube test cases

Repository: lens
Updated Branches:
  refs/heads/current-release-line 79261f958 -> 15396047b


http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
index d16ea4c..36c1dba 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
@@ -19,6 +19,7 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
 
 import java.util.*;
@@ -55,18 +56,16 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
   @Test
   public void testDenormsAsDirectFields() throws ParseException, LensException, HiveException {
     // denorm fields directly available
-    String twoDaysITRange =
-      "time_range_in(it, '" + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','"
-        + CubeTestSetup.getDateUptoHours(NOW) + "')";
-    String hqlQuery = rewrite("select dim2big1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange, conf);
+    String hqlQuery = rewrite("select dim2big1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT,
+      conf);
     String expecteddim2big1 =
       getExpectedQuery(cubeName, "select testcube.dim2big1," + " max(testcube.msr3), sum(testcube.msr2) FROM ", null,
         " group by testcube.dim2big1", getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary4"),
         null);
     TestCubeRewriter.compareQueries(hqlQuery, expecteddim2big1);
     // with another table
-    hqlQuery = rewrite("select dim2big1, citydim.name, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange,
-      conf);
+    hqlQuery = rewrite("select dim2big1, citydim.name, max(msr3)," + " msr2 from testCube" + " where "
+      + TWO_DAYS_RANGE_IT, conf);
     String expecteddim2big1WithAnotherTable = getExpectedQuery(cubeName,
       "select testcube.dim2big1, citydim.name, max(testcube.msr3), sum(testcube.msr2) FROM ", " JOIN "
         + getDbName() + "c1_citytable citydim " + "on testcube.cityid = citydim.id and citydim.dt = 'latest' ", null,
@@ -75,7 +74,7 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
       null);
     TestCubeRewriter.compareQueries(hqlQuery, expecteddim2big1WithAnotherTable);
 
-    hqlQuery = rewrite("select dim2big2, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange, conf);
+    hqlQuery = rewrite("select dim2big2, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT, conf);
     String expecteddim2big2 =
       getExpectedQuery(cubeName, "select testcube.dim2big2, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
         " group by testcube.dim2big2", getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary4"),
@@ -84,8 +83,8 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
 
     Configuration conf2 = new Configuration(conf);
     conf2.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
-    hqlQuery =
-      rewrite("select testdim3.name, dim2big1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange, conf2);
+    hqlQuery = rewrite("select testdim3.name, dim2big1, max(msr3)," + " msr2 from testCube" + " where "
+      + TWO_DAYS_RANGE_IT, conf2);
     String expected =
       getExpectedQuery(cubeName,
         "select testdim3.name, testcube.dim2big1, max(testcube.msr3), sum(testcube.msr2) FROM ", " JOIN "
@@ -96,9 +95,9 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
         null);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
-    hqlQuery = rewrite("select dim2big1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange, conf2);
+    hqlQuery = rewrite("select dim2big1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT, conf2);
     TestCubeRewriter.compareQueries(hqlQuery, expecteddim2big1);
-    hqlQuery = rewrite("select dim2big2, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange, conf2);
+    hqlQuery = rewrite("select dim2big2, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT, conf2);
     TestCubeRewriter.compareQueries(hqlQuery, expecteddim2big2);
   }
 
@@ -190,11 +189,8 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
 
   @Test
   public void testCubeQueryWithExpressionHavingDenormColumnComingAsDirectColumn() throws Exception {
-    String twoDaysITRange =
-      "time_range_in(it, '" + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','"
-        + CubeTestSetup.getDateUptoHours(NOW) + "')";
-    String hqlQuery = rewrite("select substrdim2big1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange,
-      conf);
+    String hqlQuery = rewrite("select substrdim2big1, max(msr3)," + " msr2 from testCube" + " where "
+      + TWO_DAYS_RANGE_IT, conf);
     String expecteddim2big1 =
       getExpectedQuery(cubeName, "select substr(testcube.dim2big1, 5), max(testcube.msr3), sum(testcube.msr2) FROM ",
         null, " group by substr(testcube.dim2big1, 5)",

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java
index 0d1f9fe..f48e1b7 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java
@@ -19,11 +19,11 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.TWO_DAYS_RANGE;
+
 import java.util.ArrayList;
 import java.util.List;
 
-import static org.apache.lens.cube.parse.CubeTestSetup.TWO_DAYS_RANGE;
-
 import org.apache.lens.cube.parse.ExpressionResolver.ExprSpecContext;
 
 import org.apache.hadoop.conf.Configuration;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
index 1e21fb0..e77f919 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
@@ -19,6 +19,7 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
index ea561b6..d9e442d 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
@@ -19,6 +19,7 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
 
 import static org.testng.Assert.*;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestORTimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestORTimeRangeWriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestORTimeRangeWriter.java
index 4a23818..b98fdfb 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestORTimeRangeWriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestORTimeRangeWriter.java
@@ -19,6 +19,8 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
+
 import java.text.DateFormat;
 import java.util.ArrayList;
 import java.util.List;
@@ -43,13 +45,13 @@ public class TestORTimeRangeWriter extends TestTimeRangeWriter {
   public void validateDisjoint(String whereClause, DateFormat format) {
     List<String> parts = new ArrayList<String>();
     if (format == null) {
-      parts.add(UpdatePeriod.MONTHLY.format(CubeTestSetup.TWO_MONTHS_BACK));
-      parts.add(UpdatePeriod.DAILY.format(CubeTestSetup.TWODAYS_BACK));
-      parts.add(UpdatePeriod.HOURLY.format(CubeTestSetup.NOW));
+      parts.add(UpdatePeriod.MONTHLY.format(TWO_MONTHS_BACK));
+      parts.add(UpdatePeriod.DAILY.format(TWODAYS_BACK));
+      parts.add(UpdatePeriod.HOURLY.format(NOW));
     } else {
-      parts.add(format.format(CubeTestSetup.TWO_MONTHS_BACK));
-      parts.add(format.format(CubeTestSetup.TWODAYS_BACK));
-      parts.add(format.format(CubeTestSetup.NOW));
+      parts.add(format.format(TWO_MONTHS_BACK));
+      parts.add(format.format(TWODAYS_BACK));
+      parts.add(format.format(NOW));
     }
 
     System.out.println("Expected :" + StorageUtil.getWherePartClause("dt", "test", parts));
@@ -60,30 +62,16 @@ public class TestORTimeRangeWriter extends TestTimeRangeWriter {
   public void validateConsecutive(String whereClause, DateFormat format) {
     List<String> parts = new ArrayList<String>();
     if (format == null) {
-      parts.add(UpdatePeriod.DAILY.format(CubeTestSetup.ONE_DAY_BACK));
-      parts.add(UpdatePeriod.DAILY.format(CubeTestSetup.TWODAYS_BACK));
-      parts.add(UpdatePeriod.DAILY.format(CubeTestSetup.NOW));
-    } else {
-      parts.add(format.format(CubeTestSetup.ONE_DAY_BACK));
-      parts.add(format.format(CubeTestSetup.TWODAYS_BACK));
-      parts.add(format.format(CubeTestSetup.NOW));
-    }
-
-    System.out.println("Expected :" + StorageUtil.getWherePartClause("dt", "test", parts));
-    Assert.assertEquals(whereClause, StorageUtil.getWherePartClause("dt", "test", parts));
-  }
-
-  @Override
-  public void validateSingle(String whereClause, DateFormat format) {
-    List<String> parts = new ArrayList<String>();
-    if (format == null) {
-      parts.add(UpdatePeriod.DAILY.format(CubeTestSetup.ONE_DAY_BACK));
+      parts.add(getDateStringWithOffset(UpdatePeriod.DAILY, -1));
+      parts.add(getDateStringWithOffset(UpdatePeriod.DAILY, -2));
+      parts.add(getDateStringWithOffset(UpdatePeriod.DAILY, 0));
     } else {
-      parts.add(format.format(CubeTestSetup.ONE_DAY_BACK));
+      parts.add(format.format(getDateWithOffset(UpdatePeriod.DAILY, -1)));
+      parts.add(format.format(getDateWithOffset(UpdatePeriod.DAILY, -2)));
+      parts.add(format.format(getDateWithOffset(UpdatePeriod.DAILY, 0)));
     }
 
     System.out.println("Expected :" + StorageUtil.getWherePartClause("dt", "test", parts));
     Assert.assertEquals(whereClause, StorageUtil.getWherePartClause("dt", "test", parts));
   }
-
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
index 255aade..571f7de 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
@@ -19,7 +19,7 @@
 
 package org.apache.lens.cube.parse;
 
-import static org.apache.lens.cube.parse.CubeTestSetup.TWO_DAYS_RANGE;
+import static org.apache.lens.cube.metadata.DateFactory.TWO_DAYS_RANGE;
 
 import java.util.Arrays;
 

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
index 4d3a3dc..5a072e4 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
@@ -19,7 +19,7 @@
 
 package org.apache.lens.cube.parse;
 
-import static org.apache.lens.cube.parse.CubeTestSetup.TWO_DAYS_RANGE;
+import static org.apache.lens.cube.metadata.DateFactory.TWO_DAYS_RANGE;
 
 import java.util.Arrays;
 import java.util.Collections;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestStorageUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestStorageUtil.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestStorageUtil.java
index 73c3338..0069609 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestStorageUtil.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestStorageUtil.java
@@ -19,6 +19,8 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
+
 import java.util.*;
 
 import org.apache.lens.cube.metadata.FactPartition;
@@ -64,9 +66,9 @@ public class TestStorageUtil {
     Configuration conf = new Configuration();
     // {s1,s2,s3}, {s3}, {s3} -> {s3}
     List<FactPartition> answeringParts = new ArrayList<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s123));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s3));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.HOURLY, null, null, s3));
+    answeringParts.add(new FactPartition("dt", TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s123));
+    answeringParts.add(new FactPartition("dt", TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s3));
+    answeringParts.add(new FactPartition("dt", NOW, UpdatePeriod.HOURLY, null, null, s3));
     Map<String, Set<FactPartition>> result = new HashMap<String, Set<FactPartition>>();
     StorageUtil.getMinimalAnsweringTables(answeringParts, result);
     System.out.println("results:" + result);
@@ -74,15 +76,15 @@ public class TestStorageUtil {
     Assert.assertEquals("S3", result.keySet().iterator().next());
     Set<FactPartition> coveredParts = result.get("S3");
     Assert.assertEquals(3, coveredParts.size());
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWO_MONTHS_BACK));
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.NOW));
+    Assert.assertTrue(contains(coveredParts, TWO_MONTHS_BACK));
+    Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
+    Assert.assertTrue(contains(coveredParts, NOW));
 
     // {s1,s2,s3}, {s4}, {s5} - > {s1,s4,s5} or {s2,s4,s5} or {s3,s4,s5}
     answeringParts = new ArrayList<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s123));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s4));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.HOURLY, null, null, s5));
+    answeringParts.add(new FactPartition("dt", TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s123));
+    answeringParts.add(new FactPartition("dt", TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s4));
+    answeringParts.add(new FactPartition("dt", NOW, UpdatePeriod.HOURLY, null, null, s5));
     result = new HashMap<String, Set<FactPartition>>();
     StorageUtil.getMinimalAnsweringTables(answeringParts, result);
     System.out.println("results:" + result);
@@ -93,10 +95,10 @@ public class TestStorageUtil {
       || result.keySet().contains("S3"));
     coveredParts = result.get("S4");
     Assert.assertEquals(1, coveredParts.size());
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
+    Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
     coveredParts = result.get("S5");
     Assert.assertEquals(1, coveredParts.size());
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.NOW));
+    Assert.assertTrue(contains(coveredParts, NOW));
     coveredParts = result.get("S1");
     if (coveredParts == null) {
       coveredParts = result.get("S2");
@@ -105,13 +107,13 @@ public class TestStorageUtil {
       coveredParts = result.get("S3");
     }
     Assert.assertEquals(1, coveredParts.size());
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWO_MONTHS_BACK));
+    Assert.assertTrue(contains(coveredParts, TWO_MONTHS_BACK));
 
     // {s1}, {s2}, {s3} -> {s1,s2,s3}
     answeringParts = new ArrayList<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s1));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s2));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.HOURLY, null, null, s3));
+    answeringParts.add(new FactPartition("dt", TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s1));
+    answeringParts.add(new FactPartition("dt", TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s2));
+    answeringParts.add(new FactPartition("dt", NOW, UpdatePeriod.HOURLY, null, null, s3));
     result = new HashMap<String, Set<FactPartition>>();
     StorageUtil.getMinimalAnsweringTables(answeringParts, result);
     System.out.println("results:" + result);
@@ -121,19 +123,19 @@ public class TestStorageUtil {
     Assert.assertTrue(result.keySet().contains("S3"));
     coveredParts = result.get("S1");
     Assert.assertEquals(1, coveredParts.size());
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWO_MONTHS_BACK));
+    Assert.assertTrue(contains(coveredParts, TWO_MONTHS_BACK));
     coveredParts = result.get("S2");
     Assert.assertEquals(1, coveredParts.size());
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
+    Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
     coveredParts = result.get("S3");
     Assert.assertEquals(1, coveredParts.size());
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.NOW));
+    Assert.assertTrue(contains(coveredParts, NOW));
 
     // {s1, s2}, {s2, s3}, {s4} -> {s2,s4}
     answeringParts = new ArrayList<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s12));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s23));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.HOURLY, null, null, s4));
+    answeringParts.add(new FactPartition("dt", TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s12));
+    answeringParts.add(new FactPartition("dt", TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s23));
+    answeringParts.add(new FactPartition("dt", NOW, UpdatePeriod.HOURLY, null, null, s4));
     result = new HashMap<String, Set<FactPartition>>();
     StorageUtil.getMinimalAnsweringTables(answeringParts, result);
     System.out.println("results:" + result);
@@ -142,17 +144,17 @@ public class TestStorageUtil {
     Assert.assertTrue(result.keySet().contains("S4"));
     coveredParts = result.get("S2");
     Assert.assertEquals(2, coveredParts.size());
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWO_MONTHS_BACK));
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
+    Assert.assertTrue(contains(coveredParts, TWO_MONTHS_BACK));
+    Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
     coveredParts = result.get("S4");
     Assert.assertEquals(1, coveredParts.size());
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.NOW));
+    Assert.assertTrue(contains(coveredParts, NOW));
 
     // {s1, s2}, {s2, s4}, {s4} -> {s1,s4} or {s2,s4}
     answeringParts = new ArrayList<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s12));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s24));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.HOURLY, null, null, s4));
+    answeringParts.add(new FactPartition("dt", TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s12));
+    answeringParts.add(new FactPartition("dt", TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s24));
+    answeringParts.add(new FactPartition("dt", NOW, UpdatePeriod.HOURLY, null, null, s4));
     result = new HashMap<String, Set<FactPartition>>();
     StorageUtil.getMinimalAnsweringTables(answeringParts, result);
     System.out.println("results:" + result);
@@ -163,32 +165,32 @@ public class TestStorageUtil {
     if (coveredParts == null) {
       coveredParts = result.get("S2");
       Assert.assertTrue(coveredParts.size() >= 1);
-      Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWO_MONTHS_BACK));
+      Assert.assertTrue(contains(coveredParts, TWO_MONTHS_BACK));
       if (coveredParts.size() == 2) {
-        Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
+        Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
         Assert.assertEquals(1, result.get("S4").size());
       }
       coveredParts = result.get("S4");
       Assert.assertTrue(coveredParts.size() >= 1);
-      Assert.assertTrue(contains(coveredParts, CubeTestSetup.NOW));
+      Assert.assertTrue(contains(coveredParts, NOW));
       if (coveredParts.size() == 2) {
-        Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
+        Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
         Assert.assertEquals(1, result.get("S2").size());
       }
     } else {
       Assert.assertEquals(1, coveredParts.size());
-      Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWO_MONTHS_BACK));
+      Assert.assertTrue(contains(coveredParts, TWO_MONTHS_BACK));
       coveredParts = result.get("S4");
       Assert.assertTrue(coveredParts.size() >= 1);
-      Assert.assertTrue(contains(coveredParts, CubeTestSetup.NOW));
-      Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
+      Assert.assertTrue(contains(coveredParts, NOW));
+      Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
     }
 
     // {s1, s2}, {s2, s3}, {s3,s4} -> {s2,s3} or {s1, s3} or {s2, s4}
     answeringParts = new ArrayList<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s12));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s23));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.HOURLY, null, null, s34));
+    answeringParts.add(new FactPartition("dt", TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s12));
+    answeringParts.add(new FactPartition("dt", TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s23));
+    answeringParts.add(new FactPartition("dt", NOW, UpdatePeriod.HOURLY, null, null, s34));
     result = new HashMap<String, Set<FactPartition>>();
     StorageUtil.getMinimalAnsweringTables(answeringParts, result);
     System.out.println("results:" + result);
@@ -204,24 +206,24 @@ public class TestStorageUtil {
     //Assert.assertTrue(result.keySet().contains("S3"));
     /* coveredParts = result.get("S2");
     Assert.assertTrue(coveredParts.size() >= 1);
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWO_MONTHS_BACK));
+    Assert.assertTrue(contains(coveredParts, TWO_MONTHS_BACK));
     if (coveredParts.size() == 2) {
-      Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
+      Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
       Assert.assertEquals(1, result.get("S3").size());
     }
     coveredParts = result.get("S3");
     Assert.assertTrue(coveredParts.size() >= 1);
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.NOW));
+    Assert.assertTrue(contains(coveredParts, NOW));
     if (coveredParts.size() == 2) {
-      Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
+      Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
       Assert.assertEquals(1, result.get("S2").size());
     }*/
 
     // {s1, s2}, {s2}, {s1} -> {s1,s2}
     answeringParts = new ArrayList<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s12));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s2));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.HOURLY, null, null, s1));
+    answeringParts.add(new FactPartition("dt", TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s12));
+    answeringParts.add(new FactPartition("dt", TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s2));
+    answeringParts.add(new FactPartition("dt", NOW, UpdatePeriod.HOURLY, null, null, s1));
     result = new HashMap<String, Set<FactPartition>>();
     StorageUtil.getMinimalAnsweringTables(answeringParts, result);
     System.out.println("results:" + result);
@@ -230,16 +232,16 @@ public class TestStorageUtil {
     Assert.assertTrue(result.keySet().contains("S2"));
     coveredParts = result.get("S2");
     Assert.assertTrue(coveredParts.size() >= 1);
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
+    Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
     if (coveredParts.size() == 2) {
-      Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWO_MONTHS_BACK));
+      Assert.assertTrue(contains(coveredParts, TWO_MONTHS_BACK));
       Assert.assertEquals(1, result.get("S1").size());
     }
     coveredParts = result.get("S1");
     Assert.assertTrue(coveredParts.size() >= 1);
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.NOW));
+    Assert.assertTrue(contains(coveredParts, NOW));
     if (coveredParts.size() == 2) {
-      Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWO_MONTHS_BACK));
+      Assert.assertTrue(contains(coveredParts, TWO_MONTHS_BACK));
       Assert.assertEquals(1, result.get("S2").size());
     }
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java
index a431717..eb8c6eb 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java
@@ -19,12 +19,15 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
 
 import java.util.List;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.TestCubeMetastoreClient;
+import org.apache.lens.cube.metadata.TimeRange;
+import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
@@ -39,14 +42,10 @@ import org.testng.annotations.Test;
 
 public class TestTimeRangeExtractor extends TestQueryRewrite {
   private CubeQueryRewriter driver;
-  private String dateNow;
-  private String dateTwoDaysBack;
 
   @BeforeTest
   public void setupInstance() throws Exception {
     driver = new CubeQueryRewriter(new Configuration(), new HiveConf());
-    dateTwoDaysBack = getDateUptoHours(TWODAYS_BACK);
-    dateNow = getDateUptoHours(NOW);
   }
 
   @AfterTest
@@ -61,7 +60,8 @@ public class TestTimeRangeExtractor extends TestQueryRewrite {
 
   @Test
   public void testTimeRangeValidation() throws Exception {
-    String timeRange2 = " time_range_in(d_time, '" + dateNow + "','" + dateTwoDaysBack + "')";
+    // reverse range
+    String timeRange2 = getTimeRangeString(UpdatePeriod.DAILY, 0, -2, UpdatePeriod.HOURLY);
     try {
       // this should throw exception because from date is after to date
       driver.rewrite("SELECT cityid, testCube.msr2 from" + " testCube where " + timeRange2);
@@ -74,7 +74,8 @@ public class TestTimeRangeExtractor extends TestQueryRewrite {
 
   @Test
   public void testEqualTimeRangeValidation() throws Exception {
-    String equalTimeRange = " time_range_in(d_time, '" + dateNow + "','" + dateNow + "')";
+    // zero range
+    String equalTimeRange = getTimeRangeString(UpdatePeriod.HOURLY, 0, 0);
     try {
       // this should throw exception because from date and to date are same
       driver.rewrite("SELECT cityid, testCube.msr2 from" + " testCube where " + equalTimeRange);
@@ -87,18 +88,16 @@ public class TestTimeRangeExtractor extends TestQueryRewrite {
 
   @Test
   public void testNoNPE() throws Exception {
-    String timeRange = " time_range_in(d_time, '" + dateTwoDaysBack + "','" + dateNow + "')";
-    String q1 = "SELECT cityid, testCube.msr2 from testCube where " + timeRange + " AND cityid IS NULL";
+    String q1 = "SELECT cityid, testCube.msr2 from testCube where " + TWO_DAYS_RANGE + " AND cityid IS NULL";
     rewrite(driver, q1);
-    q1 = "SELECT cityid, testCube.msr2 from testCube where cityid IS NULL AND " + timeRange;
+    q1 = "SELECT cityid, testCube.msr2 from testCube where cityid IS NULL AND " + TWO_DAYS_RANGE;
     rewrite(driver, q1);
   }
 
   @Test
   public void testTimeRangeASTPosition() throws Exception {
     // check that time range can be any child of AND
-    String timeRange = " time_range_in(d_time, '" + dateTwoDaysBack + "','" + dateNow + "')";
-    String q1 = "SELECT cityid, testCube.msr2 from testCube where " + timeRange + " AND cityid=1";
+    String q1 = "SELECT cityid, testCube.msr2 from testCube where " + TWO_DAYS_RANGE + " AND cityid=1";
     CubeQueryContext cubeql = driver.rewrite(q1);
     String hql = cubeql.toHQL();
   }
@@ -106,10 +105,9 @@ public class TestTimeRangeExtractor extends TestQueryRewrite {
   @Test
   public void testPartitionColNameExtract() throws Exception {
     String q2 =
-      "SELECT cityid, testCube.msr3 from testCube where cityid=1 AND " + " time_range_in(d_time, '" + dateTwoDaysBack
-        + "','" + dateNow + "')";
+      "SELECT cityid, testCube.msr3 from testCube where cityid=1 AND " + TWO_DAYS_RANGE;
     CubeQueryContext cubeql = driver.rewrite(q2);
-    String hql = cubeql.toHQL();
+    cubeql.toHQL();
     // Check that column name in time range is extracted properly
     TimeRange range = cubeql.getTimeRanges().get(0);
     Assert.assertNotNull(range);
@@ -124,12 +122,11 @@ public class TestTimeRangeExtractor extends TestQueryRewrite {
     String dateNow = getDateUptoHours(NOW);
     // time range within time range
     String q3 =
-      "SELECT cityid, testCube.msr3 FROM testCube where cityid=1 AND" + "  (time_range_in(d_time, '" + dateTwoDaysBack
-        + "','" + dateNow + "')  "
+      "SELECT cityid, testCube.msr3 FROM testCube where cityid=1 AND (" + TWO_DAYS_RANGE
         // Time range as sibling of the first time range
-        + " OR " + " time_range_in(d_time, '" + dateTwoDaysBack + "', '" + dateNow + "'))";
+        + " OR " + TWO_DAYS_RANGE + ")";
     CubeQueryContext cubeql = driver.rewrite(q3);
-    String hql = cubeql.toHQL();
+    cubeql.toHQL();
 
     List<TimeRange> ranges = cubeql.getTimeRanges();
     Assert.assertEquals(2, ranges.size());

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
index 1fc8bc8..da0e4f4 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
@@ -19,9 +19,9 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.COLUMN_NOT_FOUND;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.FACT_NOT_AVAILABLE_IN_RANGE;
-import static org.apache.lens.cube.parse.CubeTestSetup.*;
 
 import static org.testng.Assert.assertEquals;
 import static org.testng.Assert.assertTrue;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
index 0248409..87e128f 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
@@ -19,14 +19,18 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
+import static org.apache.lens.cube.metadata.UpdatePeriod.*;
+
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
+import java.util.ArrayList;
 import java.util.LinkedHashSet;
+import java.util.List;
 import java.util.Set;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.FactPartition;
-import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.apache.lens.server.api.error.LensException;
 
 import org.testng.Assert;
@@ -45,16 +49,26 @@ public abstract class TestTimeRangeWriter {
 
   public abstract void validateConsecutive(String whereClause, DateFormat format);
 
-  public abstract void validateSingle(String whereClause, DateFormat object);
+  public void validateSingle(String whereClause, DateFormat format) {
+    List<String> parts = new ArrayList<String>();
+    if (format == null) {
+      parts.add(getDateStringWithOffset(DAILY, -1));
+    } else {
+      parts.add(format.format(getDateWithOffset(DAILY, -1)));
+    }
+
+    System.out.println("Expected :" + StorageUtil.getWherePartClause("dt", "test", parts));
+    Assert.assertEquals(whereClause, StorageUtil.getWherePartClause("dt", "test", parts));
+  }
 
   public static final DateFormat DB_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
 
   @Test
   public void testDisjointParts() {
     Set<FactPartition> answeringParts = new LinkedHashSet<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.HOURLY, null, null));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(MONTHLY, -2), MONTHLY, null, null));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -2), DAILY, null, null));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(HOURLY, 0), HOURLY, null, null));
 
     LensException th = null;
     String whereClause = null;
@@ -76,10 +90,10 @@ public abstract class TestTimeRangeWriter {
     }
 
     // test with format
-    answeringParts = new LinkedHashSet<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, DB_FORMAT));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, DB_FORMAT));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.HOURLY, null, DB_FORMAT));
+    answeringParts = new LinkedHashSet<>();
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(MONTHLY, -2), MONTHLY, null, DB_FORMAT));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -2), DAILY, null, DB_FORMAT));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(HOURLY, 0), HOURLY, null, DB_FORMAT));
 
     th = null;
     try {
@@ -100,17 +114,17 @@ public abstract class TestTimeRangeWriter {
   @Test
   public void testConsecutiveDayParts() throws LensException {
     Set<FactPartition> answeringParts = new LinkedHashSet<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.ONE_DAY_BACK, UpdatePeriod.DAILY, null, null));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.DAILY, null, null));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -1), DAILY, null, null));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -2), DAILY, null, null));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, 0), DAILY, null, null));
 
     String whereClause = getTimerangeWriter().getTimeRangeWhereClause(null, "test", answeringParts);
     validateConsecutive(whereClause, null);
 
     answeringParts = new LinkedHashSet<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.ONE_DAY_BACK, UpdatePeriod.DAILY, null, DB_FORMAT));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, DB_FORMAT));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.DAILY, null, DB_FORMAT));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -1), DAILY, null, DB_FORMAT));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -2), DAILY, null, DB_FORMAT));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, 0), DAILY, null, DB_FORMAT));
 
     whereClause = getTimerangeWriter().getTimeRangeWhereClause(null, "test", answeringParts);
     validateConsecutive(whereClause, DB_FORMAT);
@@ -119,12 +133,12 @@ public abstract class TestTimeRangeWriter {
   @Test
   public void testSinglePart() throws LensException {
     Set<FactPartition> answeringParts = new LinkedHashSet<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.ONE_DAY_BACK, UpdatePeriod.DAILY, null, null));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -1), DAILY, null, null));
     String whereClause = getTimerangeWriter().getTimeRangeWhereClause(null, "test", answeringParts);
     validateSingle(whereClause, null);
 
     answeringParts = new LinkedHashSet<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.ONE_DAY_BACK, UpdatePeriod.DAILY, null, DB_FORMAT));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -1), DAILY, null, DB_FORMAT));
     whereClause = getTimerangeWriter().getTimeRangeWhereClause(null, "test", answeringParts);
     validateSingle(whereClause, DB_FORMAT);
 

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
index 7bd7b6b..b7372f1 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
@@ -19,6 +19,11 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.error.LensCubeErrorCode.CANNOT_USE_TIMERANGE_WRITER;
+import static org.apache.lens.cube.metadata.DateFactory.*;
+import static org.apache.lens.cube.metadata.UpdatePeriod.CONTINUOUS;
+import static org.apache.lens.cube.metadata.UpdatePeriod.DAILY;
+import static org.apache.lens.cube.parse.CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
 
 import java.text.DateFormat;
@@ -28,7 +33,6 @@ import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.apache.lens.server.api.error.LensException;
 
@@ -45,7 +49,7 @@ import lombok.extern.slf4j.Slf4j;
 public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
 
   private Configuration conf;
-  private final String cubeName = CubeTestSetup.TEST_CUBE_NAME;
+  private final String cubeName = TEST_CUBE_NAME;
 
   @BeforeTest
   public void setupDriver() throws Exception {
@@ -84,39 +88,29 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
       th = e;
       log.error("Semantic exception while testing cube query.", e);
     }
-    if (!CubeTestSetup.isZerothHour()) {
+    if (!isZerothHour()) {
       Assert.assertNotNull(th);
       Assert
-      .assertEquals(th.getErrorCode(), LensCubeErrorCode.CANNOT_USE_TIMERANGE_WRITER.getLensErrorInfo().getErrorCode());
+      .assertEquals(th.getErrorCode(), CANNOT_USE_TIMERANGE_WRITER.getLensErrorInfo().getErrorCode());
     }
     // hourly partitions for two days
-    conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
-
+    conf.setBoolean(FAIL_QUERY_ON_PARTIAL_DATA, true);
     DateFormat qFmt = new SimpleDateFormat("yyyy-MM-dd-HH:mm:ss");
-    Calendar qCal = Calendar.getInstance();
-    Date toDate = qCal.getTime();
-    String qTo = qFmt.format(toDate);
-    qCal.setTime(TWODAYS_BACK);
-    Date from2DaysBackDate = qCal.getTime();
-    String qFrom = qFmt.format(from2DaysBackDate);
-    String twoDaysInRangeClause = " time_range_in(d_time, '"+ qFrom + "', '" + qTo + "')";
+    String twoDaysInRangeClause = getTimeRangeString(DAILY, -2, 0, qFmt);
 
     String hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + twoDaysInRangeClause, conf);
     Map<String, String> whereClauses = new HashMap<String, String>();
     whereClauses.put(
-      CubeTestSetup.getDbName() + "c1_testfact",
-      TestBetweenTimeRangeWriter.getBetweenClause(cubeName, "dt", from2DaysBackDate, toDate,
-          UpdatePeriod.CONTINUOUS.format()));
+      getDbName() + "c1_testfact",
+      TestBetweenTimeRangeWriter.getBetweenClause(cubeName, "dt",
+        getDateWithOffset(DAILY, -2), getDateWithOffset(DAILY, 0), CONTINUOUS.format()));
     String expected = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, whereClauses);
     System.out.println("HQL:" + hqlQuery);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     // multiple range query
-    //from date 4 days back
-    qCal.setTime(BEFORE_4_DAYS_START);
-    Date from4DaysBackDate = qCal.getTime();
-    String qFrom4Days = qFmt.format(from4DaysBackDate);
-    String fourDaysInRangeClause = " time_range_in(d_time, '"+ qFrom4Days + "', '" + qTo + "')";
+    //from date 6 days back (variable name fourDaysInRangeClause retained from the earlier 4-day range)
+    String fourDaysInRangeClause = getTimeRangeString(DAILY, -6, 0, qFmt);
 
     hqlQuery =
       rewrite("select SUM(msr2) from testCube" + " where " + twoDaysInRangeClause + " OR "
@@ -124,12 +118,12 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
 
     whereClauses = new HashMap<String, String>();
     whereClauses.put(
-      CubeTestSetup.getDbName() + "c1_testfact",
-      TestBetweenTimeRangeWriter.getBetweenClause(cubeName, "dt", from2DaysBackDate, toDate,
-          UpdatePeriod.CONTINUOUS.format())
+      getDbName() + "c1_testfact",
+      TestBetweenTimeRangeWriter.getBetweenClause(cubeName, "dt", getDateWithOffset(DAILY, -2),
+        getDateWithOffset(DAILY, 0), CONTINUOUS.format())
         + " OR"
-        + TestBetweenTimeRangeWriter.getBetweenClause(cubeName, "dt", from4DaysBackDate, toDate,
-        UpdatePeriod.CONTINUOUS.format()));
+        + TestBetweenTimeRangeWriter.getBetweenClause(cubeName, "dt", getDateWithOffset(DAILY, -6),
+        getDateWithOffset(DAILY, 0), CONTINUOUS.format()));
     expected = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, whereClauses);
     System.out.println("HQL:" + hqlQuery);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
@@ -138,9 +132,9 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     conf.set(CubeQueryConfUtil.PART_WHERE_CLAUSE_DATE_FORMAT, "yyyy-MM-dd HH:mm:ss");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     whereClauses = new HashMap<String, String>();
-    whereClauses.put(CubeTestSetup.getDbName() + "c1_testfact", TestBetweenTimeRangeWriter.getBetweenClause(cubeName,
-      "dt", getUptoHour(CubeTestSetup.TWODAYS_BACK),
-      getUptoHour(CubeTestSetup.NOW), TestTimeRangeWriter.DB_FORMAT));
+    whereClauses.put(getDbName() + "c1_testfact", TestBetweenTimeRangeWriter.getBetweenClause(cubeName,
+      "dt", getUptoHour(TWODAYS_BACK),
+      getUptoHour(NOW), TestTimeRangeWriter.DB_FORMAT));
     expected = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, whereClauses);
     System.out.println("HQL:" + hqlQuery);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
@@ -150,20 +144,19 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
   public void testCubeQueryWithTimeDim() throws Exception {
     Configuration tconf = new Configuration(conf);
     // hourly partitions for two days
-    tconf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
+    tconf.setBoolean(FAIL_QUERY_ON_PARTIAL_DATA, true);
     tconf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C4");
     tconf.setBoolean(CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, false);
     tconf.set(CubeQueryConfUtil.PART_WHERE_CLAUSE_DATE_FORMAT, "yyyy-MM-dd HH:mm:ss");
     tconf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C4"), "MONTHLY,DAILY,HOURLY");
 
     String query =
-      "SELECT test_time_dim, msr2 FROM testCube where " + "time_range_in(test_time_dim, '"
-        + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')";
+      "SELECT test_time_dim, msr2 FROM testCube where " + TWO_DAYS_RANGE_TTD;
     String hqlQuery = rewrite(query, tconf);
     Map<String, String> whereClauses = new HashMap<String, String>();
-    whereClauses.put(CubeTestSetup.getDbName() + "c4_testfact2", TestBetweenTimeRangeWriter.getBetweenClause("hourdim",
-      "full_hour", getUptoHour(CubeTestSetup.TWODAYS_BACK),
-      getUptoHour(getOneLess(CubeTestSetup.NOW, UpdatePeriod.HOURLY.calendarField())), TestTimeRangeWriter.DB_FORMAT));
+    whereClauses.put(getDbName() + "c4_testfact2", TestBetweenTimeRangeWriter.getBetweenClause("hourdim",
+      "full_hour", getUptoHour(TWODAYS_BACK),
+      getUptoHour(getOneLess(NOW, UpdatePeriod.HOURLY.calendarField())), TestTimeRangeWriter.DB_FORMAT));
     System.out.println("HQL:" + hqlQuery);
     String expected =
       getExpectedQuery(cubeName, "select hourdim.full_hour, sum(testcube.msr2) FROM ", " join " + getDbName()
@@ -172,8 +165,7 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     query =
-      "SELECT msr2 FROM testCube where " + "time_range_in(test_time_dim, '"
-        + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')";
+      "SELECT msr2 FROM testCube where " + TWO_DAYS_RANGE_TTD;
     hqlQuery = rewrite(query, tconf);
     System.out.println("HQL:" + hqlQuery);
     expected =
@@ -182,9 +174,7 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     query =
-      "SELECT msr2 FROM testCube where testcube.cityid > 2 and " + "time_range_in(test_time_dim, '"
-        + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW)
-        + "') and testcube.cityid != 5";
+      "SELECT msr2 FROM testCube where testcube.cityid > 2 and " + TWO_DAYS_RANGE_TTD + " and testcube.cityid != 5";
     hqlQuery = rewrite(query, tconf);
     System.out.println("HQL:" + hqlQuery);
     expected =
@@ -196,20 +186,18 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     // multiple range query
     hqlQuery =
       rewrite(
-        "select SUM(msr2) from testCube" + " where time_range_in(test_time_dim, '"
-          + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')"
-          + " OR time_range_in(test_time_dim, '" + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_START) + "','"
-          + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')", tconf);
+        "select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE_TTD
+          + " OR " + TWO_DAYS_RANGE_TTD_BEFORE_4_DAYS, tconf);
 
-    whereClauses = new HashMap<String, String>();
+    whereClauses = new HashMap<>();
     whereClauses.put(
-      CubeTestSetup.getDbName() + "c4_testfact2",
-      TestBetweenTimeRangeWriter.getBetweenClause("hourdim", "full_hour", getUptoHour(CubeTestSetup.TWODAYS_BACK),
-        getUptoHour(getOneLess(CubeTestSetup.NOW, UpdatePeriod.HOURLY.calendarField())),
+      getDbName() + "c4_testfact2",
+      TestBetweenTimeRangeWriter.getBetweenClause("hourdim", "full_hour", getUptoHour(TWODAYS_BACK),
+        getUptoHour(getOneLess(NOW, UpdatePeriod.HOURLY.calendarField())),
         TestTimeRangeWriter.DB_FORMAT)
         + " OR "
-        + TestBetweenTimeRangeWriter.getBetweenClause("hourdim", "full_hour", getUptoHour(BEFORE_4_DAYS_START),
-        getUptoHour(getOneLess(BEFORE_4_DAYS_END, UpdatePeriod.HOURLY.calendarField())),
+        + TestBetweenTimeRangeWriter.getBetweenClause("hourdim", "full_hour", getUptoHour(BEFORE_6_DAYS),
+        getUptoHour(getOneLess(BEFORE_4_DAYS, UpdatePeriod.HOURLY.calendarField())),
         TestTimeRangeWriter.DB_FORMAT));
     expected =
       getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", " join " + getDbName()
@@ -219,10 +207,8 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
 
     hqlQuery =
       rewrite(
-        "select to_date(test_time_dim), SUM(msr2) from testCube" + " where time_range_in(test_time_dim, '"
-          + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')"
-          + " OR time_range_in(test_time_dim, '" + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_START) + "','"
-          + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')", tconf);
+        "select to_date(test_time_dim), SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE_TTD
+          + " OR " + TWO_DAYS_RANGE_TTD_BEFORE_4_DAYS, tconf);
 
     expected =
       getExpectedQuery(cubeName, "select to_date(hourdim.full_hour), sum(testcube.msr2) FROM ", " join "
@@ -236,20 +222,19 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
   public void testCubeQueryWithTimeDimThruChain() throws Exception {
     // hourly partitions for two days
     Configuration tconf = new Configuration(conf);
-    tconf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
+    tconf.setBoolean(FAIL_QUERY_ON_PARTIAL_DATA, true);
     tconf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C4");
     tconf.setBoolean(CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, false);
     tconf.set(CubeQueryConfUtil.PART_WHERE_CLAUSE_DATE_FORMAT, "yyyy-MM-dd HH:mm:ss");
     tconf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C4"), "MONTHLY,DAILY,HOURLY");
 
     String query =
-      "SELECT test_time_dim2, msr2 FROM testCube where " + "time_range_in(test_time_dim2, '"
-        + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')";
+      "SELECT test_time_dim2, msr2 FROM testCube where " + TWO_DAYS_RANGE_TTD2;
     String hqlQuery = rewrite(query, tconf);
     Map<String, String> whereClauses = new HashMap<String, String>();
-    whereClauses.put(CubeTestSetup.getDbName() + "c4_testfact2", TestBetweenTimeRangeWriter.getBetweenClause(
-      "timehourchain", "full_hour", getUptoHour(CubeTestSetup.TWODAYS_BACK),
-      getUptoHour(getOneLess(CubeTestSetup.NOW, UpdatePeriod.HOURLY.calendarField())), TestTimeRangeWriter.DB_FORMAT));
+    whereClauses.put(getDbName() + "c4_testfact2", TestBetweenTimeRangeWriter.getBetweenClause(
+      "timehourchain", "full_hour", getUptoHour(TWODAYS_BACK),
+      getUptoHour(getOneLess(NOW, UpdatePeriod.HOURLY.calendarField())), TestTimeRangeWriter.DB_FORMAT));
     System.out.println("HQL:" + hqlQuery);
     String expected =
       getExpectedQuery(cubeName, "select timehourchain.full_hour, sum(testcube.msr2) FROM ", " join " + getDbName()
@@ -258,8 +243,7 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     query =
-      "SELECT msr2 FROM testCube where " + "time_range_in(test_time_dim2, '"
-        + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')";
+      "SELECT msr2 FROM testCube where " + TWO_DAYS_RANGE_TTD2;
     hqlQuery = rewrite(query, tconf);
     System.out.println("HQL:" + hqlQuery);
     expected =
@@ -269,9 +253,7 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     query =
-      "SELECT msr2 FROM testCube where testcube.cityid > 2 and " + "time_range_in(test_time_dim2, '"
-        + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW)
-        + "') and testcube.cityid != 5";
+      "SELECT msr2 FROM testCube where testcube.cityid > 2 and " + TWO_DAYS_RANGE_TTD2 + " and testcube.cityid != 5";
     hqlQuery = rewrite(query, tconf);
     System.out.println("HQL:" + hqlQuery);
     expected =
@@ -283,20 +265,18 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     // multiple range query
     hqlQuery =
       rewrite(
-        "select SUM(msr2) from testCube" + " where time_range_in(test_time_dim2, '"
-          + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')"
-          + " OR time_range_in(test_time_dim2, '" + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_START) + "','"
-          + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')", tconf);
+        "select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE_TTD2
+          + " OR " + TWO_DAYS_RANGE_TTD2_BEFORE_4_DAYS, tconf);
 
     whereClauses = new HashMap<String, String>();
     whereClauses.put(
-      CubeTestSetup.getDbName() + "c4_testfact2",
-      TestBetweenTimeRangeWriter.getBetweenClause("timehourchain", "full_hour", getUptoHour(CubeTestSetup.TWODAYS_BACK),
-        getUptoHour(getOneLess(CubeTestSetup.NOW, UpdatePeriod.HOURLY.calendarField())),
+      getDbName() + "c4_testfact2",
+      TestBetweenTimeRangeWriter.getBetweenClause("timehourchain", "full_hour", getUptoHour(TWODAYS_BACK),
+        getUptoHour(getOneLess(NOW, UpdatePeriod.HOURLY.calendarField())),
         TestTimeRangeWriter.DB_FORMAT)
         + " OR "
-        + TestBetweenTimeRangeWriter.getBetweenClause("timehourchain", "full_hour", getUptoHour(BEFORE_4_DAYS_START),
-        getUptoHour(getOneLess(BEFORE_4_DAYS_END, UpdatePeriod.HOURLY.calendarField())),
+        + TestBetweenTimeRangeWriter.getBetweenClause("timehourchain", "full_hour", getUptoHour(BEFORE_6_DAYS),
+        getUptoHour(getOneLess(BEFORE_4_DAYS, UpdatePeriod.HOURLY.calendarField())),
         TestTimeRangeWriter.DB_FORMAT));
     expected =
       getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", " join " + getDbName()
@@ -307,10 +287,8 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
 
     hqlQuery =
       rewrite(
-        "select to_date(test_time_dim2), SUM(msr2) from testCube" + " where time_range_in(test_time_dim2, '"
-          + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')"
-          + " OR time_range_in(test_time_dim2, '" + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_START) + "','"
-          + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')", tconf);
+        "select to_date(test_time_dim2), SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE_TTD2
+          + " OR " + TWO_DAYS_RANGE_TTD2_BEFORE_4_DAYS, tconf);
 
     expected =
       getExpectedQuery(cubeName, "select to_date(timehourchain.full_hour), sum(testcube.msr2) FROM ", " join "

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-server/src/main/java/org/apache/lens/server/query/QueryResultPurger.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryResultPurger.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryResultPurger.java
index 54c6574..2be11ea 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryResultPurger.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryResultPurger.java
@@ -28,7 +28,7 @@ import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.lens.cube.parse.DateUtil;
+import org.apache.lens.cube.metadata.DateUtil;
 import org.apache.lens.server.LensServices;
 import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.metrics.MetricsService;


[47/51] [abbrv] lens git commit: LENS-920 : Fix issues in producing and consuming json for all api

Posted by de...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
index 925fc86..4bc3f0a 100644
--- a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
@@ -29,10 +29,7 @@ import javax.ws.rs.BadRequestException;
 import javax.ws.rs.NotFoundException;
 import javax.ws.rs.client.Entity;
 import javax.ws.rs.client.WebTarget;
-import javax.ws.rs.core.Application;
-import javax.ws.rs.core.GenericType;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
+import javax.ws.rs.core.*;
 import javax.xml.bind.JAXBElement;
 import javax.xml.datatype.DatatypeFactory;
 import javax.xml.datatype.XMLGregorianCalendar;
@@ -44,6 +41,7 @@ import org.apache.lens.api.LensSessionHandle;
 import org.apache.lens.api.StringList;
 import org.apache.lens.api.error.LensCommonErrorCode;
 import org.apache.lens.api.metastore.*;
+import org.apache.lens.api.metastore.ObjectFactory;
 import org.apache.lens.api.result.LensAPIResult;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.ExprColumn.ExprSpec;
@@ -62,15 +60,9 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 
-import org.glassfish.jersey.client.ClientConfig;
-import org.glassfish.jersey.media.multipart.FormDataBodyPart;
-import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
-import org.glassfish.jersey.media.multipart.FormDataMultiPart;
-import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.glassfish.jersey.test.TestProperties;
 import org.testng.Assert;
-import org.testng.annotations.AfterTest;
-import org.testng.annotations.BeforeTest;
-import org.testng.annotations.Test;
+import org.testng.annotations.*;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
@@ -80,8 +72,6 @@ import lombok.extern.slf4j.Slf4j;
 @Test(groups = "unit-test")
 public class TestMetastoreService extends LensJerseyTest {
   private ObjectFactory cubeObjectFactory;
-  protected String mediaType = MediaType.APPLICATION_XML;
-  protected MediaType medType = MediaType.APPLICATION_XML_TYPE;
   protected String dbPFX = "TestMetastoreService_";
   CubeMetastoreServiceImpl metastoreService;
   LensSessionHandle lensSessionId;
@@ -90,7 +80,7 @@ public class TestMetastoreService extends LensJerseyTest {
     assertEquals(result.getStatus(), Status.SUCCEEDED, String.valueOf(result));
   }
 
-  @BeforeTest
+  @BeforeMethod
   public void setUp() throws Exception {
     super.setUp();
     cubeObjectFactory = new ObjectFactory();
@@ -99,7 +89,7 @@ public class TestMetastoreService extends LensJerseyTest {
 
   }
 
-  @AfterTest
+  @AfterMethod
   public void tearDown() throws Exception {
     metastoreService.closeSession(lensSessionId);
     super.tearDown();
@@ -107,62 +97,67 @@ public class TestMetastoreService extends LensJerseyTest {
 
   @Override
   protected Application configure() {
+    enable(TestProperties.LOG_TRAFFIC);
+    enable(TestProperties.DUMP_ENTITY);
     return new MetastoreApp();
   }
 
-  @Override
-  protected void configureClient(ClientConfig config) {
-    config.register(MultiPartFeature.class);
-  }
-
-  @Test
-  public void testSetDatabase() throws Exception {
-    WebTarget dbTarget = target().path("metastore").path("databases/current");
-    String dbName = "test_set_db";
+  @Test(dataProvider = "mediaTypeData")
+  public void testSetDatabase(MediaType mediaType) throws Exception {
+    String prevDb = getCurrentDatabase(mediaType);
+    String dbName = "test_set_db" + mediaType.getSubtype();
     try {
-      dbTarget.queryParam("sessionid", lensSessionId).request(mediaType).put(Entity.xml(dbName), APIResult.class);
-      fail("Should get 404");
-    } catch (NotFoundException e) {
-      // expected
-    }
+      WebTarget dbTarget = target().path("metastore").path("databases/current");
+      try {
+        dbTarget.queryParam("sessionid", lensSessionId).request(mediaType).put(getEntityForString(dbName, mediaType),
+          APIResult.class);
+        fail("Should get 404");
+      } catch (NotFoundException e) {
+        // expected
+      }
 
-    // create
-    APIResult result = target().path("metastore").path("databases")
-      .queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(dbName), APIResult.class);
-    assertNotNull(result);
-    assertSuccess(result);
+      // create
+      APIResult result = target().path("metastore").path("databases")
+        .queryParam("sessionid", lensSessionId).request(mediaType).post(getEntityForString(dbName, mediaType), APIResult
+          .class);
+      assertNotNull(result);
+      assertSuccess(result);
 
-    // set
-    result = dbTarget.queryParam("sessionid", lensSessionId).request(mediaType)
-      .put(Entity.xml(dbName), APIResult.class);
-    assertNotNull(result);
-    assertSuccess(result);
+      // set
+      result = dbTarget.queryParam("sessionid", lensSessionId).request(mediaType)
+        .put(getEntityForString(dbName, mediaType), APIResult.class);
+      assertNotNull(result);
+      assertSuccess(result);
 
-    // set without session id, we should get bad request
-    try {
-      dbTarget.request(mediaType).put(Entity.xml(dbName), APIResult.class);
-      fail("Should have thrown bad request exception");
-    } catch (BadRequestException badReq) {
-      // expected
-    }
+      // set without session id, we should get bad request
+      try {
+        dbTarget.request(mediaType).put(getEntityForString(dbName, mediaType), APIResult.class);
+        fail("Should have thrown bad request exception");
+      } catch (BadRequestException badReq) {
+        // expected
+      }
 
-    String current = dbTarget.queryParam("sessionid", lensSessionId).request(mediaType).get(String.class);
-    assertEquals(current, dbName);
+      String current = dbTarget.queryParam("sessionid", lensSessionId).request(mediaType).get(String.class);
+      assertEquals(current, dbName);
+    } finally {
+      setCurrentDatabase(prevDb, mediaType);
+      dropDatabase(dbName, mediaType);
+    }
   }
 
-  @Test
-  public void testCreateDatabase() throws Exception {
-    final String newDb = dbPFX + "new_db";
+  @Test(dataProvider = "mediaTypeData")
+  public void testCreateDatabase(MediaType mediaType) throws Exception {
+    final String newDb = dbPFX + "new_db" + mediaType.getSubtype();
     WebTarget dbTarget = target().path("metastore").path("databases");
 
     APIResult result = dbTarget.queryParam("sessionid", lensSessionId).request(mediaType)
-      .post(Entity.xml(newDb), APIResult.class);
+      .post(getEntityForString(newDb, mediaType), APIResult.class);
     assertNotNull(result);
     assertSuccess(result);
 
     // Create again
     result = dbTarget.queryParam("sessionid", lensSessionId).queryParam("ignoreIfExisting", false)
-      .request(mediaType).post(Entity.xml(newDb), APIResult.class);
+      .request(mediaType).post(getEntityForString(newDb, mediaType), APIResult.class);
     assertEquals(result.getStatus(), APIResult.Status.FAILED);
     log.info(">> Result message " + result.getMessage());
 
@@ -170,13 +165,13 @@ public class TestMetastoreService extends LensJerseyTest {
     dbTarget.path(newDb).queryParam("sessionid", lensSessionId).request().delete();
   }
 
-  @Test
-  public void testDropDatabase() throws Exception {
-    final String dbName = dbPFX + "del_db";
+  @Test(dataProvider = "mediaTypeData")
+  public void testDropDatabase(MediaType mediaType) throws Exception {
+    final String dbName = dbPFX + "del_db" + mediaType.getSubtype();
     final WebTarget dbTarget = target().path("metastore").path("databases");
     // First create the database
     APIResult create = dbTarget.queryParam("sessionid", lensSessionId).request(mediaType)
-      .post(Entity.xml(dbName), APIResult.class);
+      .post(getEntityForString(dbName, mediaType), APIResult.class);
     assertSuccess(create);
 
     // Now drop it
@@ -186,16 +181,16 @@ public class TestMetastoreService extends LensJerseyTest {
     assertSuccess(drop);
   }
 
-  @Test
-  public void testGetAllDatabases() throws Exception {
-    final String[] dbsToCreate = {"db_1", "db_2", "db_3"};
+  @Test(dataProvider = "mediaTypeData")
+  public void testGetAllDatabases(MediaType mediaType) throws Exception {
+    final String[] dbsToCreate = {"db_1" + mediaType.getSubtype(),
+      "db_2" + mediaType.getSubtype(), "db_3" + mediaType.getSubtype(), };
     final WebTarget dbTarget = target().path("metastore").path("databases");
 
     for (String name : dbsToCreate) {
-      dbTarget.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(name));
+      dbTarget.queryParam("sessionid", lensSessionId).request(mediaType).post(getEntityForString(name, mediaType));
     }
 
-
     StringList allDbs = target().path("metastore").path("databases")
       .queryParam("sessionid", lensSessionId).request(mediaType)
       .get(StringList.class);
@@ -216,16 +211,16 @@ public class TestMetastoreService extends LensJerseyTest {
     }
   }
 
-  private void createDatabase(String dbName) throws Exception {
+  private void createDatabase(String dbName, MediaType mediaType) throws Exception {
     WebTarget dbTarget = target().path("metastore").path("databases");
 
     APIResult result = dbTarget.queryParam("sessionid", lensSessionId).request(mediaType)
-      .post(Entity.xml(dbName), APIResult.class);
+      .post(getEntityForString(dbName, mediaType), APIResult.class);
     assertNotNull(result);
     assertSuccess(result);
   }
 
-  private void createStorage(String storageName) throws Exception {
+  private void createStorage(String storageName, MediaType mediaType) throws Exception {
     WebTarget target = target().path("metastore").path("storages");
 
     XStorage xs = new XStorage();
@@ -237,13 +232,15 @@ public class TestMetastoreService extends LensJerseyTest {
     prop.setValue("prop1.value");
     xs.getProperties().getProperty().add(prop);
 
-    APIResult result = target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(
-      cubeObjectFactory.createXStorage(xs)), APIResult.class);
+    APIResult result = target.queryParam("sessionid", lensSessionId).request(mediaType).post(
+      Entity.entity(new GenericEntity<JAXBElement<XStorage>>(cubeObjectFactory.createXStorage(xs)) {
+      }, mediaType),
+      APIResult.class);
     assertNotNull(result);
     assertSuccess(result);
   }
 
-  private void dropStorage(String storageName) throws Exception {
+  private void dropStorage(String storageName, MediaType mediaType) throws Exception {
     WebTarget target = target().path("metastore").path("storages").path(storageName);
 
     APIResult result = target
@@ -251,7 +248,7 @@ public class TestMetastoreService extends LensJerseyTest {
     assertSuccess(result);
   }
 
-  private void dropDatabase(String dbName) throws Exception {
+  private void dropDatabase(String dbName, MediaType mediaType) throws Exception {
     WebTarget dbTarget = target().path("metastore").path("databases").path(dbName);
 
     APIResult result = dbTarget.queryParam("cascade", "true")
@@ -259,14 +256,14 @@ public class TestMetastoreService extends LensJerseyTest {
     assertSuccess(result);
   }
 
-  private void setCurrentDatabase(String dbName) throws Exception {
+  private void setCurrentDatabase(String dbName, MediaType mediaType) throws Exception {
     WebTarget dbTarget = target().path("metastore").path("databases/current");
-    APIResult result = dbTarget.queryParam("sessionid", lensSessionId).request(mediaType).put(Entity.xml(dbName),
-      APIResult.class);
+    APIResult result = dbTarget.queryParam("sessionid", lensSessionId).request(mediaType)
+      .put(getEntityForString(dbName, mediaType), APIResult.class);
     assertSuccess(result);
   }
 
-  private String getCurrentDatabase() throws Exception {
+  private String getCurrentDatabase(MediaType mediaType) throws Exception {
     return target().path("metastore").path("databases/current")
       .queryParam("sessionid", lensSessionId).request(mediaType).get(String.class);
   }
@@ -462,25 +459,27 @@ public class TestMetastoreService extends LensJerseyTest {
     }
   }
 
-  @Test
-  public void testCreateCube() throws Exception {
-    final String DB = dbPFX + "test_create_cube";
-    String prevDb = getCurrentDatabase();
-    createDatabase(DB);
-    setCurrentDatabase(DB);
+  @Test(dataProvider = "mediaTypeData")
+  public void testCreateCube(MediaType mediaType) throws Exception {
+    final String DB = dbPFX + "test_create_cube" + mediaType.getSubtype();
+    String prevDb = getCurrentDatabase(mediaType);
+    createDatabase(DB, mediaType);
+    setCurrentDatabase(DB, mediaType);
     try {
       final XCube cube = createTestCube("testCube1");
       final WebTarget target = target().path("metastore").path("cubes");
       APIResult result;
       try {
         // first try without a session id
-        target.request(mediaType).post(Entity.xml(cubeObjectFactory.createXCube(cube)), APIResult.class);
+        target.request(mediaType).post(Entity.entity(new GenericEntity<JAXBElement<XCube>>(
+          cubeObjectFactory.createXCube(cube)){}, mediaType), APIResult.class);
         fail("Should have thrown bad request exception");
       } catch (BadRequestException badReq) {
         // expected
       }
       result = target.queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXCube(cube)), APIResult.class);
+        .post(Entity.entity(new GenericEntity<JAXBElement<XCube>>(cubeObjectFactory.createXCube(cube)){}, mediaType),
+          APIResult.class);
       assertNotNull(result);
       assertSuccess(result);
 
@@ -490,14 +489,16 @@ public class TestMetastoreService extends LensJerseyTest {
       // create invalid derived cube
       XCube dcube = createDerivedCube("testderived", "testCube1", true);
       result = target.queryParam("sessionid", lensSessionId).request(
-        mediaType).post(Entity.xml(cubeObjectFactory.createXCube(dcube)), APIResult.class);
+        mediaType).post(Entity.entity(new GenericEntity<JAXBElement<XCube>>(cubeObjectFactory.createXCube(dcube)){},
+        mediaType), APIResult.class);
       assertEquals(result.getStatus(), Status.FAILED);
       assertEquals(result.getMessage(), "Problem in submitting entity: Derived cube invalid: Measures "
         + "[random_measure] and Dim Attributes [random_dim] were not present in parent cube testcube1");
       // create derived cube
       dcube = createDerivedCube("testderived", "testCube1", false);
       result = target.queryParam("sessionid", lensSessionId).request(
-        mediaType).post(Entity.xml(cubeObjectFactory.createXCube(dcube)), APIResult.class);
+        mediaType).post(Entity.entity(new GenericEntity<JAXBElement<XCube>>(cubeObjectFactory.createXCube(dcube)){},
+        mediaType), APIResult.class);
       assertNotNull(result);
       assertSuccess(result);
 
@@ -528,7 +529,8 @@ public class TestMetastoreService extends LensJerseyTest {
       qcube.getProperties().getProperty().add(xp);
 
       result = target.queryParam("sessionid", lensSessionId).request(
-        mediaType).post(Entity.xml(cubeObjectFactory.createXCube(qcube)), APIResult.class);
+        mediaType).post(Entity.entity(new GenericEntity<JAXBElement<XCube>>(cubeObjectFactory.createXCube(qcube)){},
+        mediaType), APIResult.class);
       assertNotNull(result);
       assertSuccess(result);
 
@@ -546,12 +548,11 @@ public class TestMetastoreService extends LensJerseyTest {
         LensUtil.<String, Boolean>getHashMap("testCube1", true, "testderived", true, "testNoQueryCube", false));
 
     } finally {
-      dropDatabase(DB);
-      setCurrentDatabase(prevDb);
+      dropDatabase(DB, mediaType);
+      setCurrentDatabase(prevDb, mediaType);
     }
   }
 
-  @Test
   public void testMeasureJaxBConversion() throws Exception {
     CubeMeasure cubeMeasure =
       new ColumnMeasure(new FieldSchema("msr1", "int", "first measure"), null, null, null, null, null, null, null,
@@ -563,12 +564,12 @@ public class TestMetastoreService extends LensJerseyTest {
     assertEquals(actualMeasure.getMax(), measure.getMax());
   }
 
-  @Test
-  public void testGetCube() throws Exception {
-    final String DB = dbPFX + "test_get_cube";
-    String prevDb = getCurrentDatabase();
-    createDatabase(DB);
-    setCurrentDatabase(DB);
+  @Test(dataProvider = "mediaTypeData")
+  public void testGetCube(MediaType mediaType) throws Exception {
+    final String DB = dbPFX + "test_get_cube" + mediaType.getSubtype();
+    String prevDb = getCurrentDatabase(mediaType);
+    createDatabase(DB, mediaType);
+    setCurrentDatabase(DB, mediaType);
 
     try {
       final XBaseCube cube = createTestCube("testGetCube");
@@ -576,7 +577,8 @@ public class TestMetastoreService extends LensJerseyTest {
       WebTarget target = target().path("metastore").path("cubes");
       JAXBElement<XCube> element = cubeObjectFactory.createXCube(cube);
       APIResult result =
-        target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(element), APIResult.class);
+        target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.entity(
+          new GenericEntity<JAXBElement<XCube>>(element){}, mediaType), APIResult.class);
       assertSuccess(result);
 
       // Now get
@@ -674,7 +676,8 @@ public class TestMetastoreService extends LensJerseyTest {
       // Create this cube first
       element = cubeObjectFactory.createXCube(dcube);
       result =
-        target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(element), APIResult.class);
+        target.queryParam("sessionid", lensSessionId).request(mediaType).post(
+          Entity.entity(new GenericEntity<JAXBElement<XCube>>(element){}, mediaType), APIResult.class);
       assertSuccess(result);
 
       // Now get
@@ -688,17 +691,17 @@ public class TestMetastoreService extends LensJerseyTest {
       assertEquals(actual2.getMeasureNames().getMeasureName().size(), dcube.getMeasureNames().getMeasureName().size());
       assertEquals(actual2.getDimAttrNames().getAttrName().size(), dcube.getDimAttrNames().getAttrName().size());
     } finally {
-      dropDatabase(DB);
-      setCurrentDatabase(prevDb);
+      dropDatabase(DB, mediaType);
+      setCurrentDatabase(prevDb, mediaType);
     }
   }
 
-  @Test
-  public void testDropCube() throws Exception {
-    final String DB = dbPFX + "test_drop_cube";
-    String prevDb = getCurrentDatabase();
-    createDatabase(DB);
-    setCurrentDatabase(DB);
+  @Test(dataProvider = "mediaTypeData")
+  public void testDropCube(MediaType mediaType) throws Exception {
+    final String DB = dbPFX + "test_drop_cube" + mediaType.getSubtype();
+    String prevDb = getCurrentDatabase(mediaType);
+    createDatabase(DB, mediaType);
+    setCurrentDatabase(DB, mediaType);
 
     try {
       final XCube cube = createTestCube("test_drop_cube");
@@ -706,7 +709,8 @@ public class TestMetastoreService extends LensJerseyTest {
       WebTarget target = target().path("metastore").path("cubes");
       JAXBElement<XCube> element = cubeObjectFactory.createXCube(cube);
       APIResult result =
-        target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(element), APIResult.class);
+        target.queryParam("sessionid", lensSessionId).request(mediaType).post(
+          Entity.entity(new GenericEntity<JAXBElement<XCube>>(element){}, mediaType), APIResult.class);
       assertSuccess(result);
 
       final XCube dcube = createDerivedCube("test_drop_derived_cube", "test_drop_cube", false);
@@ -714,7 +718,8 @@ public class TestMetastoreService extends LensJerseyTest {
       // Create this cube first
       element = cubeObjectFactory.createXCube(dcube);
       result =
-        target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(element), APIResult.class);
+        target.queryParam("sessionid", lensSessionId).request(mediaType).post(
+          Entity.entity(new GenericEntity<JAXBElement<XCube>>(element){}, mediaType), APIResult.class);
       assertSuccess(result);
 
       target = target().path("metastore").path("cubes").path("test_drop_derived_cube");
@@ -745,18 +750,18 @@ public class TestMetastoreService extends LensJerseyTest {
         log.error("Resource not found.", ex);
       }
     } finally {
-      dropDatabase(DB);
-      setCurrentDatabase(prevDb);
+      dropDatabase(DB, mediaType);
+      setCurrentDatabase(prevDb, mediaType);
     }
   }
 
-  @Test
-  public void testUpdateCube() throws Exception {
+  @Test(dataProvider = "mediaTypeData")
+  public void testUpdateCube(MediaType mediaType) throws Exception {
     final String cubeName = "test_update";
-    final String DB = dbPFX + "test_update_cube";
-    String prevDb = getCurrentDatabase();
-    createDatabase(DB);
-    setCurrentDatabase(DB);
+    final String DB = dbPFX + "test_update_cube" + mediaType.getSubtype();
+    String prevDb = getCurrentDatabase(mediaType);
+    createDatabase(DB, mediaType);
+    setCurrentDatabase(DB, mediaType);
 
     try {
       final XBaseCube cube = createTestCube(cubeName);
@@ -764,7 +769,8 @@ public class TestMetastoreService extends LensJerseyTest {
       WebTarget target = target().path("metastore").path("cubes");
       JAXBElement<XCube> element = cubeObjectFactory.createXCube(cube);
       APIResult result =
-        target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(element), APIResult.class);
+        target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.entity(
+          new GenericEntity<JAXBElement<XCube>>(element){}, mediaType), APIResult.class);
       assertSuccess(result);
 
       // Update something
@@ -788,7 +794,8 @@ public class TestMetastoreService extends LensJerseyTest {
 
       element = cubeObjectFactory.createXCube(cube);
       result = target.path(cubeName)
-        .queryParam("sessionid", lensSessionId).request(mediaType).put(Entity.xml(element), APIResult.class);
+        .queryParam("sessionid", lensSessionId).request(mediaType).put(Entity.entity(
+          new GenericEntity<JAXBElement<XCube>>(element){}, mediaType), APIResult.class);
       assertSuccess(result);
 
       JAXBElement<XCube> got =
@@ -807,7 +814,8 @@ public class TestMetastoreService extends LensJerseyTest {
       XDerivedCube dcube = createDerivedCube("test_update_derived", cubeName, true);
       element = cubeObjectFactory.createXCube(dcube);
       result =
-        target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(element), APIResult.class);
+        target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.entity(
+          new GenericEntity<JAXBElement<XCube>>(element){}, mediaType), APIResult.class);
       assertEquals(result.getStatus(), Status.FAILED);
       assertEquals(result.getMessage(), "Problem in submitting entity: Derived cube invalid: Measures "
         + "[random_measure] and Dim Attributes [random_dim] were not present in parent cube test_update");
@@ -815,7 +823,8 @@ public class TestMetastoreService extends LensJerseyTest {
       // Create this cube first
       element = cubeObjectFactory.createXCube(dcube);
       result =
-        target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(element), APIResult.class);
+        target.queryParam("sessionid", lensSessionId).request(mediaType).post(
+          Entity.entity(new GenericEntity<JAXBElement<XCube>>(element){}, mediaType), APIResult.class);
       assertSuccess(result);
 
       // Update something
@@ -831,7 +840,8 @@ public class TestMetastoreService extends LensJerseyTest {
 
       element = cubeObjectFactory.createXCube(dcube);
       result = target.path("test_update_derived")
-        .queryParam("sessionid", lensSessionId).request(mediaType).put(Entity.xml(element), APIResult.class);
+        .queryParam("sessionid", lensSessionId).request(mediaType).put(Entity.entity(new
+          GenericEntity<JAXBElement<XCube>>(element){}, mediaType), APIResult.class);
       assertSuccess(result);
 
       got = target.path("test_update_derived")
@@ -849,19 +859,19 @@ public class TestMetastoreService extends LensJerseyTest {
       assertEquals(((AbstractCubeTable) hdcube).getProperties().get("foo.derived2"), "bar.derived2");
 
     } finally {
-      dropDatabase(DB);
-      setCurrentDatabase(prevDb);
+      dropDatabase(DB, mediaType);
+      setCurrentDatabase(prevDb, mediaType);
     }
   }
 
-  @Test
-  public void testStorage() throws Exception {
-    final String DB = dbPFX + "test_storage";
-    String prevDb = getCurrentDatabase();
-    createDatabase(DB);
-    setCurrentDatabase(DB);
+  @Test(dataProvider = "mediaTypeData")
+  public void testStorage(MediaType mediaType) throws Exception {
+    final String DB = dbPFX + "test_storage" + mediaType.getSubtype();
+    String prevDb = getCurrentDatabase(mediaType);
+    createDatabase(DB, mediaType);
+    setCurrentDatabase(DB, mediaType);
     try {
-      createStorage("store1");
+      createStorage("store1", mediaType);
       final WebTarget target = target().path("metastore").path("storages");
 
       StringList storages = target.queryParam("sessionid", lensSessionId).request(mediaType).get(StringList.class);
@@ -891,7 +901,8 @@ public class TestMetastoreService extends LensJerseyTest {
 
       APIResult result = target.path("store1")
         .queryParam("sessionid", lensSessionId).queryParam("storage", "store1")
-        .request(mediaType).put(Entity.xml(cubeObjectFactory.createXStorage(store1)), APIResult.class);
+        .request(mediaType).put(Entity.entity(new GenericEntity<JAXBElement<XStorage>>(cubeObjectFactory
+          .createXStorage(store1)){}, mediaType), APIResult.class);
       assertSuccess(result);
 
       store1 = target.path("store1").queryParam("sessionid", lensSessionId).request(mediaType).get(XStorage.class);
@@ -904,10 +915,10 @@ public class TestMetastoreService extends LensJerseyTest {
       assertEquals(JAXBUtils.mapFromXProperties(store1.getProperties()).get("prop2.name"), "prop2.value");
 
       // drop the storage
-      dropStorage("store1");
+      dropStorage("store1", mediaType);
     } finally {
-      dropDatabase(DB);
-      setCurrentDatabase(prevDb);
+      dropDatabase(DB, mediaType);
+      setCurrentDatabase(prevDb, mediaType);
     }
   }
 
@@ -982,25 +993,22 @@ public class TestMetastoreService extends LensJerseyTest {
     return dt;
   }
 
-  private XDimensionTable createDimTable(String dimTableName) throws Exception {
+  private XDimensionTable createDimTable(String dimTableName, MediaType mediaType) throws Exception {
     XDimension dimension = createDimension("testdim");
     APIResult result = target().path("metastore").path("dimensions")
       .queryParam("sessionid", lensSessionId).request(
-        mediaType).post(Entity.xml(cubeObjectFactory.createXDimension(dimension)), APIResult.class);
+        mediaType).post(Entity.entity(new GenericEntity<JAXBElement<XDimension>>(cubeObjectFactory
+        .createXDimension(dimension)) {}, mediaType), APIResult.class);
     assertSuccess(result);
     XDimensionTable dt = createDimTable("testdim", dimTableName);
     dt.getStorageTables().getStorageTable().add(createStorageTblElement("test", dimTableName, "HOURLY"));
-    final FormDataMultiPart mp = new FormDataMultiPart();
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(),
-      lensSessionId, medType));
-    mp.bodyPart(new FormDataBodyPart(
-      FormDataContentDisposition.name("dimensionTable").fileName("dimtable").build(),
-      cubeObjectFactory.createXDimensionTable(dt), medType));
     result = target()
       .path("metastore")
-      .path("dimtables")
+      .path("dimtables").queryParam("sessionid", lensSessionId)
       .request(mediaType)
-      .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
+      .post(Entity.entity(
+        new GenericEntity<JAXBElement<XDimensionTable>>(cubeObjectFactory.createXDimensionTable(dt)) {},
+        mediaType), APIResult.class);
     assertSuccess(result);
     return dt;
   }
@@ -1054,7 +1062,7 @@ public class TestMetastoreService extends LensJerseyTest {
     return dimension;
   }
 
-  private void createdChainedDimensions() throws Exception {
+  private void createdChainedDimensions(MediaType mediaType) throws Exception {
     XDimension dimension = createDimension("testdim");
     XDimension dimension2 = createDimension("testdim2");
 
@@ -1137,25 +1145,27 @@ public class TestMetastoreService extends LensJerseyTest {
 
     // create
     APIResult result = target.queryParam("sessionid", lensSessionId).request(
-      mediaType).post(Entity.xml(cubeObjectFactory.createXDimension(dimension)), APIResult.class);
+      mediaType).post(Entity.entity(new GenericEntity<JAXBElement<XDimension>>(cubeObjectFactory
+      .createXDimension(dimension)){}, mediaType), APIResult.class);
     assertNotNull(result);
     assertSuccess(result);
 
     // create
     result = target.queryParam("sessionid", lensSessionId).request(
-      mediaType).post(Entity.xml(cubeObjectFactory.createXDimension(dimension2)), APIResult.class);
+      mediaType).post(Entity.entity(new GenericEntity<JAXBElement<XDimension>>(cubeObjectFactory
+      .createXDimension(dimension2)){}, mediaType), APIResult.class);
     assertNotNull(result);
     assertSuccess(result);
   }
 
-  @Test
-  public void testDimension() throws Exception {
-    final String DB = dbPFX + "test_dimension";
-    String prevDb = getCurrentDatabase();
-    createDatabase(DB);
-    setCurrentDatabase(DB);
+  @Test(dataProvider = "mediaTypeData")
+  public void testDimension(MediaType mediaType) throws Exception {
+    final String DB = dbPFX + "test_dimension" + mediaType.getSubtype();
+    String prevDb = getCurrentDatabase(mediaType);
+    createDatabase(DB, mediaType);
+    setCurrentDatabase(DB, mediaType);
     try {
-      createdChainedDimensions();
+      createdChainedDimensions(mediaType);
 
       final WebTarget target = target().path("metastore").path("dimensions");
 
@@ -1257,7 +1267,8 @@ public class TestMetastoreService extends LensJerseyTest {
 
       APIResult result = target.path("testdim")
         .queryParam("sessionid", lensSessionId)
-        .request(mediaType).put(Entity.xml(cubeObjectFactory.createXDimension(testDim)), APIResult.class);
+        .request(mediaType).put(Entity.entity(new GenericEntity<JAXBElement<XDimension>>(cubeObjectFactory
+          .createXDimension(testDim)){}, mediaType), APIResult.class);
       assertSuccess(result);
 
       XDimension altered = target.path("testdim").queryParam("sessionid", lensSessionId).request(mediaType).get(
@@ -1299,22 +1310,22 @@ public class TestMetastoreService extends LensJerseyTest {
         log.error("Resource not found.", ex);
       }
     } finally {
-      dropDatabase(DB);
-      setCurrentDatabase(prevDb);
+      dropDatabase(DB, mediaType);
+      setCurrentDatabase(prevDb, mediaType);
     }
   }
 
-  @Test
-  public void testCreateAndDropDimensionTable() throws Exception {
+  @Test(dataProvider = "mediaTypeData")
+  public void testCreateAndDropDimensionTable(MediaType mediaType) throws Exception {
     final String table = "test_create_dim";
-    final String DB = dbPFX + "test_dim_db";
-    String prevDb = getCurrentDatabase();
-    createDatabase(DB);
-    setCurrentDatabase(DB);
-    createStorage("test");
+    final String DB = dbPFX + "test_dim_db" + mediaType.getSubtype();
+    String prevDb = getCurrentDatabase(mediaType);
+    createDatabase(DB, mediaType);
+    setCurrentDatabase(DB, mediaType);
+    createStorage("test", mediaType);
 
     try {
-      createDimTable(table);
+      createDimTable(table, mediaType);
 
       // Drop the table now
       APIResult result =
@@ -1334,22 +1345,22 @@ public class TestMetastoreService extends LensJerseyTest {
       }
 
     } finally {
-      setCurrentDatabase(prevDb);
-      dropDatabase(DB);
+      setCurrentDatabase(prevDb, mediaType);
+      dropDatabase(DB, mediaType);
     }
   }
 
-  @Test
-  public void testGetAndUpdateDimensionTable() throws Exception {
+  @Test(dataProvider = "mediaTypeData")
+  public void testGetAndUpdateDimensionTable(MediaType mediaType) throws Exception {
     final String table = "test_get_dim";
-    final String DB = dbPFX + "test_get_dim_db";
-    String prevDb = getCurrentDatabase();
-    createDatabase(DB);
-    setCurrentDatabase(DB);
-    createStorage("test");
+    final String DB = dbPFX + "test_get_dim_db" + mediaType.getSubtype();
+    String prevDb = getCurrentDatabase(mediaType);
+    createDatabase(DB, mediaType);
+    setCurrentDatabase(DB, mediaType);
+    createStorage("test", mediaType);
 
     try {
-      XDimensionTable dt1 = createDimTable(table);
+      XDimensionTable dt1 = createDimTable(table, mediaType);
 
       JAXBElement<XDimensionTable> dtElement = target().path("metastore/dimtables").path(table)
         .queryParam("sessionid", lensSessionId).request(mediaType)
@@ -1379,7 +1390,9 @@ public class TestMetastoreService extends LensJerseyTest {
       APIResult result = target().path("metastore/dimtables")
         .path(table)
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .put(Entity.xml(cubeObjectFactory.createXDimensionTable(dt2)), APIResult.class);
+        .put(Entity.entity(new GenericEntity<JAXBElement<XDimensionTable>>(cubeObjectFactory
+            .createXDimensionTable(dt2)){}, mediaType),
+          APIResult.class);
       assertSuccess(result);
 
       // Get the updated table
@@ -1407,7 +1420,7 @@ public class TestMetastoreService extends LensJerseyTest {
       dt3.getStorageTables().getStorageTable().get(0).getTableDesc().setInputFormat(
         SequenceFileInputFormat.class.getCanonicalName());
       // add one more storage table
-      createStorage("testAlterDimStorage");
+      createStorage("testAlterDimStorage", mediaType);
       XStorageTableElement newStorage = createStorageTblElement("testAlterDimStorage", dt3.getTableName(),
         (String[]) null);
       newStorage.getTableDesc().setFieldDelimiter(":");
@@ -1416,7 +1429,9 @@ public class TestMetastoreService extends LensJerseyTest {
       result = target().path("metastore/dimtables")
         .path(table)
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .put(Entity.xml(cubeObjectFactory.createXDimensionTable(dt3)), APIResult.class);
+        .put(Entity.entity(new GenericEntity<JAXBElement<XDimensionTable>>(cubeObjectFactory
+            .createXDimensionTable(dt3)){}, mediaType),
+          APIResult.class);
       assertSuccess(result);
 
       // Get the updated table
@@ -1436,7 +1451,7 @@ public class TestMetastoreService extends LensJerseyTest {
       // get native table and validate altered property
       XNativeTable newdNativeTable = nativeTarget.path("testalterdimstorage_" + table)
         .queryParam("sessionid", lensSessionId)
-        .request(mediaType).get(XNativeTable.class);
+        .request(mediaType).get(new GenericType<JAXBElement<XNativeTable>>() {}).getValue();
       assertEquals(newdNativeTable.getStorageDescriptor().getFieldDelimiter(), ":");
       XNativeTable alteredNativeTable = nativeTarget.path("test_" + table).queryParam("sessionid", lensSessionId)
         .request(mediaType).get(XNativeTable.class);
@@ -1449,22 +1464,22 @@ public class TestMetastoreService extends LensJerseyTest {
           .queryParam("sessionid", lensSessionId).request(mediaType).delete(APIResult.class);
       assertSuccess(result);
     } finally {
-      setCurrentDatabase(prevDb);
-      dropDatabase(DB);
+      setCurrentDatabase(prevDb, mediaType);
+      dropDatabase(DB, mediaType);
     }
   }
 
-  @Test
-  public void testGetDimensionStorages() throws Exception {
+  @Test(dataProvider = "mediaTypeData")
+  public void testGetDimensionStorages(MediaType mediaType) throws Exception {
     final String table = "test_get_storage";
     final String DB = dbPFX + "test_get_dim_storage_db";
-    String prevDb = getCurrentDatabase();
-    createDatabase(DB);
-    setCurrentDatabase(DB);
-    createStorage("test");
+    String prevDb = getCurrentDatabase(mediaType);
+    createDatabase(DB, mediaType);
+    setCurrentDatabase(DB, mediaType);
+    createStorage("test", mediaType);
 
     try {
-      createDimTable(table);
+      createDimTable(table, mediaType);
       StringList storages = target().path("metastore").path("dimtables")
         .path(table).path("storages")
         .queryParam("sessionid", lensSessionId).request(mediaType)
@@ -1472,28 +1487,30 @@ public class TestMetastoreService extends LensJerseyTest {
       assertEquals(storages.getElements().size(), 1);
       assertTrue(storages.getElements().contains("test"));
     } finally {
-      setCurrentDatabase(prevDb);
-      dropDatabase(DB);
+      setCurrentDatabase(prevDb, mediaType);
+      dropDatabase(DB, mediaType);
     }
   }
 
-  @Test
-  public void testAddAndDropDimensionStorages() throws Exception {
+  @Test(dataProvider = "mediaTypeData")
+  public void testAddAndDropDimensionStorages(MediaType mediaType) throws Exception {
     final String table = "test_add_drop_storage";
-    final String DB = dbPFX + "test_add_drop_dim_storage_db";
-    createDatabase(DB);
-    String prevDb = getCurrentDatabase();
-    setCurrentDatabase(DB);
-    createStorage("test");
-    createStorage("test2");
-    createStorage("test3");
+    final String DB = dbPFX + "test_add_drop_dim_storage_db" + mediaType.getSubtype();
+    createDatabase(DB, mediaType);
+    String prevDb = getCurrentDatabase(mediaType);
+    setCurrentDatabase(DB, mediaType);
+    createStorage("test", mediaType);
+    createStorage("test2", mediaType);
+    createStorage("test3", mediaType);
     try {
-      createDimTable(table);
+      createDimTable(table, mediaType);
 
       XStorageTableElement sTbl = createStorageTblElement("test2", table, "DAILY");
       APIResult result = target().path("metastore/dimtables").path(table).path("/storages")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXStorageTableElement(sTbl)), APIResult.class);
+        .post(Entity.entity(new GenericEntity<JAXBElement<XStorageTableElement>>(cubeObjectFactory
+          .createXStorageTableElement(sTbl)) {
+        }, mediaType), APIResult.class);
       assertSuccess(result);
 
       StringList storages = target().path("metastore").path("dimtables")
@@ -1501,8 +1518,8 @@ public class TestMetastoreService extends LensJerseyTest {
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .get(StringList.class);
       assertEquals(storages.getElements().size(), 2);
-      assertTrue(storages.getElements().contains("test"));
-      assertTrue(storages.getElements().contains("test2"));
+      assertTrue(storages.getElements().contains("test"), "Got " + storages.getElements().toString());
+      assertTrue(storages.getElements().contains("test2"), "Got " + storages.getElements().toString());
 
       // Check get table also contains the storage
       JAXBElement<XDimensionTable> dt = target().path("metastore/dimtables").path(table)
@@ -1542,7 +1559,8 @@ public class TestMetastoreService extends LensJerseyTest {
       sTbl = createStorageTblElement("test3", table, (String[]) null);
       result = target().path("metastore/dimtables").path(table).path("/storages")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXStorageTableElement(sTbl)), APIResult.class);
+        .post(Entity.entity(new GenericEntity<JAXBElement<XStorageTableElement>>(cubeObjectFactory
+          .createXStorageTableElement(sTbl)){}, mediaType), APIResult.class);
       assertSuccess(result);
 
       storages = target().path("metastore").path("dimtables")
@@ -1563,27 +1581,28 @@ public class TestMetastoreService extends LensJerseyTest {
       assertTrue(cdim.getStorages().contains("test3"));
       assertNull(cdim.getSnapshotDumpPeriods().get("test3"));
     } finally {
-      setCurrentDatabase(prevDb);
-      dropDatabase(DB);
+      setCurrentDatabase(prevDb, mediaType);
+      dropDatabase(DB, mediaType);
     }
   }
 
-  @Test
-  public void testAddDropAllDimStorages() throws Exception {
+  @Test(dataProvider = "mediaTypeData")
+  public void testAddDropAllDimStorages(MediaType mediaType) throws Exception {
     final String table = "testAddDropAllDimStorages";
-    final String DB = dbPFX + "testAddDropAllDimStorages_db";
-    String prevDb = getCurrentDatabase();
-    createDatabase(DB);
-    setCurrentDatabase(DB);
-    createStorage("test");
-    createStorage("test2");
+    final String DB = dbPFX + "testAddDropAllDimStorages_db" + mediaType.getSubtype();
+    String prevDb = getCurrentDatabase(mediaType);
+    createDatabase(DB, mediaType);
+    setCurrentDatabase(DB, mediaType);
+    createStorage("test", mediaType);
+    createStorage("test2", mediaType);
 
     try {
-      createDimTable(table);
+      createDimTable(table, mediaType);
       XStorageTableElement sTbl = createStorageTblElement("test2", table, "DAILY");
       APIResult result = target().path("metastore/dimtables").path(table).path("/storages")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXStorageTableElement(sTbl)), APIResult.class);
+        .post(Entity.entity(new GenericEntity<JAXBElement<XStorageTableElement>>(cubeObjectFactory
+          .createXStorageTableElement(sTbl)){}, mediaType), APIResult.class);
       assertSuccess(result);
 
       result = target().path("metastore/dimtables/").path(table).path("storages")
@@ -1591,7 +1610,6 @@ public class TestMetastoreService extends LensJerseyTest {
         .delete(APIResult.class);
       assertSuccess(result);
 
-
       JAXBElement<XDimensionTable> dt = target().path("metastore/dimtables").path(table)
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .get(new GenericType<JAXBElement<XDimensionTable>>() {});
@@ -1600,8 +1618,8 @@ public class TestMetastoreService extends LensJerseyTest {
       assertTrue(cdim.getStorages().isEmpty());
       assertTrue(cdim.getSnapshotDumpPeriods().isEmpty());
     } finally {
-      setCurrentDatabase(prevDb);
-      dropDatabase(DB);
+      setCurrentDatabase(prevDb, mediaType);
+      dropDatabase(DB, mediaType);
     }
   }
 
@@ -1636,31 +1654,27 @@ public class TestMetastoreService extends LensJerseyTest {
     return f;
   }
 
-  @Test
-  public void testCreateFactTable() throws Exception {
+  @Test(dataProvider = "mediaTypeData")
+  public void testCreateFactTable(MediaType mediaType) throws Exception {
     final String table = "testCreateFactTable";
-    final String DB = dbPFX + "testCreateFactTable_DB";
-    String prevDb = getCurrentDatabase();
-    createDatabase(DB);
-    setCurrentDatabase(DB);
-    createStorage("S1");
-    createStorage("S2");
+    final String DB = dbPFX + "testCreateFactTable_DB" + mediaType.getSubtype();
+    String prevDb = getCurrentDatabase(mediaType);
+    createDatabase(DB, mediaType);
+    setCurrentDatabase(DB, mediaType);
+    createStorage("S1", mediaType);
+    createStorage("S2", mediaType);
     try {
 
       XFactTable f = createFactTable(table);
       f.getStorageTables().getStorageTable().add(createStorageTblElement("S1", table, "HOURLY"));
       f.getStorageTables().getStorageTable().add(createStorageTblElement("S2", table, "DAILY"));
-      final FormDataMultiPart mp = new FormDataMultiPart();
-      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(),
-        lensSessionId, medType));
-      mp.bodyPart(new FormDataBodyPart(
-        FormDataContentDisposition.name("fact").fileName("fact").build(),
-        cubeObjectFactory.createXFactTable(f), medType));
       APIResult result = target()
         .path("metastore")
-        .path("facts")
+        .path("facts").queryParam("sessionid", lensSessionId)
         .request(mediaType)
-        .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
+        .post(Entity.entity(
+          new GenericEntity<JAXBElement<XFactTable>>(cubeObjectFactory.createXFactTable(f)){}, mediaType),
+          APIResult.class);
       assertSuccess(result);
 
       // Get all fact names, this should contain the fact table
@@ -1712,37 +1726,33 @@ public class TestMetastoreService extends LensJerseyTest {
         // PASS
       }
     } finally {
-      setCurrentDatabase(prevDb);
-      dropDatabase(DB);
+      setCurrentDatabase(prevDb, mediaType);
+      dropDatabase(DB, mediaType);
     }
   }
 
-  @Test
-  public void testUpdateFactTable() throws Exception {
+  @Test(dataProvider = "mediaTypeData")
+  public void testUpdateFactTable(MediaType mediaType) throws Exception {
     final String table = "testUpdateFactTable";
-    final String DB = dbPFX + "testUpdateFactTable_DB";
-    String prevDb = getCurrentDatabase();
-    createDatabase(DB);
-    setCurrentDatabase(DB);
-    createStorage("S1");
-    createStorage("S2");
-    createStorage("S3");
+    final String DB = dbPFX + "testUpdateFactTable_DB" + mediaType.getSubtype();
+    String prevDb = getCurrentDatabase(mediaType);
+    createDatabase(DB, mediaType);
+    setCurrentDatabase(DB, mediaType);
+    createStorage("S1", mediaType);
+    createStorage("S2", mediaType);
+    createStorage("S3", mediaType);
     try {
 
       XFactTable f = createFactTable(table);
       f.getStorageTables().getStorageTable().add(createStorageTblElement("S1", table, "HOURLY"));
       f.getStorageTables().getStorageTable().add(createStorageTblElement("S2", table, "DAILY"));
-      final FormDataMultiPart mp = new FormDataMultiPart();
-      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(),
-        lensSessionId, medType));
-      mp.bodyPart(new FormDataBodyPart(
-        FormDataContentDisposition.name("fact").fileName("fact").build(),
-        cubeObjectFactory.createXFactTable(f), medType));
       APIResult result = target()
         .path("metastore")
-        .path("facts")
+        .path("facts").queryParam("sessionid", lensSessionId)
         .request(mediaType)
-        .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
+        .post(Entity.entity(
+            new GenericEntity<JAXBElement<XFactTable>>(cubeObjectFactory.createXFactTable(f)){}, mediaType),
+          APIResult.class);
       assertSuccess(result);
 
       // Get the created table
@@ -1768,7 +1778,9 @@ public class TestMetastoreService extends LensJerseyTest {
       // Update
       result = target().path("metastore").path("facts").path(table)
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .put(Entity.xml(cubeObjectFactory.createXFactTable(update)), APIResult.class);
+        .put(Entity.entity(new GenericEntity<JAXBElement<XFactTable>>(cubeObjectFactory.createXFactTable(update)){},
+            mediaType),
+          APIResult.class);
       assertSuccess(result);
 
       // Get the updated table
@@ -1801,7 +1813,7 @@ public class TestMetastoreService extends LensJerseyTest {
 
       // get native table and validate altered property
       XNativeTable alteredNativeTable = nativeTarget.path("s1_" + table).queryParam("sessionid", lensSessionId)
-        .request(mediaType).get(XNativeTable.class);
+        .request(mediaType).get(new GenericType<JAXBElement<XNativeTable>>() {}).getValue();
       assertEquals(alteredNativeTable.getStorageDescriptor().getFieldDelimiter(), "#");
 
       // Finally, drop the fact table
@@ -1823,37 +1835,33 @@ public class TestMetastoreService extends LensJerseyTest {
         // PASS
       }
     } finally {
-      setCurrentDatabase(prevDb);
-      dropDatabase(DB);
+      setCurrentDatabase(prevDb, mediaType);
+      dropDatabase(DB, mediaType);
     }
   }
 
-  @Test
-  public void testFactStorages() throws Exception {
+  @Test(dataProvider = "mediaTypeData")
+  public void testFactStorages(MediaType mediaType) throws Exception {
     final String table = "testFactStorages";
-    final String DB = dbPFX + "testFactStorages_DB";
-    String prevDb = getCurrentDatabase();
-    createDatabase(DB);
-    setCurrentDatabase(DB);
-    createStorage("S1");
-    createStorage("S2");
-    createStorage("S3");
+    final String DB = dbPFX + "testFactStorages_DB" + mediaType.getSubtype();
+    String prevDb = getCurrentDatabase(mediaType);
+    createDatabase(DB, mediaType);
+    setCurrentDatabase(DB, mediaType);
+    createStorage("S1", mediaType);
+    createStorage("S2", mediaType);
+    createStorage("S3", mediaType);
 
     try {
       XFactTable f = createFactTable(table);
       f.getStorageTables().getStorageTable().add(createStorageTblElement("S1", table, "HOURLY"));
       f.getStorageTables().getStorageTable().add(createStorageTblElement("S2", table, "DAILY"));
-      final FormDataMultiPart mp = new FormDataMultiPart();
-      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(),
-        lensSessionId, medType));
-      mp.bodyPart(new FormDataBodyPart(
-        FormDataContentDisposition.name("fact").fileName("fact").build(),
-        cubeObjectFactory.createXFactTable(f), medType));
       APIResult result = target()
         .path("metastore")
-        .path("facts")
+        .path("facts").queryParam("sessionid", lensSessionId)
         .request(mediaType)
-        .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
+        .post(Entity.entity(
+            new GenericEntity<JAXBElement<XFactTable>>(cubeObjectFactory.createXFactTable(f)){}, mediaType),
+          APIResult.class);
       assertSuccess(result);
 
       // Test get storages
@@ -1866,7 +1874,8 @@ public class TestMetastoreService extends LensJerseyTest {
       XStorageTableElement sTbl = createStorageTblElement("S3", table, "HOURLY", "DAILY", "MONTHLY");
       result = target().path("metastore/facts").path(table).path("storages")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXStorageTableElement(sTbl)), APIResult.class);
+        .post(Entity.entity(new GenericEntity<JAXBElement<XStorageTableElement>>(cubeObjectFactory
+          .createXStorageTableElement(sTbl)){}, mediaType), APIResult.class);
       assertSuccess(result);
 
       // Get the fact storage
@@ -1881,7 +1890,8 @@ public class TestMetastoreService extends LensJerseyTest {
 
       JAXBElement<XFactTable> gotFactElement = target().path("metastore/facts").path(table)
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .get(new GenericType<JAXBElement<XFactTable>>() {});
+        .get(new GenericType<JAXBElement<XFactTable>>() {
+        });
       XFactTable gotFact = gotFactElement.getValue();
       CubeFactTable ucf = JAXBUtils.cubeFactFromFactTable(gotFact);
 
@@ -1900,8 +1910,8 @@ public class TestMetastoreService extends LensJerseyTest {
       assertEquals(storageList.getElements().size(), 2);
       assertFalse(storageList.getElements().contains("S3"));
     } finally {
-      setCurrentDatabase(prevDb);
-      dropDatabase(DB);
+      setCurrentDatabase(prevDb, mediaType);
+      dropDatabase(DB, mediaType);
     }
   }
 
@@ -1937,28 +1947,29 @@ public class TestMetastoreService extends LensJerseyTest {
     return xp;
   }
 
-  @Test
-  public void testLatestDateWithInputTimeDimAbsentFromAtleastOneFactPartition() throws Exception {
+  @Test(dataProvider = "mediaTypeData")
+  public void testLatestDateWithInputTimeDimAbsentFromAtleastOneFactPartition(MediaType mediaType) throws Exception {
 
     final String dbName = dbPFX + getUniqueDbName();
-    String prevDb = getCurrentDatabase();
+    String prevDb = getCurrentDatabase(mediaType);
 
     try {
 
       // Begin: Setup
-      createDatabase(dbName);
-      setCurrentDatabase(dbName);
+      createDatabase(dbName, mediaType);
+      setCurrentDatabase(dbName, mediaType);
 
       String[] storages = {"S1"};
       for (String storage : storages) {
-        createStorage(storage);
+        createStorage(storage, mediaType);
       }
 
       // Create a cube with name testCube
       final String cubeName = "testCube";
       final XCube cube = createTestCube(cubeName);
       APIResult result = target().path("metastore").path("cubes").queryParam("sessionid", lensSessionId)
-        .request(mediaType).post(Entity.xml(cubeObjectFactory.createXCube(cube)), APIResult.class);
+        .request(mediaType).post(Entity.entity(new GenericEntity<JAXBElement<XCube>>(
+          cubeObjectFactory.createXCube(cube)){}, mediaType), APIResult.class);
       assertSuccess(result);
 
       // Create two facts and fact storage tables with one of the facts
@@ -1975,8 +1986,8 @@ public class TestMetastoreService extends LensJerseyTest {
       String fact2TableName = "fact2";
       String[] fact2TimePartColNames = {timeDimensionPresentInPartitionOfAllFacts};
 
-      createTestFactAndStorageTable(cubeName, storages, fact1TableName, fact1TimePartColNames);
-      createTestFactAndStorageTable(cubeName, storages, fact2TableName, fact2TimePartColNames);
+      createTestFactAndStorageTable(cubeName, storages, fact1TableName, fact1TimePartColNames, mediaType);
+      createTestFactAndStorageTable(cubeName, storages, fact2TableName, fact2TimePartColNames, mediaType);
 
       // Add partition to fact storage table of the fact whose partition has all time dimension
 
@@ -1998,7 +2009,9 @@ public class TestMetastoreService extends LensJerseyTest {
       APIResult partAddResult = target().path("metastore/facts/").path(fact1TableName)
         .path("storages/" + storages[0] + "/partition")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXPartition(xp)), APIResult.class);
+        .post(Entity.entity(new GenericEntity<JAXBElement<XPartition>>(cubeObjectFactory.createXPartition(xp)){},
+            mediaType),
+          APIResult.class);
       assertSuccess(partAddResult);
 
       // End: Setup
@@ -2016,44 +2029,42 @@ public class TestMetastoreService extends LensJerseyTest {
 
     } finally {
       // Cleanup
-      setCurrentDatabase(prevDb);
-      dropDatabase(dbName);
+      setCurrentDatabase(prevDb, mediaType);
+      dropDatabase(dbName, mediaType);
     }
   }
 
 
   @SuppressWarnings("deprecation")
-  @Test
-  public void testFactStoragePartitions() throws Exception {
+  @Test(dataProvider = "mediaTypeData")
+  public void testFactStoragePartitions(MediaType mediaType) throws Exception {
     final String table = "testFactStoragePartitions";
-    final String DB = dbPFX + "testFactStoragePartitions_DB";
-    String prevDb = getCurrentDatabase();
-    createDatabase(DB);
-    setCurrentDatabase(DB);
-    createStorage("S1");
-    createStorage("S2");
+    final String DB = dbPFX + "testFactStoragePartitions_DB" + mediaType.getSubtype();
+    String prevDb = getCurrentDatabase(mediaType);
+    createDatabase(DB, mediaType);
+    setCurrentDatabase(DB, mediaType);
+    createStorage("S1", mediaType);
+    createStorage("S2", mediaType);
 
     try {
 
       final XCube cube = createTestCube("testCube");
       target().path("metastore").path("cubes").queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXCube(cube)), APIResult.class);
+        .post(Entity.entity(new GenericEntity<JAXBElement<XCube>>(cubeObjectFactory.createXCube(cube)) {
+          }, mediaType),
+          APIResult.class);
 
       XFactTable f = createFactTable(table);
       f.getStorageTables().getStorageTable().add(createStorageTblElement("S1", table, "HOURLY"));
       f.getStorageTables().getStorageTable().add(createStorageTblElement("S2", table, "DAILY"));
       f.getStorageTables().getStorageTable().add(createStorageTblElement("S2", table, "HOURLY"));
-      final FormDataMultiPart mp = new FormDataMultiPart();
-      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(),
-        lensSessionId, medType));
-      mp.bodyPart(new FormDataBodyPart(
-        FormDataContentDisposition.name("fact").fileName("fact").build(),
-        cubeObjectFactory.createXFactTable(f), medType));
       APIResult result = target()
         .path("metastore")
-        .path("facts")
+        .path("facts").queryParam("sessionid", lensSessionId)
         .request(mediaType)
-        .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
+        .post(Entity.entity(
+            new GenericEntity<JAXBElement<XFactTable>>(cubeObjectFactory.createXFactTable(f)){}, mediaType),
+          APIResult.class);
       assertSuccess(result);
 
       APIResult partAddResult;
@@ -2070,7 +2081,8 @@ public class TestMetastoreService extends LensJerseyTest {
         .add(createTimePartSpecElement(partDate, "non_existant_time_part"));
       partAddResult = target().path("metastore/facts/").path(table).path("storages/S2/partition")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXPartition(xp2)), APIResult.class);
+        .post(Entity.entity(new GenericEntity<JAXBElement<XPartition>>(cubeObjectFactory.createXPartition(xp2)){},
+          mediaType), APIResult.class);
       assertEquals(partAddResult.getStatus(), Status.FAILED);
       assertEquals(partAddResult.getMessage(), "No timeline found for fact=testFactStoragePartitions, storage=S2, "
         + "update period=HOURLY, partition column=non_existant_time_part.");
@@ -2078,25 +2090,29 @@ public class TestMetastoreService extends LensJerseyTest {
       XPartition xp = createPartition(table, partDate);
       partAddResult = target().path("metastore/facts/").path(table).path("storages/S2/partition")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXPartition(xp)), APIResult.class);
+        .post(Entity.entity(new GenericEntity<JAXBElement<XPartition>>(cubeObjectFactory.createXPartition(xp)){},
+          mediaType), APIResult.class);
       assertSuccess(partAddResult);
 
       // add same should fail
       partAddResult = target().path("metastore/facts/").path(table).path("storages/S2/partition")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXPartition(xp)), APIResult.class);
+        .post(Entity.entity(new GenericEntity<JAXBElement<XPartition>>(cubeObjectFactory.createXPartition(xp)){},
+          mediaType), APIResult.class);
       assertEquals(partAddResult.getStatus(), Status.FAILED);
 
       xp.setLocation(xp.getLocation() + "/a/b/c");
       APIResult partUpdateResult = target().path("metastore/facts/").path(table).path("storages/S2/partition")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .put(Entity.xml(cubeObjectFactory.createXPartition(xp)), APIResult.class);
+        .put(Entity.entity(new GenericEntity<JAXBElement<XPartition>>(cubeObjectFactory.createXPartition(xp)){},
+          mediaType), APIResult.class);
       assertSuccess(partUpdateResult);
 
       JAXBElement<XPartitionList> partitionsElement = target().path("metastore/facts").path(table)
         .path("storages/S2/partitions")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .get(new GenericType<JAXBElement<XPartitionList>>() {});
+        .get(new GenericType<JAXBElement<XPartitionList>>() {
+        });
 
       XPartitionList partitions = partitionsElement.getValue();
       assertNotNull(partitions);
@@ -2125,7 +2141,8 @@ public class TestMetastoreService extends LensJerseyTest {
       parts.getPartition().add(createPartition(table, DateUtils.addHours(partDate, 1)));
       partAddResult = target().path("metastore/facts/").path(table).path("storages/S2/partitions")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXPartitionList(parts)), APIResult.class);
+        .post(Entity.entity(new GenericEntity<JAXBElement<XPartitionList>>(
+          cubeObjectFactory.createXPartitionList(parts)){}, mediaType), APIResult.class);
       assertEquals(partAddResult.getStatus(), Status.PARTIAL);
 
       // Drop the partitions
@@ -2138,7 +2155,8 @@ public class TestMetastoreService extends LensJerseyTest {
       // Verify partition was dropped
       partitionsElement = target().path("metastore/facts").path(table).path("storages/S2/partitions")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .get(new GenericType<JAXBElement<XPartitionList>>() {});
+        .get(new GenericType<JAXBElement<XPartitionList>>() {
+        });
 
       partitions = partitionsElement.getValue();
       assertNotNull(partitions);
@@ -2152,20 +2170,29 @@ public class TestMetastoreService extends LensJerseyTest {
       // Try adding in batch, but to a wrong endpoint
       resp = target().path("metastore/facts/").path(table).path("storages/S2/partition")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXPartitionList(toXPartitionList(xp))));
-      assertXMLError(resp);
+        .post(Entity.entity(new GenericEntity<JAXBElement<XPartitionList>>(cubeObjectFactory
+          .createXPartitionList(toXPartitionList(xp))) {
+        }, mediaType));
+      assertXMLError(resp, mediaType);
 
 
       // Try adding in batch, but provide just an XPartition
       resp = target().path("metastore/facts/").path(table).path("storages/S2/partitions")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXPartition(xp)));
-      assertXMLError(resp);
+        .post(Entity.entity(new GenericEntity<JAXBElement<XPartition>>(cubeObjectFactory.createXPartition(xp)){},
+          mediaType));
+      if (mediaType.equals(MediaType.APPLICATION_XML_TYPE)) {
+        assertXMLError(resp, mediaType);
+      } else {
+        // for json input, XPartitionList is getting created
+        assertEquals(resp.getStatus(), 200);
+      }
 
       // Try adding in batch with one partition being wrong wrt partition column.
       partAddResult = target().path("metastore/facts/").path(table).path("storages/S2/partitions")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXPartitionList(toXPartitionList(xp2))),
+        .post(Entity.entity(new GenericEntity<JAXBElement<XPartitionList>>(cubeObjectFactory
+          .createXPartitionList(toXPartitionList(xp2))){}, mediaType),
           APIResult.class);
       assertEquals(partAddResult.getStatus(), Status.FAILED);
       assertEquals(partAddResult.getMessage(), "No timeline found for fact=testFactStoragePartitions, storage=S2, "
@@ -2173,7 +2200,9 @@ public class TestMetastoreService extends LensJerseyTest {
       // Add in batch
       partAddResult = target().path("metastore/facts/").path(table).path("storages/S2/partitions")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXPartitionList(toXPartitionList(xp))),
+        .post(Entity.entity(new GenericEntity<JAXBElement<XPartitionList>>(cubeObjectFactory
+            .createXPartitionList(toXPartitionList(xp))) {
+          }, mediaType),
           APIResult.class);
       assertSuccess(partAddResult);
 
@@ -2203,24 +2232,24 @@ public class TestMetastoreService extends LensJerseyTest {
       assertNotNull(partitions);
       assertEquals(partitions.getPartition().size(), 0);
     } finally {
-      setCurrentDatabase(prevDb);
-      dropDatabase(DB);
+      setCurrentDatabase(prevDb, mediaType);
+      dropDatabase(DB, mediaType);
     }
   }
 
-  @Test
-  public void testDimStoragePartitions() throws Exception {
+  @Test(dataProvider = "mediaTypeData")
+  public void testDimStoragePartitions(MediaType mediaType) throws Exception {
     final String table = "testDimStoragePartitions";
-    final String DB = dbPFX + "testDimStoragePartitions_DB";
-    String prevDb = getCurrentDatabase();
-    createDatabase(DB);
-    setCurrentDatabase(DB);
-    createStorage("S1");
-    createStorage("S2");
-    createStorage("test");
+    final String DB = dbPFX + "testDimStoragePartitions_DB" + mediaType.getSubtype();
+    String prevDb = getCurrentDatabase(mediaType);
+    createDatabase(DB, mediaType);
+    setCurrentDatabase(DB, mediaType);
+    createStorage("S1", mediaType);
+    createStorage("S2", mediaType);
+    createStorage("test", mediaType);
 
     try {
-      createDimTable(table);
+      createDimTable(table, mediaType);
       APIResult partAddResult;
       // Add null partition
       Response resp = target().path("metastore/dimtables/").path(table).path("storages/test/partition")
@@ -2233,20 +2262,23 @@ public class TestMetastoreService extends LensJerseyTest {
       XPartition xp = createPartition(table, partDate);
       partAddResult = target().path("metastore/dimtables/").path(table).path("storages/test/partition")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXPartition(xp)), APIResult.class);
+        .post(Entity.entity(new GenericEntity<JAXBElement<XPartition>>(cubeObjectFactory.createXPartition(xp)){},
+          mediaType), APIResult.class);
       assertSuccess(partAddResult);
 
       // create call for same
       partAddResult = target().path("metastore/dimtables/").path(table).path("storages/test/partition")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXPartition(xp)), APIResult.class);
+        .post(Entity.entity(new GenericEntity<JAXBElement<XPartition>>(cubeObjectFactory.createXPartition(xp)){},
+          mediaType), APIResult.class);
       assertEquals(partAddResult.getStatus(), Status.FAILED);
 
 
       xp.setLocation(xp.getLocation() + "/a/b/c");
       APIResult partUpdateResult = target().path("metastore/dimtables/").path(table).path("storages/test/partition")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .put(Entity.xml(cubeObjectFactory.createXPartition(xp)), APIResult.class);
+        .put(Entity.entity(new GenericEntity<JAXBElement<XPartition>>(cubeObjectFactory.createXPartition(xp)){},
+          mediaType), APIResult.class);
       assertSuccess(partUpdateResult);
 
       JAXBElement<XPartitionList> partitionsElement = target().path("metastore/dimtables").path(table)
@@ -2295,7 +2327,8 @@ public class TestMetastoreService extends LensJerseyTest {
       parts.getPartition().add(xp2);
       partAddResult = target().path("metastore/dimtables/").path(table).path("storages/test/partitions")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXPartitionList(parts)), APIResult.class);
+        .post(Entity.entity(new GenericEntity<JAXBElement<XPartitionList>>(
+          cubeObjectFactory.createXPartitionList(parts)){}, mediaType), APIResult.class);
       assertEquals(partAddResult.getStatus(), Status.PARTIAL);
 
       // Drop the partitions
@@ -2323,19 +2356,29 @@ public class TestMetastoreService extends LensJerseyTest {
       // Try adding in batch, but to a wrong endpoint
       resp = target().path("metastore/dimtables/").path(table).path("storages/test/partition")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXPartitionList(toXPartitionList(xp))));
-      assertXMLError(resp);
+        .post(Entity.entity(
+          new GenericEntity<JAXBElement<XPartitionList>>(cubeObjectFactory.createXPartitionList(toXPartitionList(xp)))
+          {}, mediaType));
+      assertXMLError(resp, mediaType);
 
       // Try adding in batch, but provide just an XPartition
       resp = target().path("metastore/dimtables/").path(table).path("storages/test/partitions")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXPartition(xp)));
-      assertXMLError(resp);
-
+        .post(Entity.entity(new GenericEntity<JAXBElement<XPartition>>(cubeObjectFactory.createXPartition(xp)){},
+          mediaType));
+      if (mediaType.equals(MediaType.APPLICATION_XML_TYPE)) {
+        assertXMLError(resp, mediaType);
+      } else {
+        // for JSON input the lone XPartition deserializes as an XPartitionList, so the request succeeds
+        assertEquals(resp.getStatus(), 200);
+      }
       // Add in batch
       partAddResult = target().path("metastore/dimtables/").path(table).path("storages/test/partitions")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXPartitionList(toXPartitionList(xp))),
+        .post(Entity.entity(
+          new GenericEntity<JAXBElement<XPartitionList>>(cubeObjectFactory.createXPartitionList(toXPartitionList(xp)))
+          {},
+          mediaType),
           APIResult.class);
       assertSuccess(partAddResult);
 
@@ -2367,7 +2410,8 @@ public class TestMetastoreService extends LensJerseyTest {
       // add again, this time we'll drop by filter
       partAddResult = target().path("metastore/dimtables/").path(table).path("storages/test/partitions")
         .queryParam("sessionid", lensSessionId).request(mediaType)
-        .post(Entity.xml(cubeObjectFactory.createXPartitionList(toXPartitionList(xp))),
+        .post(Entity.entity(new GenericEntity<JAXBElement<XPartitionList>>(
+            cubeObjectFactory.createXPartitionList(toXPartitionList(xp))){}, mediaType),
           APIResult.class);
       assertSuccess(partAddResult);
 
@@ -2386,17 +2430,19 @@ public class TestMetastoreService extends LensJerseyTest {
       assertNotNull(partitions);
       assertEquals(partitions.getPartition().size(), 0);
     } finally {
-      setCurrentDatabase(prevDb);
-      dropDatabase(DB);
+      setCurrentDatabase(prevDb, mediaType);
+      dropDatabase(DB, mediaType);
     }
   }
 
-  private void assertXMLError(Response resp) {
+  private void assertXMLError(Response resp, MediaType mt) {
     assertEquals(resp.getStatus(), 400);
-    LensAPIResult entity = resp.readEntity(LensAPIResult.class);
-    assertTrue(entity.isErrorResult());
-    assertEquals(entity.getLensErrorTO().getCode(), LensCommonErrorCode.INVALID_XML_ERROR.getValue());
-    assertTrue(entity.getLensErrorTO().getMessage().contains("unexpected element"));
+    if (mt.equals(MediaType.APPLICATION_XML_TYPE)) {
+      LensAPIResult entity = resp.readEntity(LensAPIResult.class);
+      assertTrue(entity.isErrorResult());
+      assertEquals(entity.getLensErrorTO().getCode(), LensCommonErrorCode.INVALID_XML_ERROR.getValue());
+      assertTrue(entity.getLensErrorTO().getMessage().contains("unexpected element"));
+    }
   }
 
   private XPartitionList toXPartitionList(final XPartition... xps) {
@@ -2405,12 +2451,12 @@ public class TestMetastoreService extends LensJerseyTest {
     return ret;
   }
 
-  @Test
-  public void testNativeTables() throws Exception {
-    final String DB = dbPFX + "test_native_tables";
-    String prevDb = getCurrentDatabase();
-    createDatabase(DB);
-    setCurrentDatabase(DB);
+  @Test(dataProvider = "mediaTypeData")
+  public void testNativeTables(MediaType mediaType) throws Exception {
+    final String DB = dbPFX + "test_native_tables" + mediaType.getSubtype();
+    String prevDb = getCurrentDatabase(mediaType);
+    createDatabase(DB, mediaType);
+    setCurrentDatabase(DB, mediaType);
 
     try {
       // create hive table
@@ -2498,7 +2544,8 @@ public class TestMetastoreService extends LensJerseyTest {
       JAXBElement<XCube> element = cubeObjectFactory.createXCube(cube);
       APIResult result =
         target().path("metastore").path("cubes").queryParam("sessionid",
-          lensSessionId).request(mediaType).post(Entity.xml(element), APIResult.class);
+          lensSessionId).request(mediaType).post(Entity.entity(new GenericEntity<JAXBElement<XCube>>(element){},
+          mediaType), APIResult.class);
       assertSuccess(result);
 
       // get a cube table
@@ -2521,8 +2568,8 @@ public class TestMetastoreService extends LensJerseyTest {
         .queryParam("dbName", "nonexisting").request(mediaType).get(Response.class);
       assertEquals(response.getStatus(), Response.Status.NOT_FOUND.getStatusCode());
     } finally {
-      dropDatabase(DB);
-      setCurrentDatabase(prevDb);
+      dropDatabase(DB, mediaType);
+      setCurrentDatabase(prevDb, mediaType);
     }
   }
 
@@ -2543,12 +2590,12 @@ public class TestMetastoreService extends LensJerseyTest {
     }
   }
 
-  @Test
-  public void testFlattenedView() throws Exception {
-    final String DB = dbPFX + "test_flattened_view";
-    String prevDb = getCurrentDatabase();
-    createDatabase(DB);
-    setCurrentDatabase(DB);
+  @Test(dataProvider = "mediaTypeData")
+  public void testFlattenedView(MediaType mediaType) throws Exception {
+    final String DB = dbPFX + "test_flattened_view" + mediaType.getSubtype();
+    String prevDb = getCurrentDatabase(mediaType);
+    createDatabase(DB, mediaType);
+    setCurrentDatabase(DB, mediaType);
 
     try {
       // Create the tables
@@ -2557,12 +2604,13 @@ public class TestMetastoreService extends LensJerseyTest {
       final WebTarget cubeTarget = target().path("metastore").path("cubes");
       APIResult result =
         cubeTarget.queryParam("sessionid", lensSessionId).request(mediaType)
-          .post(Entity.xml(cubeObjectFactory.createXCube(flatTestCube)), APIResult.class);
+          .post(Entity.entity(new GenericEntity<JAXBElement<XCube>>(cubeObjectFactory.createXCube(flatTestCube)){},
+            mediaType), APIResult.class);
       assertNotNull(result);
       assertSuccess(result);
 
       // create chained dimensions - testdim and testdim2
-      createdChainedDimensions();
+      createdChainedDimensions(mediaType);
 
       // Now test flattened view
       final WebTarget flatCubeTarget = target().path("metastore").path("flattened").path("flattestcube");
@@ -2630,13 +2678,13 @@ public class TestMetastoreService extends LensJerseyTest {
       ));
 
     } finally {
-      dropDatabase(DB);
-      setCurrentDatabase(prevDb);
+      dropDatabase(DB, mediaType);
+      setCurrentDatabase(prevDb, mediaType);
     }
   }
 
   private void createTestFactAndStorageTable(final String cubeName, final String[] storages, final String tableName,
-    final String[] timePartColNames) {
+    final String[] timePartColNames, MediaType mediaType) {
 
     // Create a fact table object linked to cubeName
     XFactTable f = createFactTable(tableName, cubeName);
@@ -2647,17 +2695,14 @@ public class TestMetastoreService extends LensJerseyTest {
     }
 
     // Call API to create a fact table and storage table
-    final FormDataMultiPart mp = new FormDataMultiPart();
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(),
-      lensSessionId, medType));
-    mp.bodyPart(new FormDataBodyPart(
-      FormDataContentDisposition.name("fact").fileName("fact").build(),
-      cubeObjectFactory.createXFactTable(f), medType));
     APIResult result = target()
       .path("metastore")
-      .path("facts")
+      .path("facts").queryParam("sessionid", lensSessionId)
       .request(mediaType)
-      .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
+      .post(Entity.entity(
+          new GenericEntity<JAXBElement<XFactTable>>(cubeObjectFactory.createXFactTable(f)) {
+          }, mediaType),
+        APIResult.class);
     assertSuccess(result);
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreServiceJSON.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreServiceJSON.java b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreServiceJSON.java
deleted file mode 100644
index 85580da..0000000
--- a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreServiceJSON.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.server.metastore;
-
-
-public class TestMetastoreServiceJSON { //extends TestMetastoreService {
-  /*public TestMetastoreServiceJSON() {
-    super();
-    mediaType = MediaType.APPLICATION_JSON;
-    dbPFX = "TestMetastoreServiceJSON_";
-  }*/
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/metrics/TestResourceMethodMetrics.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/metrics/TestResourceMethodMetrics.java b/lens-server/src/test/java/org/apache/lens/server/metrics/TestResourceMethodMetrics.java
index 515ac13..c20ddf8 100644
--- a/lens-server/src/test/java/org/apache/lens/server/metrics/TestResourceMethodMetrics.java
+++ b/lens-server/src/test/java/org/apache/lens/server/metrics/TestResourceMethodMetrics.java
@@ -50,11 +50,10 @@ import org.apache.lens.server.common.TestResourceFile;
 import org.apache.lens.server.metastore.CubeMetastoreServiceImpl;
 import org.apache.lens.server.query.TestQueryService;
 
-import org.glassfish.jersey.client.ClientConfig;
 import org.glassfish.jersey.media.multipart.FormDataBodyPart;
 import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
 import org.glassfish.jersey.media.multipart.FormDataMultiPart;
-import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.glassfish.jersey.test.TestProperties;
 import org.testng.Assert;
 import org.testng.annotations.AfterTest;
 import org.testng.annotations.BeforeTest;
@@ -87,16 +86,16 @@ public class TestResourceMethodMetrics extends LensAllApplicationJerseyTest {
   }
 
   private void createTable(String tblName) throws InterruptedException {
-    LensServerTestUtil.createTable(tblName, target(), lensSessionId);
+    LensServerTestUtil.createTable(tblName, target(), lensSessionId, defaultMT);
   }
 
   private void loadData(String tblName, final String testDataFile) throws InterruptedException {
-    LensServerTestUtil.loadDataFromClasspath(tblName, testDataFile, target(), lensSessionId);
+    LensServerTestUtil.loadDataFromClasspath(tblName, testDataFile, target(), lensSessionId, defaultMT);
   }
 
   @AfterTest
   public void tearDown() throws Exception {
-    LensServerTestUtil.dropTable(TestQueryService.TEST_TABLE, target(), lensSessionId);
+    LensServerTestUtil.dropTable(TestQueryService.TEST_TABLE, target(), lensSessionId, defaultMT);
     metastoreService.closeSession(lensSessionId);
     super.tearDown();
   }
@@ -110,14 +109,11 @@ public class TestResourceMethodMetrics extends LensAllApplicationJerseyTest {
 
   @Override
   protected Application configure() {
+    enable(TestProperties.LOG_TRAFFIC);
+    enable(TestProperties.DUMP_ENTITY);
     return new LensApplication();
   }
 
-  @Override
-  protected void configureClient(ClientConfig config) {
-    config.register(MultiPartFeature.class);
-  }
-
   @Test
   public void test() throws Exception {
     boolean enabled = metricsSvc.isEnableResourceMethodMetering();
@@ -241,7 +237,7 @@ public class TestResourceMethodMetrics extends LensAllApplicationJerseyTest {
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
       MediaType.APPLICATION_XML_TYPE));
-    final QueryHandle handle = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+    final QueryHandle handle = target.request(mediaType).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
         new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
 
     Assert.assertNotNull(handle);


[44/51] [abbrv] lens git commit: LENS-920 : Fix issues in producing and consuming json for all api

Posted by de...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/session/TestSessionResource.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/session/TestSessionResource.java b/lens-server/src/test/java/org/apache/lens/server/session/TestSessionResource.java
index 3055ce5..bde7b9b 100644
--- a/lens-server/src/test/java/org/apache/lens/server/session/TestSessionResource.java
+++ b/lens-server/src/test/java/org/apache/lens/server/session/TestSessionResource.java
@@ -53,11 +53,10 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 
-import org.glassfish.jersey.client.ClientConfig;
 import org.glassfish.jersey.media.multipart.FormDataBodyPart;
 import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
 import org.glassfish.jersey.media.multipart.FormDataMultiPart;
-import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.glassfish.jersey.test.TestProperties;
 import org.testng.Assert;
 import org.testng.annotations.AfterTest;
 import org.testng.annotations.BeforeTest;
@@ -101,39 +100,31 @@ public class TestSessionResource extends LensJerseyTest {
    */
   @Override
   protected Application configure() {
+    enable(TestProperties.LOG_TRAFFIC);
+    enable(TestProperties.DUMP_ENTITY);
     return new SessionApp();
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.glassfish.jersey.test.JerseyTest#configureClient(org.glassfish.jersey.client.ClientConfig)
-   */
-  @Override
-  protected void configureClient(ClientConfig config) {
-    config.register(MultiPartFeature.class);
-  }
-
   /**
    * Test session.
    */
-  @Test
-  public void testSession() {
+  @Test(dataProvider = "mediaTypeData")
+  public void testSession(MediaType mt) {
     final WebTarget target = target().path("session");
     final FormDataMultiPart mp = new FormDataMultiPart();
 
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("username").build(), "foo"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("password").build(), "bar"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionconf").fileName("sessionconf").build(),
-      new LensConf(), MediaType.APPLICATION_XML_TYPE));
+      new LensConf(), mt));
 
-    final LensSessionHandle handle = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+    final LensSessionHandle handle = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
       LensSessionHandle.class);
     Assert.assertNotNull(handle);
 
     // get all session params
     final WebTarget paramtarget = target().path("session/params");
-    StringList sessionParams = paramtarget.queryParam("sessionid", handle).request().get(StringList.class);
+    StringList sessionParams = paramtarget.queryParam("sessionid", handle).request(mt).get(StringList.class);
     System.out.println("Session params:" + sessionParams.getElements());
     Assert.assertTrue(sessionParams.getElements().size() > 1);
     Assert.assertTrue(sessionParams.getElements().contains("lens.session.cluster.user=testlensuser"));
@@ -142,14 +133,14 @@ public class TestSessionResource extends LensJerseyTest {
     // set hive variable
     FormDataMultiPart setpart = new FormDataMultiPart();
     setpart.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), handle,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     setpart.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("key").build(), "hivevar:myvar"));
     setpart.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("value").build(), "10"));
-    APIResult result = paramtarget.request().put(Entity.entity(setpart, MediaType.MULTIPART_FORM_DATA_TYPE),
+    APIResult result = paramtarget.request(mt).put(Entity.entity(setpart, MediaType.MULTIPART_FORM_DATA_TYPE),
       APIResult.class);
     Assert.assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
     // get myvar session params
-    sessionParams = paramtarget.queryParam("sessionid", handle).queryParam("key", "hivevar:myvar").request()
+    sessionParams = paramtarget.queryParam("sessionid", handle).queryParam("key", "hivevar:myvar").request(mt)
       .get(StringList.class);
     System.out.println("Session params:" + sessionParams.getElements());
     Assert.assertEquals(sessionParams.getElements().size(), 1);
@@ -158,39 +149,39 @@ public class TestSessionResource extends LensJerseyTest {
     // set hive conf
     setpart = new FormDataMultiPart();
     setpart.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), handle,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     setpart.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("key").build(), "hiveconf:my.conf"));
     setpart.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("value").build(), "myvalue"));
-    result = paramtarget.request().put(Entity.entity(setpart, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
+    result = paramtarget.request(mt).put(Entity.entity(setpart, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
     Assert.assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
     // get the my.conf session param
-    sessionParams = paramtarget.queryParam("sessionid", handle).queryParam("key", "my.conf").request()
+    sessionParams = paramtarget.queryParam("sessionid", handle).queryParam("key", "my.conf").request(mt)
       .get(StringList.class);
     System.out.println("Session params:" + sessionParams.getElements());
     Assert.assertEquals(sessionParams.getElements().size(), 1);
     Assert.assertTrue(sessionParams.getElements().contains("my.conf=myvalue"));
     // get server params on session
     try {
-      paramtarget.queryParam("sessionid", handle).queryParam("key", "lens.server.persist.location").request()
+      paramtarget.queryParam("sessionid", handle).queryParam("key", "lens.server.persist.location").request(mt)
         .get(StringList.class);
       Assert.fail("Expected 404");
     } catch (Exception ne) {
       Assert.assertTrue(ne instanceof NotFoundException);
     }
     // get all params verbose
-    sessionParams = paramtarget.queryParam("sessionid", handle).queryParam("verbose", true).request()
+    sessionParams = paramtarget.queryParam("sessionid", handle).queryParam("verbose", true).request(mt)
       .get(StringList.class);
     System.out.println("Session params:" + sessionParams.getElements());
     Assert.assertTrue(sessionParams.getElements().size() > 1);
 
     // Create another session
-    final LensSessionHandle handle2 = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+    final LensSessionHandle handle2 = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
       LensSessionHandle.class);
     Assert.assertNotNull(handle);
 
     // get myvar session params on handle2
     try {
-      paramtarget.queryParam("sessionid", handle2).queryParam("key", "hivevar:myvar").request()
+      paramtarget.queryParam("sessionid", handle2).queryParam("key", "hivevar:myvar").request(mt)
         .get(StringList.class);
       Assert.fail("Expected 404");
     } catch (Exception ne) {
@@ -198,7 +189,7 @@ public class TestSessionResource extends LensJerseyTest {
     }
     // get the my.conf session param on handle2
     try {
-      sessionParams = paramtarget.queryParam("sessionid", handle2).queryParam("key", "my.conf").request()
+      sessionParams = paramtarget.queryParam("sessionid", handle2).queryParam("key", "my.conf").request(mt)
         .get(StringList.class);
       System.out.println("sessionParams:" + sessionParams.getElements());
       Assert.fail("Expected 404");
@@ -207,36 +198,36 @@ public class TestSessionResource extends LensJerseyTest {
     }
 
     // close session
-    result = target.queryParam("sessionid", handle).request().delete(APIResult.class);
+    result = target.queryParam("sessionid", handle).request(mt).delete(APIResult.class);
     Assert.assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
 
     // now getting session params should return session is expired
     try {
-      sessionParams = paramtarget.queryParam("sessionid", handle).queryParam("key", "hivevar:myvar").request()
+      sessionParams = paramtarget.queryParam("sessionid", handle).queryParam("key", "hivevar:myvar").request(mt)
             .get(StringList.class);
       Assert.fail("Expected 410");
     } catch(ClientErrorException ce) {
       Assert.assertEquals(ce.getResponse().getStatus(), 410);
     }
 
-    result = target.queryParam("sessionid", handle2).request().delete(APIResult.class);
+    result = target.queryParam("sessionid", handle2).request(mt).delete(APIResult.class);
     Assert.assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
   }
 
   /**
    * Test resource.
    */
-  @Test
-  public void testResource() {
+  @Test(dataProvider = "mediaTypeData")
+  public void testResource(MediaType mt) {
     final WebTarget target = target().path("session");
     final FormDataMultiPart mp = new FormDataMultiPart();
 
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("username").build(), "foo"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("password").build(), "bar"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionconf").fileName("sessionconf").build(),
-      new LensConf(), MediaType.APPLICATION_XML_TYPE));
+      new LensConf(), mt));
 
-    final LensSessionHandle handle = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+    final LensSessionHandle handle = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
       LensSessionHandle.class);
     Assert.assertNotNull(handle);
 
@@ -245,37 +236,38 @@ public class TestSessionResource extends LensJerseyTest {
     final WebTarget resourcetarget = target().path("session/resources");
     final FormDataMultiPart mp1 = new FormDataMultiPart();
     mp1.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), handle,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp1.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("type").build(), "file"));
     mp1.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("path").build(),
         lensSiteFilePath));
-    APIResult result = resourcetarget.path("add").request()
+    APIResult result = resourcetarget.path("add").request(mt)
       .put(Entity.entity(mp1, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
     Assert.assertEquals(result.getStatus(), Status.SUCCEEDED);
 
     // list all resources
-    StringList listResources = resourcetarget.path("list").queryParam("sessionid", handle).request()
+    StringList listResources = resourcetarget.path("list").queryParam("sessionid", handle).request(mt)
       .get(StringList.class);
     Assert.assertEquals(listResources.getElements().size(), 1);
 
     // delete the resource
     final FormDataMultiPart mp2 = new FormDataMultiPart();
     mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), handle,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("type").build(), "file"));
     mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("path").build(),
         lensSiteFilePath));
-    result = resourcetarget.path("delete").request()
+    result = resourcetarget.path("delete").request(mt)
       .put(Entity.entity(mp2, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
     Assert.assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
 
     // list all resources
     StringList listResourcesAfterDeletion = resourcetarget.path("list").queryParam("sessionid", handle)
-      .request().get(StringList.class);
-    Assert.assertNull(listResourcesAfterDeletion.getElements());
+      .request(mt).get(StringList.class);
+    Assert.assertTrue(listResourcesAfterDeletion.getElements() == null
+      || listResourcesAfterDeletion.getElements().isEmpty());
 
     // close session
-    result = target.queryParam("sessionid", handle).request().delete(APIResult.class);
+    result = target.queryParam("sessionid", handle).request(mt).delete(APIResult.class);
     Assert.assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
   }
 
@@ -284,8 +276,8 @@ public class TestSessionResource extends LensJerseyTest {
    *
    * @throws org.apache.lens.server.api.error.LensException the lens exception
    */
-  @Test
-  public void testAuxJars() throws LensException, IOException, LenServerTestException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testAuxJars(MediaType mt) throws LensException, IOException, LenServerTestException {
     final WebTarget target = target().path("session");
     final FormDataMultiPart mp = new FormDataMultiPart();
     final LensConf sessionconf = new LensConf();
@@ -299,11 +291,9 @@ public class TestSessionResource extends LensJerseyTest {
       mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("username").build(), "foo"));
       mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("password").build(), "bar"));
       mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionconf").fileName("sessionconf").build(),
-        sessionconf, MediaType.APPLICATION_XML_TYPE));
-      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionconf").fileName("sessionconf").build(),
-        new LensConf(), MediaType.APPLICATION_XML_TYPE));
+        sessionconf, mt));
 
-      final LensSessionHandle handle = target.request()
+      final LensSessionHandle handle = target.request(mt)
         .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), LensSessionHandle.class);
       Assert.assertNotNull(handle);
 
@@ -320,13 +310,13 @@ public class TestSessionResource extends LensJerseyTest {
 
       final WebTarget resourcetarget = target().path("session/resources");
       // list all resources
-      StringList listResources = resourcetarget.path("list").queryParam("sessionid", handle).request()
+      StringList listResources = resourcetarget.path("list").queryParam("sessionid", handle).request(mt)
         .get(StringList.class);
       Assert.assertEquals(listResources.getElements().size(), 1);
       Assert.assertTrue(listResources.getElements().get(0).contains(jarFileName));
 
       // close session
-      APIResult result = target.queryParam("sessionid", handle).request().delete(APIResult.class);
+      APIResult result = target.queryParam("sessionid", handle).request(mt).delete(APIResult.class);
       Assert.assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
     } finally {
       LensServerTestFileUtils.deleteFile(jarFile);
@@ -336,22 +326,23 @@ public class TestSessionResource extends LensJerseyTest {
   /**
    * Test wrong auth.
    */
-  @Test
-  public void testWrongAuth() {
+  @Test(dataProvider = "mediaTypeData")
+  public void testWrongAuth(MediaType mt) {
     final WebTarget target = target().path("session");
     final FormDataMultiPart mp = new FormDataMultiPart();
 
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("username").build(), "a"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("password").build(), "b"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionconf").fileName("sessionconf").build(),
-      new LensConf(), MediaType.APPLICATION_XML_TYPE));
+      new LensConf(), mt));
 
-    final Response handle = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
+    final Response handle = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
     Assert.assertEquals(handle.getStatus(), 401);
   }
 
-  @Test
-  public void testServerMustRestartOnManualDeletionOfAddedResources() throws IOException, LenServerTestException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testServerMustRestartOnManualDeletionOfAddedResources(MediaType mt)
+    throws IOException, LenServerTestException {
 
     /* Begin: Setup */
 
@@ -360,8 +351,8 @@ public class TestSessionResource extends LensJerseyTest {
     FileUtils.touch(jarFile);
 
     /* Add the created resource jar to lens server */
-    LensSessionHandle sessionHandle = openSession("foo", "bar", new LensConf());
-    addResource(sessionHandle, "jar", jarFile.getPath());
+    LensSessionHandle sessionHandle = openSession("foo", "bar", new LensConf(), mt);
+    addResource(sessionHandle, "jar", jarFile.getPath(), mt);
 
     /* Delete resource jar from current working directory */
     LensServerTestFileUtils.deleteFile(jarFile);
@@ -372,7 +363,7 @@ public class TestSessionResource extends LensJerseyTest {
     restartLensServer();
   }
 
-  private LensSessionHandle openSession(final String userName, final String passwd, final LensConf conf) {
+  private LensSessionHandle openSession(final String userName, final String passwd, final LensConf conf, MediaType mt) {
 
     final WebTarget target = target().path("session");
     final FormDataMultiPart mp = new FormDataMultiPart();
@@ -380,23 +371,23 @@ public class TestSessionResource extends LensJerseyTest {
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("username").build(), userName));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("password").build(), passwd));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionconf").fileName("sessionconf").build(),
-      conf, MediaType.APPLICATION_XML_TYPE));
+      conf, mt));
 
-    return target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+    return target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
       LensSessionHandle.class);
 
   }
 
   private void addResource(final LensSessionHandle lensSessionHandle, final String resourceType,
-    final String resourcePath) {
+    final String resourcePath, MediaType mt) {
     final WebTarget target = target().path("session/resources");
     final FormDataMultiPart mp = new FormDataMultiPart();
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionHandle,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("type").build(), resourceType));
     mp.bodyPart(
       new FormDataBodyPart(FormDataContentDisposition.name("path").build(), resourcePath));
-    APIResult result = target.path("add").request()
+    APIResult result = target.path("add").request(mt)
       .put(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
 
     if (!result.getStatus().equals(Status.SUCCEEDED)) {
@@ -404,8 +395,8 @@ public class TestSessionResource extends LensJerseyTest {
     }
   }
 
-  @Test
-  public void testOpenSessionWithDatabase() throws Exception {
+  @Test(dataProvider = "mediaTypeData")
+  public void testOpenSessionWithDatabase(MediaType mt) throws Exception {
     // TEST1 - Check if call with database parameter sets current database
     // Create the test DB
     Hive hive = Hive.get(new HiveConf());
@@ -421,9 +412,9 @@ public class TestSessionResource extends LensJerseyTest {
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("password").build(), "bar"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("database").build(), testDbName));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionconf").fileName("sessionconf").build(),
-      new LensConf(), MediaType.APPLICATION_XML_TYPE));
+      new LensConf(), mt));
 
-    final LensSessionHandle handle = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+    final LensSessionHandle handle = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
       LensSessionHandle.class);
     Assert.assertNotNull(handle);
 
@@ -441,11 +432,11 @@ public class TestSessionResource extends LensJerseyTest {
     form2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("password").build(), "bar"));
     form2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("database").build(), invalidDB));
     form2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionconf").fileName("sessionconf").build(),
-      new LensConf(), MediaType.APPLICATION_XML_TYPE));
+      new LensConf(), mt));
 
     try {
-      final LensSessionHandle handle2 = target.request().post(Entity.entity(form2, MediaType.MULTIPART_FORM_DATA_TYPE),
-        LensSessionHandle.class);
+      final LensSessionHandle handle2 = target.request(mt).post(Entity.entity(form2,
+          MediaType.MULTIPART_FORM_DATA_TYPE), LensSessionHandle.class);
       Assert.fail("Expected above call to fail with not found exception");
     } catch (NotFoundException nfe) {
       // PASS
@@ -455,8 +446,8 @@ public class TestSessionResource extends LensJerseyTest {
   /**
    * Test acquire and release behaviour for closed sessions
    */
-  @Test
-  public void testAcquireReleaseClosedSession() throws Exception {
+  @Test(dataProvider = "mediaTypeData")
+  public void testAcquireReleaseClosedSession(MediaType mt) throws Exception {
     HiveSessionService sessionService = LensServices.get().getService(SessionService.NAME);
 
     LensSessionHandle sessionHandle = sessionService.openSession("foo@localhost", "bar", new HashMap<String, String>());
@@ -485,39 +476,39 @@ public class TestSessionResource extends LensJerseyTest {
     }
   }
 
-  private FormDataMultiPart getMultiFormData(String username, String password) {
+  private FormDataMultiPart getMultiFormData(String username, String password, MediaType mt) {
     final FormDataMultiPart mp = new FormDataMultiPart();
 
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("username").build(), username));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("password").build(), password));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionconf").fileName("sessionconf").build(),
-        new LensConf(), MediaType.APPLICATION_XML_TYPE));
+        new LensConf(), mt));
     return mp;
   }
 
-  @Test
-  public void testSessionEvents() {
+  @Test(dataProvider = "mediaTypeData")
+  public void testSessionEvents(MediaType mt) {
     final WebTarget target = target().path("session");
-    FormDataMultiPart mp = getMultiFormData("foo", "bar");
+    FormDataMultiPart mp = getMultiFormData("foo", "bar", mt);
 
-    LensSessionHandle lensSessionHandle = target.request().post(
+    LensSessionHandle lensSessionHandle = target.request(mt).post(
         Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), LensSessionHandle.class);
     Assert.assertTrue(lensSessionHandle != null);
     Assert.assertTrue(metricsSvc.getTotalOpenedSessions() >= 1);
     Assert.assertTrue(metricsSvc.getActiveSessions() >= 1);
 
-    LensSessionHandle lensSessionHandle1 = target.request().post(
+    LensSessionHandle lensSessionHandle1 = target.request(mt).post(
         Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), LensSessionHandle.class);
     Assert.assertTrue(lensSessionHandle1 != null);
     Assert.assertTrue(metricsSvc.getTotalOpenedSessions() >= 2);
     Assert.assertTrue(metricsSvc.getActiveSessions() >= 2);
 
-    APIResult result = target.queryParam("sessionid", lensSessionHandle).request().delete(APIResult.class);
+    APIResult result = target.queryParam("sessionid", lensSessionHandle).request(mt).delete(APIResult.class);
     Assert.assertTrue(metricsSvc.getTotalOpenedSessions() >= 1);
     Assert.assertTrue(metricsSvc.getTotalClosedSessions() >= 1);
     Assert.assertTrue(metricsSvc.getActiveSessions() >= 1);
 
-    result = target.queryParam("sessionid", lensSessionHandle1).request().delete(APIResult.class);
+    result = target.queryParam("sessionid", lensSessionHandle1).request(mt).delete(APIResult.class);
     Assert.assertTrue(metricsSvc.getTotalClosedSessions() >= 2);
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/ui/TestSessionUIResource.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/ui/TestSessionUIResource.java b/lens-server/src/test/java/org/apache/lens/server/ui/TestSessionUIResource.java
index 6f7c216..be87e65 100644
--- a/lens-server/src/test/java/org/apache/lens/server/ui/TestSessionUIResource.java
+++ b/lens-server/src/test/java/org/apache/lens/server/ui/TestSessionUIResource.java
@@ -28,11 +28,9 @@ import org.apache.lens.api.LensConf;
 import org.apache.lens.api.LensSessionHandle;
 import org.apache.lens.server.LensJerseyTest;
 
-import org.glassfish.jersey.client.ClientConfig;
 import org.glassfish.jersey.media.multipart.FormDataBodyPart;
 import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
 import org.glassfish.jersey.media.multipart.FormDataMultiPart;
-import org.glassfish.jersey.media.multipart.MultiPartFeature;
 import org.testng.Assert;
 import org.testng.annotations.AfterTest;
 import org.testng.annotations.BeforeTest;
@@ -74,16 +72,6 @@ public class TestSessionUIResource extends LensJerseyTest {
     return new UIApp();
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.glassfish.jersey.test.JerseyTest#configureClient(org.glassfish.jersey.client.ClientConfig)
-   */
-  @Override
-  protected void configureClient(ClientConfig config) {
-    config.register(MultiPartFeature.class);
-  }
-
   private FormDataMultiPart getMultiFormData(String username, String password) {
     final FormDataMultiPart mp = new FormDataMultiPart();
 

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 746204e..349b71c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -38,8 +38,8 @@
     <servlet3.version>3.0.1</servlet3.version>
     <servlet.api.version>2.5</servlet.api.version>
     <jsp.api.version>2.0</jsp.api.version>
-    <ws.rs.version>2.0</ws.rs.version>
-    <jaxb.api.version>2.2.11</jaxb.api.version>
+    <ws.rs.version>2.0.1</ws.rs.version>
+    <jaxb.api.version>2.2.12</jaxb.api.version>
     <javax.mail.version>1.4</javax.mail.version>
 
     <!-- hadoop stack -->
@@ -71,8 +71,8 @@
     <libthrift.version>0.9.0</libthrift.version>
 
     <jetty.version>6.1.26</jetty.version>
-    <jersey.version>2.3.1</jersey.version>
-    <grizzly.version>2.3.6</grizzly.version>
+    <jersey.version>2.22.1</jersey.version>
+    <grizzly.version>2.3.23</grizzly.version>
     <metrics.version>3.0.2</metrics.version>
     <subethasmtp.version>3.1.7</subethasmtp.version>
     <liquibase.version>3.0.7</liquibase.version>
@@ -963,6 +963,10 @@
             <artifactId>jersey-server</artifactId>
           </exclusion>
           <exclusion>
+            <groupId>com.sun.jersey</groupId>
+            <artifactId>jersey-client</artifactId>
+          </exclusion>
+          <exclusion>
             <groupId>tomcat</groupId>
             <artifactId>jasper-compiler</artifactId>
           </exclusion>
@@ -1293,6 +1297,11 @@
       </dependency>
       <dependency>
         <groupId>org.glassfish.jersey.media</groupId>
+        <artifactId>jersey-media-jaxb</artifactId>
+        <version>${jersey.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.glassfish.jersey.media</groupId>
         <artifactId>jersey-media-multipart</artifactId>
         <version>${jersey.version}</version>
       </dependency>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/src/site/apt/admin/config.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/admin/config.apt b/src/site/apt/admin/config.apt
index 703abb3..fb15d83 100644
--- a/src/site/apt/admin/config.apt
+++ b/src/site/apt/admin/config.apt
@@ -123,130 +123,134 @@ Lens server configuration
 *--+--+---+--+
 |48|lens.server.mode|OPEN|The mode in which server should run. Allowed values are OPEN, READ_ONLY, METASTORE_READONLY, METASTORE_NODROP. OPEN mode will allow all requests. READ_ONLY mode will allow all requests on session resouce and only GET requests on all other resources. METASTORE_READONLY will allow GET on metastore and all other requests in other services. METASTORE_NODROP will not allow DELETE on metastore, will allow all other requests.|
 *--+--+---+--+
-|49|lens.server.multipart.ws.feature.impl|org.glassfish.jersey.media.multipart.MultiPartFeature|Implementation class for query scheduler resource|
+|49|lens.server.moxyjson.ws.feature.impl|org.glassfish.jersey.moxy.json.MoxyJsonFeature|Enable Moxy json feature|
 *--+--+---+--+
-|50|lens.server.persist.location|file:///tmp/lensserver|The directory in which lens server will persist its state when it is going down. The location be on any Hadoop compatible file system. Server will read from the location when it is restarted and recovery is enabled. So, Server should have both read and write permissions to the location|
+|50|lens.server.moxyjsonconfigresovler.ws.feature.impl|org.apache.lens.api.util.MoxyJsonConfigurationContextResolver|Moxy json configuration resolver|
 *--+--+---+--+
-|51|lens.server.query.acceptors| |Query Acceptors configured. Query acceptors are consulted first, before anything happens for the given query. They can either return null or return a messaging indicating why the given query shouldn't be accepted. These can be used to filter out queries at the earliest.|
+|51|lens.server.multipart.ws.feature.impl|org.glassfish.jersey.media.multipart.MultiPartFeature|Implementation class for query scheduler resource|
 *--+--+---+--+
-|52|lens.server.query.launching.constraint.factories|org.apache.lens.server.query.constraint.TotalQueryCostCeilingConstraintFactory|Factories used to instantiate constraints enforced on queries by lens. Every Factory should be an implementation of org.apache.lens.server.api.common.ConfigBasedObjectCreationFactory and create an implementation of org.apache.lens.server.api.query.constraint.QueryLaunchingConstraint. A query will be launched only if all constraints pass.|
+|52|lens.server.persist.location|file:///tmp/lensserver|The directory in which lens server will persist its state when it is going down. The location be on any Hadoop compatible file system. Server will read from the location when it is restarted and recovery is enabled. So, Server should have both read and write permissions to the location|
 *--+--+---+--+
-|53|lens.server.query.phase1.rewriters| |Query phase 1 rewriters. This is to convert user query to cube query. The resulting cube query will be passed for validation and rewriting to hql query.\ |
+|53|lens.server.query.acceptors| |Query Acceptors configured. Query acceptors are consulted first, before anything happens for the given query. They can either return null or return a messaging indicating why the given query shouldn't be accepted. These can be used to filter out queries at the earliest.|
+*--+--+---+--+
+|54|lens.server.query.launching.constraint.factories|org.apache.lens.server.query.constraint.TotalQueryCostCeilingConstraintFactory|Factories used to instantiate constraints enforced on queries by lens. Every Factory should be an implementation of org.apache.lens.server.api.common.ConfigBasedObjectCreationFactory and create an implementation of org.apache.lens.server.api.query.constraint.QueryLaunchingConstraint. A query will be launched only if all constraints pass.|
+*--+--+---+--+
+|55|lens.server.query.phase1.rewriters| |Query phase 1 rewriters. This is to convert user query to cube query. The resulting cube query will be passed for validation and rewriting to hql query.\ |
 |  |                                  | |Use cases will be to use extra intelligence to convert user query to optimized cube query.                                                              \ |
 |  |                                  | |Or define shortcuts for certain frequently used queries :)                                                                                                |
 *--+--+---+--+
-|54|lens.server.query.resultset.retention|1 day|Lens query resultset retention period. Default 1 day|
+|56|lens.server.query.resultset.retention|1 day|Lens query resultset retention period. Default 1 day|
 *--+--+---+--+
-|55|lens.server.query.service.impl|org.apache.lens.server.query.QueryExecutionServiceImpl|Implementation class for query execution service|
+|57|lens.server.query.service.impl|org.apache.lens.server.query.QueryExecutionServiceImpl|Implementation class for query execution service|
 *--+--+---+--+
-|56|lens.server.query.state.logger.enabled|true|Disable or enable the query state logger with this config. The location for the logger can be specified in logback xml for the class org.apache.lens.server.query.QueryExecutionServiceImpl.QueryStatusLogger|
+|58|lens.server.query.state.logger.enabled|true|Disable or enable the query state logger with this config. The location for the logger can be specified in logback xml for the class org.apache.lens.server.query.QueryExecutionServiceImpl.QueryStatusLogger|
 *--+--+---+--+
-|57|lens.server.query.ws.resource.impl|org.apache.lens.server.query.QueryServiceResource|Implementation class for Query Resource|
+|59|lens.server.query.ws.resource.impl|org.apache.lens.server.query.QueryServiceResource|Implementation class for Query Resource|
 *--+--+---+--+
-|58|lens.server.querypurger.sleep.interval|10000|The interval(milliseconds) with which purger to run periodically. Default 10 sec.|
+|60|lens.server.querypurger.sleep.interval|10000|The interval(milliseconds) with which purger to run periodically. Default 10 sec.|
 *--+--+---+--+
-|59|lens.server.quota.service.impl|org.apache.lens.server.quota.QuotaServiceImpl|Implementation class for quota service|
+|61|lens.server.quota.service.impl|org.apache.lens.server.quota.QuotaServiceImpl|Implementation class for quota service|
 *--+--+---+--+
-|60|lens.server.quota.ws.resource.impl|org.apache.lens.server.quota.QuotaResource|Implementation class for Quota Resource|
+|62|lens.server.quota.ws.resource.impl|org.apache.lens.server.quota.QuotaResource|Implementation class for Quota Resource|
 *--+--+---+--+
-|61|lens.server.recover.onrestart|true|If the flag is enabled, all the services will be started from last saved state, if disabled all the services will start afresh|
+|63|lens.server.recover.onrestart|true|If the flag is enabled, all the services will be started from last saved state, if disabled all the services will start afresh|
 *--+--+---+--+
-|62|lens.server.restart.enabled|true|If flag is enabled, all the services will be persisted to persistent location passed.|
+|64|lens.server.restart.enabled|true|If flag is enabled, all the services will be persisted to persistent location passed.|
 *--+--+---+--+
-|63|lens.server.resultset.purge.enabled|false|Whether to purge the query results|
+|65|lens.server.resultset.purge.enabled|false|Whether to purge the query results|
 *--+--+---+--+
-|64|lens.server.resultsetpurger.sleep.interval.secs|3600|Periodicity for Query result purger runs. Default 1 hour.|
+|66|lens.server.resultsetpurger.sleep.interval.secs|3600|Periodicity for Query result purger runs. Default 1 hour.|
 *--+--+---+--+
-|65|lens.server.savedquery.jdbc.dialectclass|org.apache.lens.server.query.save.SavedQueryDao$HSQLDialect|Dialect of the target DB, Default is HSQL. Override with the target DB used.|
+|67|lens.server.savedquery.jdbc.dialectclass|org.apache.lens.server.query.save.SavedQueryDao$HSQLDialect|Dialect of the target DB, Default is HSQL. Override with the target DB used.|
 *--+--+---+--+
-|66|lens.server.savedquery.list.default.count|20|Key denoting the default fetch value of saved query list api.|
+|68|lens.server.savedquery.list.default.count|20|Key denoting the default fetch value of saved query list api.|
 *--+--+---+--+
-|67|lens.server.savedquery.list.default.offset|0|Key denoting the default start value of saved query list api.|
+|69|lens.server.savedquery.list.default.offset|0|Key denoting the default start value of saved query list api.|
 *--+--+---+--+
-|68|lens.server.savedquery.service.impl|org.apache.lens.server.query.save.SavedQueryServiceImpl|Implementation class for saved query service|
+|70|lens.server.savedquery.service.impl|org.apache.lens.server.query.save.SavedQueryServiceImpl|Implementation class for saved query service|
 *--+--+---+--+
-|69|lens.server.savedquery.ws.resource.impl|org.apache.lens.server.query.save.SavedQueryResource|Implementation class for Saved query Resource|
+|71|lens.server.savedquery.ws.resource.impl|org.apache.lens.server.query.save.SavedQueryResource|Implementation class for Saved query Resource|
 *--+--+---+--+
-|70|lens.server.scheduler.service.impl|org.apache.lens.server.scheduler.SchedulerServiceImpl|Implementation class for query scheduler service|
+|72|lens.server.scheduler.service.impl|org.apache.lens.server.scheduler.SchedulerServiceImpl|Implementation class for query scheduler service|
 *--+--+---+--+
-|71|lens.server.scheduler.ws.resource.impl|org.apache.lens.server.scheduler.ScheduleResource|Implementation class for query scheduler resource|
+|73|lens.server.scheduler.ws.resource.impl|org.apache.lens.server.scheduler.ScheduleResource|Implementation class for query scheduler resource|
 *--+--+---+--+
-|72|lens.server.scheduling.queue.poll.interval.millisec|2000|The interval at which submission thread will poll scheduling queue to fetch the next query for submission. If value is less than equal to 0, then it would mean that thread will continuosly poll without sleeping. The interval has to be given in milliseconds.|
+|74|lens.server.scheduling.queue.poll.interval.millisec|2000|The interval at which submission thread will poll scheduling queue to fetch the next query for submission. If value is less than equal to 0, then it would mean that thread will continuosly poll without sleeping. The interval has to be given in milliseconds.|
 *--+--+---+--+
-|73|lens.server.serverMode.ws.filter.impl|org.apache.lens.server.ServerModeFilter|Implementation class for ServerMode Filter|
+|75|lens.server.serverMode.ws.filter.impl|org.apache.lens.server.ServerModeFilter|Implementation class for ServerMode Filter|
 *--+--+---+--+
-|74|lens.server.service.provider.factory|org.apache.lens.server.ServiceProviderFactoryImpl|Service provider factory implementation class. This parameter is used to lookup the factory implementation class name that would provide an instance of ServiceProvider. Users should instantiate the class to obtain its instance. Example -- Class spfClass = conf.getClass("lens.server.service.provider.factory", null, ServiceProviderFactory.class); ServiceProviderFactory spf = spfClass.newInstance(); ServiceProvider serviceProvider = spf.getServiceProvider(); -- This is not supposed to be overridden by users.|
+|76|lens.server.service.provider.factory|org.apache.lens.server.ServiceProviderFactoryImpl|Service provider factory implementation class. This parameter is used to lookup the factory implementation class name that would provide an instance of ServiceProvider. Users should instantiate the class to obtain its instance. Example -- Class spfClass = conf.getClass("lens.server.service.provider.factory", null, ServiceProviderFactory.class); ServiceProviderFactory spf = spfClass.newInstance(); ServiceProvider serviceProvider = spf.getServiceProvider(); -- This is not supposed to be overridden by users.|
 *--+--+---+--+
-|75|lens.server.servicenames|session,query,metastore,scheduler,quota|These services would be started in the specified order when lens-server starts up|
+|77|lens.server.servicenames|session,query,metastore,scheduler,quota|These services would be started in the specified order when lens-server starts up|
 *--+--+---+--+
-|76|lens.server.session.expiry.service.interval.secs|3600|Interval at which lens session expiry service runs|
+|78|lens.server.session.expiry.service.interval.secs|3600|Interval at which lens session expiry service runs|
 *--+--+---+--+
-|77|lens.server.session.service.impl|org.apache.lens.server.session.HiveSessionService|Implementation class for session service|
+|79|lens.server.session.service.impl|org.apache.lens.server.session.HiveSessionService|Implementation class for session service|
 *--+--+---+--+
-|78|lens.server.session.timeout.seconds|86400|Lens session timeout in seconds.If there is no activity on the session for this period then the session will be closed.Default timeout is one day.|
+|80|lens.server.session.timeout.seconds|86400|Lens session timeout in seconds.If there is no activity on the session for this period then the session will be closed.Default timeout is one day.|
 *--+--+---+--+
-|79|lens.server.session.ws.resource.impl|org.apache.lens.server.session.SessionResource|Implementation class for Session Resource|
+|81|lens.server.session.ws.resource.impl|org.apache.lens.server.session.SessionResource|Implementation class for Session Resource|
 *--+--+---+--+
-|80|lens.server.snapshot.interval|300000|Snapshot interval time in miliseconds for saving lens server state.|
+|82|lens.server.snapshot.interval|300000|Snapshot interval time in miliseconds for saving lens server state.|
 *--+--+---+--+
-|81|lens.server.state.persist.out.stream.buffer.size|1048576|Output Stream Buffer Size used in writing lens server state to file system. Size is in bytes.|
+|83|lens.server.state.persist.out.stream.buffer.size|1048576|Output Stream Buffer Size used in writing lens server state to file system. Size is in bytes.|
 *--+--+---+--+
-|82|lens.server.statistics.db|lensstats|Database to which statistics tables are created and partitions are added.|
+|84|lens.server.statistics.db|lensstats|Database to which statistics tables are created and partitions are added.|
 *--+--+---+--+
-|83|lens.server.statistics.log.rollover.interval|3600000|Default rate which log statistics store scans for rollups in milliseconds.|
+|85|lens.server.statistics.log.rollover.interval|3600000|Default rate which log statistics store scans for rollups in milliseconds.|
 *--+--+---+--+
-|84|lens.server.statistics.store.class|org.apache.lens.server.stats.store.log.LogStatisticsStore|Default implementation of class used to persist Lens Statistics.|
+|86|lens.server.statistics.store.class|org.apache.lens.server.stats.store.log.LogStatisticsStore|Default implementation of class used to persist Lens Statistics.|
 *--+--+---+--+
-|85|lens.server.statistics.warehouse.dir|file:///tmp/lens/statistics/warehouse|Default top level location where stats are moved by the log statistics store.|
+|87|lens.server.statistics.warehouse.dir|file:///tmp/lens/statistics/warehouse|Default top level location where stats are moved by the log statistics store.|
 *--+--+---+--+
-|86|lens.server.total.query.cost.ceiling.per.user|-1.0|A query submitted by user will be launched only if total query cost of all current launched queries of user is less than or equal to total query cost ceiling defined by this property. This configuration value is only useful when TotalQueryCostCeilingConstraint is enabled by using org.apache.lens.server.query.constraint.TotalQueryCostCeilingConstraintFactory as one of the factories in lens.server.query.constraint.factories property. Default is -1.0 which means that there is no limit on the total query cost of launched queries submitted by a user.|
+|88|lens.server.total.query.cost.ceiling.per.user|-1.0|A query submitted by user will be launched only if total query cost of all current launched queries of user is less than or equal to total query cost ceiling defined by this property. This configuration value is only useful when TotalQueryCostCeilingConstraint is enabled by using org.apache.lens.server.query.constraint.TotalQueryCostCeilingConstraintFactory as one of the factories in lens.server.query.constraint.factories property. Default is -1.0 which means that there is no limit on the total query cost of launched queries submitted by a user.|
 *--+--+---+--+
-|87|lens.server.ui.base.uri|http://0.0.0.0:19999/|The base url for the Lens UI Server|
+|89|lens.server.ui.base.uri|http://0.0.0.0:19999/|The base url for the Lens UI Server|
 *--+--+---+--+
-|88|lens.server.ui.enable|true|Bringing up the ui server is optional. By default it brings up UI server.|
+|90|lens.server.ui.enable|true|Bringing up the ui server is optional. By default it brings up UI server.|
 *--+--+---+--+
-|89|lens.server.ui.enable.caching|true|Set this to false to disable static file caching in the UI server|
+|91|lens.server.ui.enable.caching|true|Set this to false to disable static file caching in the UI server|
 *--+--+---+--+
-|90|lens.server.ui.static.dir|webapp/lens-server/static|The base directory to server UI static files from|
+|92|lens.server.ui.static.dir|webapp/lens-server/static|The base directory to server UI static files from|
 *--+--+---+--+
-|91|lens.server.user.resolver.custom.class|full.package.name.Classname|Required for CUSTOM user resolver. In case the provided implementations are not sufficient for user config resolver, a custom classname can be provided. Class should extend org.apache.lens.server.user.UserConfigLoader|
+|93|lens.server.user.resolver.custom.class|full.package.name.Classname|Required for CUSTOM user resolver. In case the provided implementations are not sufficient for user config resolver, a custom classname can be provided. Class should extend org.apache.lens.server.user.UserConfigLoader|
 *--+--+---+--+
-|92|lens.server.user.resolver.db.keys|lens.session.cluster.user,mapred.job.queue.name|Required for DATABASE and LDAP_BACKED_DATABASE user resolvers. For database based user config loaders, the conf keys that will be loaded from database.|
+|94|lens.server.user.resolver.db.keys|lens.session.cluster.user,mapred.job.queue.name|Required for DATABASE and LDAP_BACKED_DATABASE user resolvers. For database based user config loaders, the conf keys that will be loaded from database.|
 *--+--+---+--+
-|93|lens.server.user.resolver.db.query|select clusteruser,queue from user_config_table where username=?|Required for DATABASE and LDAP_BACKED_DATABASE user resolvers. For database based user config loader, this query will be run with single argument = logged in user and the result columns will be assigned to lens.server.user.resolver.db.keys in order. For ldap backed database resolver, the argument to this query will be the intermediate values obtained from ldap.|
+|95|lens.server.user.resolver.db.query|select clusteruser,queue from user_config_table where username=?|Required for DATABASE and LDAP_BACKED_DATABASE user resolvers. For database based user config loader, this query will be run with single argument = logged in user and the result columns will be assigned to lens.server.user.resolver.db.keys in order. For ldap backed database resolver, the argument to this query will be the intermediate values obtained from ldap.|
 *--+--+---+--+
-|94|lens.server.user.resolver.fixed.value| |Required for FIXED user resolver. when lens.server.user.resolver.type=FIXED, This will be the value cluster user will resolve to.|
+|96|lens.server.user.resolver.fixed.value| |Required for FIXED user resolver. when lens.server.user.resolver.type=FIXED, This will be the value cluster user will resolve to.|
 *--+--+---+--+
-|95|lens.server.user.resolver.ldap.bind.dn| |Required for LDAP_BACKED_DATABASE user resolvers. ldap dn for admin binding example: CN=company-it-admin,ou=service-account,ou=company-service-account,dc=dc1,dc=com...|
+|97|lens.server.user.resolver.ldap.bind.dn| |Required for LDAP_BACKED_DATABASE user resolvers. ldap dn for admin binding example: CN=company-it-admin,ou=service-account,ou=company-service-account,dc=dc1,dc=com...|
 *--+--+---+--+
-|96|lens.server.user.resolver.ldap.bind.password| |Required for LDAP_BACKED_DATABASE user resolvers. ldap password for admin binding above|
+|98|lens.server.user.resolver.ldap.bind.password| |Required for LDAP_BACKED_DATABASE user resolvers. ldap password for admin binding above|
 *--+--+---+--+
-|97|lens.server.user.resolver.ldap.fields|department|Required for LDAP_BACKED_DATABASE user resolvers. list of fields to be obtained from ldap. These will be cached by the intermediate db.|
+|99|lens.server.user.resolver.ldap.fields|department|Required for LDAP_BACKED_DATABASE user resolvers. list of fields to be obtained from ldap. These will be cached by the intermediate db.|
 *--+--+---+--+
-|98|lens.server.user.resolver.ldap.intermediate.db.delete.sql|delete from user_department where username=?|Required for LDAP_BACKED_DATABASE user resolvers. query to delete intermediate values from database backing ldap as cache. one argument: logged in user.|
+|100|lens.server.user.resolver.ldap.intermediate.db.delete.sql|delete from user_department where username=?|Required for LDAP_BACKED_DATABASE user resolvers. query to delete intermediate values from database backing ldap as cache. one argument: logged in user.|
 *--+--+---+--+
-|99|lens.server.user.resolver.ldap.intermediate.db.insert.sql|insert into user_department (username, department, expiry) values (?, ?, ?)|Required for LDAP_BACKED_DATABASE user resolvers. query to insert intermediate values from database backing ldap as cache. arguments: first logged in user, then all intermediate values, then current time + expiration time|
+|101|lens.server.user.resolver.ldap.intermediate.db.insert.sql|insert into user_department (username, department, expiry) values (?, ?, ?)|Required for LDAP_BACKED_DATABASE user resolvers. query to insert intermediate values from database backing ldap as cache. arguments: first logged in user, then all intermediate values, then current time + expiration time|
 *--+--+---+--+
-|100|lens.server.user.resolver.ldap.intermediate.db.query|select department from user_department where username=? and expiry>?|Required for LDAP_BACKED_DATABASE user resolvers. query to obtain intermediate values from database backing ldap as cache. two arguments: logged in user and current time.|
+|102|lens.server.user.resolver.ldap.intermediate.db.query|select department from user_department where username=? and expiry>?|Required for LDAP_BACKED_DATABASE user resolvers. query to obtain intermediate values from database backing ldap as cache. two arguments: logged in user and current time.|
 *--+--+---+--+
-|101|lens.server.user.resolver.ldap.search.base| |Required for LDAP_BACKED_DATABASE user resolvers. for searching intermediate values for a user, the search keys. example: cn=users,dc=dc1,dc=dc2...|
+|103|lens.server.user.resolver.ldap.search.base| |Required for LDAP_BACKED_DATABASE user resolvers. for searching intermediate values for a user, the search keys. example: cn=users,dc=dc1,dc=dc2...|
 *--+--+---+--+
-|102|lens.server.user.resolver.ldap.search.filter|(&(objectClass=user)(sAMAccountName=%s))|Required for LDAP_BACKED_DATABASE user resolvers. filter pattern for ldap search|
+|104|lens.server.user.resolver.ldap.search.filter|(&(objectClass=user)(sAMAccountName=%s))|Required for LDAP_BACKED_DATABASE user resolvers. filter pattern for ldap search|
 *--+--+---+--+
-|103|lens.server.user.resolver.ldap.url| |Required for LDAP_BACKED_DATABASE user resolvers. ldap url to connect to.|
+|105|lens.server.user.resolver.ldap.url| |Required for LDAP_BACKED_DATABASE user resolvers. ldap url to connect to.|
 *--+--+---+--+
-|104|lens.server.user.resolver.propertybased.filename|/path/to/propertyfile|Required for PROPERTYBASED user resolver. when lens.server.user.resolver.type is PROPERTYBASED, then this file will be read and parsed to determine cluster user. Each line should contain username followed by DOT followed by property full name followed by equal-to sign and followed by value. example schema of the file is: user1.lens.server.cluster.user=clusteruser1 user1.mapred.job.queue.name=queue1 *.lens.server.cluster.user=defaultclusteruser *.mapred.job.queue.name=default|
+|106|lens.server.user.resolver.propertybased.filename|/path/to/propertyfile|Required for PROPERTYBASED user resolver. when lens.server.user.resolver.type is PROPERTYBASED, then this file will be read and parsed to determine cluster user. Each line should contain username followed by DOT followed by property full name followed by equal-to sign and followed by value. example schema of the file is: user1.lens.server.cluster.user=clusteruser1 user1.mapred.job.queue.name=queue1 *.lens.server.cluster.user=defaultclusteruser *.mapred.job.queue.name=default|
 *--+--+---+--+
-|105|lens.server.user.resolver.type|FIXED|Type of user config resolver. allowed values are FIXED, PROPERTYBASED, DATABASE, LDAP_BACKED_DATABASE, CUSTOM.|
+|107|lens.server.user.resolver.type|FIXED|Type of user config resolver. allowed values are FIXED, PROPERTYBASED, DATABASE, LDAP_BACKED_DATABASE, CUSTOM.|
 *--+--+---+--+
-|106|lens.server.waiting.queries.selection.policy.factories|org.apache.lens.server.query.collect.UserSpecificWaitingQueriesSelectionPolicyFactory|Factories used to instantiate waiting queries selection policies. Every factory should be an implementation of org.apache.lens.server.api.common.ConfigBasedObjectCreationFactory and create an implementation of org.apache.lens.server.api.query.collect.WaitingQueriesSelectionPolicy.|
+|108|lens.server.waiting.queries.selection.policy.factories|org.apache.lens.server.query.collect.UserSpecificWaitingQueriesSelectionPolicyFactory|Factories used to instantiate waiting queries selection policies. Every factory should be an implementation of org.apache.lens.server.api.common.ConfigBasedObjectCreationFactory and create an implementation of org.apache.lens.server.api.query.collect.WaitingQueriesSelectionPolicy.|
 *--+--+---+--+
-|107|lens.server.ws.featurenames|multipart|These JAX-RS Feature(s) would be started in the specified order when lens-server starts up|
+|109|lens.server.ws.featurenames|multipart,moxyjson,moxyjsonconfigresovler|These JAX-RS Feature(s) would be started in the specified order when lens-server starts up|
 *--+--+---+--+
-|108|lens.server.ws.filternames|authentication,consistentState,serverMode|These JAX-RS filters would be started in the specified order when lens-server starts up|
+|110|lens.server.ws.filternames|authentication,consistentState,serverMode|These JAX-RS filters would be started in the specified order when lens-server starts up|
 *--+--+---+--+
-|109|lens.server.ws.listenernames|appevent|These listeners would be called in the specified order when lens-server starts up|
+|111|lens.server.ws.listenernames|appevent|These listeners would be called in the specified order when lens-server starts up|
 *--+--+---+--+
-|110|lens.server.ws.resourcenames|session,metastore,query,quota,scheduler,index,log|These JAX-RS resources would be started in the specified order when lens-server starts up|
+|112|lens.server.ws.resourcenames|session,metastore,query,quota,scheduler,index,log|These JAX-RS resources would be started in the specified order when lens-server starts up|
 *--+--+---+--+
 The configuration parameters and their default values


[06/51] [abbrv] lens git commit: LENS-851 : Replace columns with aliases in where clause of the inner query

Posted by de...@apache.org.
LENS-851 : Replace columns with aliases in where clause of the inner query


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/bf4c0bec
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/bf4c0bec
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/bf4c0bec

Branch: refs/heads/current-release-line
Commit: bf4c0bec023307417de75f4c13ed1c344fc1f06e
Parents: ff891e2
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Sat Dec 12 15:30:23 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Sat Dec 12 15:30:23 2015 +0530

----------------------------------------------------------------------
 lens-api/src/main/resources/lens-errors.conf    | 25 +++++--
 .../lens/cube/error/LensCubeErrorCode.java      |  6 +-
 .../apache/lens/cube/metadata/ExprColumn.java   | 60 ++++++++++-------
 .../lens/cube/metadata/MetastoreUtil.java       | 16 +++++
 .../apache/lens/cube/parse/CandidateFact.java   |  8 +--
 .../apache/lens/cube/parse/GroupbyResolver.java | 15 +----
 .../org/apache/lens/cube/parse/HQLParser.java   | 11 +++-
 .../lens/cube/parse/SingleFactHQLContext.java   |  8 +--
 .../parse/SingleFactMultiStorageHQLContext.java | 68 +++++++++++++++-----
 .../apache/lens/cube/parse/UnionHQLContext.java |  2 +-
 .../cube/metadata/TestCubeMetastoreClient.java  | 10 +--
 .../lens/cube/metadata/TestExprColumn.java      | 20 +++---
 .../apache/lens/cube/parse/CubeTestSetup.java   |  2 +-
 .../lens/cube/parse/TestCubeRewriter.java       |  4 +-
 .../apache/lens/server/metastore/JAXBUtils.java |  8 +--
 15 files changed, 161 insertions(+), 102 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-api/src/main/resources/lens-errors.conf
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/lens-errors.conf b/lens-api/src/main/resources/lens-errors.conf
index ca8562f..c880543 100644
--- a/lens-api/src/main/resources/lens-errors.conf
+++ b/lens-api/src/main/resources/lens-errors.conf
@@ -284,9 +284,15 @@ lensCubeErrorsForQuery = [
   }
 
   {
-      errorCode = 3031
-      httpStatusCode = ${BAD_REQUEST}
-      errorMsg = "The query is answerable from two storages but union is disabled."
+    errorCode = 3031
+    httpStatusCode = ${BAD_REQUEST}
+    errorMsg = "The query is answerable from two storages but union is disabled."
+  }
+
+  {
+    errorCode = 3032
+    httpStatusCode = ${INTERNAL_SERVER_ERROR}
+    errorMsg = "Could not parse expression %s"
   }
 ]
 
@@ -298,10 +304,17 @@ lensCubeErrorsForMetastore = [
   }
 
   {
-      errorCode = 3102
-      httpStatusCode = ${BAD_REQUEST}
-      errorMsg = "No timeline found for fact=%s, storage=%s, update period=%s, partition column=%s."
+    errorCode = 3102
+    httpStatusCode = ${BAD_REQUEST}
+    errorMsg = "No timeline found for fact=%s, storage=%s, update period=%s, partition column=%s."
   }
+
+  {
+    errorCode = 3103
+    httpStatusCode = ${BAD_REQUEST}
+    errorMsg = "The Expression %s is Not Parsable."
+  }
+
 ]
 
 lensCubeErrors = ${lensCubeErrorsForQuery}${lensCubeErrorsForMetastore}

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
index 6c5dc2f..68cd80b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
@@ -54,10 +54,12 @@ public enum LensCubeErrorCode {
   NO_CANDIDATE_FACT_AVAILABLE(3028, 1200),
   NO_CANDIDATE_DIM_STORAGE_TABLES(3029, 1300),
   NO_STORAGE_TABLE_AVAIABLE(3030, 1400),
-  STORAGE_UNION_DISABLED(3031, 100),
+  STORAGE_UNION_DISABLED(3031, 1500),
+  COULD_NOT_PARSE_EXPRESSION(3032, 1500),
   // Error codes greater than 3100 are errors while doing a metastore operation.
   ERROR_IN_ENTITY_DEFINITION(3101, 100),
-  TIMELINE_ABSENT(3102, 100);
+  TIMELINE_ABSENT(3102, 100),
+  EXPRESSION_NOT_PARSABLE(3103, 1500);
 
   public LensErrorInfo getLensErrorInfo() {
     return this.errorInfo;

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/main/java/org/apache/lens/cube/metadata/ExprColumn.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/ExprColumn.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/ExprColumn.java
index b418517..da87e31 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/ExprColumn.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/ExprColumn.java
@@ -23,17 +23,14 @@ import java.io.UnsupportedEncodingException;
 import java.util.*;
 
 import org.apache.lens.cube.parse.HQLParser;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.ParseException;
 
-import lombok.Getter;
-import lombok.NoArgsConstructor;
-import lombok.Setter;
-import lombok.ToString;
+import lombok.*;
 
 public class ExprColumn extends CubeColumn {
   public static final char EXPRESSION_DELIMITER = '|';
@@ -46,11 +43,11 @@ public class ExprColumn extends CubeColumn {
   private int hashCode;
 
   // for backward compatibility
-  public ExprColumn(FieldSchema column, String displayString, String expression) {
+  public ExprColumn(FieldSchema column, String displayString, String expression) throws LensException {
     this(column, displayString, new ExprSpec(expression, null, null));
   }
 
-  public ExprColumn(FieldSchema column, String displayString, ExprSpec... expressions) {
+  public ExprColumn(FieldSchema column, String displayString, ExprSpec... expressions) throws LensException {
     super(column.getName(), column.getComment(), displayString, null, null, 0.0);
 
     if (expressions == null || expressions.length == 0) {
@@ -124,6 +121,7 @@ public class ExprColumn extends CubeColumn {
   public static class ExprSpec {
     @Getter
     @Setter
+    @NonNull
     private String expr;
     @Getter
     @Setter
@@ -136,20 +134,18 @@ public class ExprColumn extends CubeColumn {
     private boolean hasHashCode = false;
     private transient int hashCode;
 
-    public ExprSpec(String expr, Date startTime, Date endTime) {
+    public ExprSpec(@NonNull String expr, Date startTime, Date endTime) throws LensException {
       this.expr = expr;
       this.startTime = startTime;
       this.endTime = endTime;
+      // validation
+      getASTNode();
     }
 
-    public synchronized ASTNode getASTNode() {
+    public synchronized ASTNode getASTNode() throws LensException {
       if (astNode == null) {
-        try {
-          if (StringUtils.isNotBlank(expr)) {
-            astNode = HQLParser.parseExpr(getExpr());
-          }
-        } catch (ParseException e) {
-          throw new IllegalArgumentException("Expression can't be parsed: " + getExpr(), e);
+        if (StringUtils.isNotBlank(expr)) {
+          astNode = MetastoreUtil.parseExpr(getExpr());
         }
       }
       return astNode;
@@ -160,8 +156,14 @@ public class ExprColumn extends CubeColumn {
       if (!hasHashCode) {
         final int prime = 31;
         int result = 1;
-        if (getASTNode() != null) {
-          String exprNormalized = HQLParser.getString(getASTNode());
+        ASTNode astNode;
+        try {
+          astNode = getASTNode();
+        } catch (LensException e) {
+          throw new IllegalArgumentException(e);
+        }
+        if (astNode != null) {
+          String exprNormalized = HQLParser.getString(astNode);
           result = prime * result + exprNormalized.hashCode();
         }
         result = prime * result + ((getStartTime() == null) ? 0 : COLUMN_TIME_FORMAT.get().format(
@@ -262,9 +264,17 @@ public class ExprColumn extends CubeColumn {
       return false;
     }
     // Compare expressions for both - compare ASTs
-    List<ASTNode> myExpressions = getExpressionASTList();
-    List<ASTNode> otherExpressions = other.getExpressionASTList();
-
+    List<ASTNode> myExpressions, otherExpressions;
+    try {
+      myExpressions = getExpressionASTList();
+    } catch (LensException e) {
+      throw new IllegalArgumentException(e);
+    }
+    try {
+      otherExpressions = other.getExpressionASTList();
+    } catch (LensException e) {
+      throw new IllegalArgumentException(e);
+    }
     for (int i = 0; i < myExpressions.size(); i++) {
       if (!HQLParser.equalsAST(myExpressions.get(i), otherExpressions.get(i))) {
         return false;
@@ -316,11 +326,11 @@ public class ExprColumn extends CubeColumn {
    *
    * @return the ast
    */
-  public ASTNode getAst() {
+  public ASTNode getAst() throws LensException {
     return getExpressionASTList().get(0);
   }
 
-  public List<ASTNode> getExpressionASTList() {
+  public List<ASTNode> getExpressionASTList() throws LensException {
     synchronized (expressionSet) {
       if (astNodeList.isEmpty()) {
         for (ExprSpec expr : expressionSet) {
@@ -366,15 +376,15 @@ public class ExprColumn extends CubeColumn {
    * Add an expression to existing set of expressions for this column
    *
    * @param expression
-   * @throws ParseException
+   * @throws LensException
    */
-  public void addExpression(ExprSpec expression) throws ParseException {
+  public void addExpression(ExprSpec expression) throws LensException {
     if (expression == null || expression.getExpr().isEmpty()) {
       throw new IllegalArgumentException("Empty expression not allowed");
     }
 
     // Validate if expression can be correctly parsed
-    HQLParser.parseExpr(expression.getExpr());
+    MetastoreUtil.parseExpr(expression.getExpr());
     synchronized (expressionSet) {
       expressionSet.add(expression);
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
index 4ec049c..deb5368 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
@@ -19,14 +19,20 @@
 
 package org.apache.lens.cube.metadata;
 
+import static org.apache.lens.cube.error.LensCubeErrorCode.EXPRESSION_NOT_PARSABLE;
 import static org.apache.lens.cube.metadata.MetastoreConstants.*;
 
 import java.text.ParseException;
 import java.util.*;
 
+import org.apache.lens.server.api.error.LensException;
+
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.ParseDriver;
+import org.apache.hadoop.hive.ql.parse.ParseUtils;
 
 import com.google.common.collect.Sets;
 
@@ -536,4 +542,14 @@ public class MetastoreUtil {
     }
     return null;
   }
+  public static ASTNode parseExpr(String expr) throws LensException {
+    ParseDriver driver = new ParseDriver();
+    ASTNode tree;
+    try {
+      tree = driver.parseExpression(expr);
+    } catch (org.apache.hadoop.hive.ql.parse.ParseException e) {
+      throw new LensException(EXPRESSION_NOT_PARSABLE.getLensErrorInfo(), e, e.getMessage(), expr);
+    }
+    return ParseUtils.findRootNonNullToken(tree);
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
index 1884bde..2338ba7 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
@@ -32,7 +32,6 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.ParseException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.antlr.runtime.CommonToken;
@@ -159,12 +158,7 @@ public class CandidateFact implements CandidateTable {
       TimeRange range = cubeql.getTimeRanges().get(i);
       String rangeWhere = rangeToWhereClause.get(range);
       if (!StringUtils.isBlank(rangeWhere)) {
-        ASTNode rangeAST;
-        try {
-          rangeAST = HQLParser.parseExpr(rangeWhere);
-        } catch (ParseException e) {
-          throw new LensException(e);
-        }
+        ASTNode rangeAST = HQLParser.parseExpr(rangeWhere);
         rangeAST.setParent(timenodes.get(i).parent);
         timenodes.get(i).parent.setChild(timenodes.get(i).childIndex, rangeAST);
       }

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
index 97088a1..da74713 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
@@ -31,7 +31,6 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.ParseException;
 
 import org.antlr.runtime.CommonToken;
 import org.antlr.runtime.tree.Tree;
@@ -73,12 +72,7 @@ class GroupbyResolver implements ContextRewriter {
 
         if (!groupByExprs.contains(expr)) {
           if (!cubeql.isAggregateExpr(expr)) {
-            ASTNode exprAST;
-            try {
-              exprAST = HQLParser.parseExpr(expr);
-            } catch (ParseException e) {
-              throw new LensException(e);
-            }
+            ASTNode exprAST = HQLParser.parseExpr(expr);
             ASTNode groupbyAST = cubeql.getGroupByAST();
             if (!isConstantsUsed(exprAST)) {
               if (groupbyAST != null) {
@@ -140,12 +134,7 @@ class GroupbyResolver implements ContextRewriter {
     int index = 0;
     for (String expr : groupByExprs) {
       if (!contains(cubeql, selectExprs, expr)) {
-        ASTNode exprAST;
-        try {
-          exprAST = HQLParser.parseExpr(expr);
-        } catch (ParseException e) {
-          throw new LensException(e);
-        }
+        ASTNode exprAST = HQLParser.parseExpr(expr);
         addChildAtIndex(index, cubeql.getSelectAST(), exprAST);
         index++;
       }

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
index 9a9d134..7cea7d5 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
@@ -18,10 +18,10 @@
  */
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.error.LensCubeErrorCode.COULD_NOT_PARSE_EXPRESSION;
 import static org.apache.lens.cube.error.LensCubeErrorCode.SYNTAX_ERROR;
 
 import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
-import static org.apache.hadoop.hive.ql.parse.HiveParser.Number;
 
 import java.io.IOException;
 import java.lang.reflect.Field;
@@ -170,9 +170,14 @@ public final class HQLParser {
     return tree;
   }
 
-  public static ASTNode parseExpr(String expr) throws ParseException {
+  public static ASTNode parseExpr(String expr) throws LensException {
     ParseDriver driver = new ParseDriver();
-    ASTNode tree = driver.parseExpression(expr);
+    ASTNode tree;
+    try {
+      tree = driver.parseExpression(expr);
+    } catch (ParseException e) {
+      throw new LensException(COULD_NOT_PARSE_EXPRESSION.getLensErrorInfo(), e, e.getMessage());
+    }
     return ParseUtils.findRootNonNullToken(tree);
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
index f7271e5..de52b0a 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
@@ -26,7 +26,6 @@ import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.ParseException;
 
 /**
  * HQL context class which passes down all query strings to come from DimOnlyHQLContext and works with fact being
@@ -67,12 +66,7 @@ class SingleFactHQLContext extends DimOnlyHQLContext {
           String rangeWhere = entry.getKey();
 
           if (!StringUtils.isBlank(rangeWhere)) {
-            ASTNode rangeAST;
-            try {
-              rangeAST = HQLParser.parseExpr(rangeWhere);
-            } catch (ParseException e) {
-              throw new LensException(e);
-            }
+            ASTNode rangeAST = HQLParser.parseExpr(rangeWhere);
             rangeAST.setParent(range.getParent());
             range.getParent().setChild(range.getChildIndex(), rangeAST);
           }

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
index 418ef5a..96b1d05 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
@@ -19,7 +19,7 @@
 
 package org.apache.lens.cube.parse;
 
-import static org.apache.lens.cube.parse.HQLParser.getString;
+import static org.apache.lens.cube.parse.HQLParser.*;
 
 import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 
@@ -66,7 +66,7 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
     @Override
     public int hashCode() {
       if (!hashCodeComputed) {
-        hashCode = HQLParser.getString(ast).hashCode();
+        hashCode = getString(ast).hashCode();
         hashCodeComputed = true;
       }
       return hashCode;
@@ -74,8 +74,8 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
 
     @Override
     public boolean equals(Object o) {
-      return o instanceof HashableASTNode && this.hashCode() == o.hashCode() && HQLParser.getString(this.getAST())
-        .trim().equalsIgnoreCase(HQLParser.getString(((HashableASTNode) o).getAST()).trim());
+      return o instanceof HashableASTNode && this.hashCode() == o.hashCode() && getString(this.getAST())
+        .trim().equalsIgnoreCase(getString(((HashableASTNode) o).getAST()).trim());
     }
   }
 
@@ -86,6 +86,7 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
     super(query, fact);
     processSelectAST();
     processGroupByAST();
+    processWhereAST();
     processHavingAST();
     processOrderByAST();
     processLimit();
@@ -94,10 +95,10 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
 
   private void processSelectAST() {
     query.getSelectFinalAliases().clear();
-    ASTNode originalSelectAST = HQLParser.copyAST(query.getSelectAST());
+    ASTNode originalSelectAST = copyAST(query.getSelectAST());
     query.setSelectAST(new ASTNode(originalSelectAST.getToken()));
     ASTNode outerSelectAST = processExpression(originalSelectAST);
-    setSelect(HQLParser.getString(outerSelectAST));
+    setSelect(getString(outerSelectAST));
   }
 
   private void processGroupByAST() {
@@ -106,16 +107,25 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
     }
   }
 
+  private void processWhereAST() throws LensException {
+    for (String storageTable : fact.getStorgeWhereClauseMap().keySet()) {
+      ASTNode tree = parseExpr(fact.getStorgeWhereClauseMap().get(storageTable));
+      ASTNode replaced = replaceAST(tree);
+      fact.getStorgeWhereClauseMap().put(storageTable, getString(replaced));
+    }
+  }
+
   private void processHavingAST() throws LensException {
     if (query.getHavingAST() != null) {
-      setHaving(HQLParser.getString(processExpression(query.getHavingAST())));
+      setHaving(getString(processExpression(query.getHavingAST())));
       query.setHavingAST(null);
     }
   }
 
+
   private void processOrderByAST() {
     if (query.getOrderByAST() != null) {
-      setOrderby(HQLParser.getString(processExpression(query.getOrderByAST())));
+      setOrderby(getString(processExpression(query.getOrderByAST())));
       query.setOrderByAST(null);
     }
   }
@@ -124,6 +134,7 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
     setLimit(query.getLimitValue());
     query.setLimitValue(null);
   }
+
   /*
   Perform a DFS on the provided AST, and Create an AST of similar structure with changes specific to the
   inner query - outer query dynamics. The resultant AST is supposed to be used in outer query.
@@ -147,11 +158,11 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
     if (astNode == null) {
       return null;
     }
-    if (innerToOuterASTs.containsKey(new HashableASTNode(astNode))) {
-      return innerToOuterASTs.get(new HashableASTNode(astNode));
-    }
-    if (HQLParser.isAggregateAST(astNode)) {
-      ASTNode innerSelectASTWithoutAlias = HQLParser.copyAST(astNode);
+    if (isAggregateAST(astNode)) {
+      if (innerToOuterASTs.containsKey(new HashableASTNode(astNode))) {
+        return innerToOuterASTs.get(new HashableASTNode(astNode));
+      }
+      ASTNode innerSelectASTWithoutAlias = copyAST(astNode);
       ASTNode innerSelectExprAST = new ASTNode(new CommonToken(HiveParser.TOK_SELEXPR));
       innerSelectExprAST.addChild(innerSelectASTWithoutAlias);
       String alias = decideAlias(astNode);
@@ -164,8 +175,11 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
       outerAST.addChild(dotAST);
       innerToOuterASTs.put(new HashableASTNode(innerSelectASTWithoutAlias), outerAST);
       return outerAST;
-    } else if (HQLParser.isTableColumnAST(astNode)) {
-      ASTNode innerSelectASTWithoutAlias = HQLParser.copyAST(astNode);
+    } else if (isTableColumnAST(astNode)) {
+      if (innerToOuterASTs.containsKey(new HashableASTNode(astNode))) {
+        return innerToOuterASTs.get(new HashableASTNode(astNode));
+      }
+      ASTNode innerSelectASTWithoutAlias = copyAST(astNode);
       ASTNode innerSelectExprAST = new ASTNode(new CommonToken(HiveParser.TOK_SELEXPR));
       innerSelectExprAST.addChild(innerSelectASTWithoutAlias);
       String alias = decideAlias(astNode);
@@ -186,6 +200,30 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
     }
   }
 
+  /**
+   * Transforms the inner query's AST so that aliases are used now instead of column names.
+   * Does so in-place, without creating new ASTNode instances.
+   * @param astNode inner query's AST Node to transform
+   * @return Transformed AST Node.
+   */
+  private ASTNode replaceAST(ASTNode astNode) {
+    if (astNode == null) {
+      return null;
+    }
+    if (isAggregateAST(astNode) || isTableColumnAST(astNode)) {
+      if (innerToOuterASTs.containsKey(new HashableASTNode(astNode))) {
+        ASTNode ret = innerToOuterASTs.get(new HashableASTNode(astNode));
+        // Set parent null for quicker GC
+        astNode.setParent(null);
+        return ret;
+      }
+    }
+    for (int i = 0; i < astNode.getChildCount(); i++) {
+      astNode.setChild(i, replaceAST((ASTNode) astNode.getChild(i)));
+    }
+    return astNode;
+  }
+
   private void addToInnerSelectAST(ASTNode selectExprAST) {
     if (query.getSelectAST() == null) {
       query.setSelectAST(new ASTNode(new CommonToken(TOK_SELECT)));

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
index c9ba561..e6ee989 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
@@ -33,7 +33,7 @@ import lombok.RequiredArgsConstructor;
 @RequiredArgsConstructor
 public abstract class UnionHQLContext extends SimpleHQLContext {
   protected final CubeQueryContext query;
-  private final CandidateFact fact;
+  protected final CandidateFact fact;
 
   List<HQLContextInterface> hqlContexts = new ArrayList<>();
 

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
index c6ce6ad..0fef13f 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
@@ -43,11 +43,7 @@ import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.Partition;
-import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.parse.ParseException;
+import org.apache.hadoop.hive.ql.metadata.*;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
@@ -305,7 +301,7 @@ public class TestCubeMetastoreClient {
       new DerivedCube(derivedCubeNameWithProps, measures, dimensions, CUBE_PROPERTIES, 0L, cubeWithProps);
   }
 
-  private static void defineUberDims() {
+  private static void defineUberDims() throws LensException {
     // Define zip dimension
     zipAttrs.add(new BaseDimAttribute(new FieldSchema("zipcode", "int", "code")));
     zipAttrs.add(new BaseDimAttribute(new FieldSchema("f1", "string", "field1")));
@@ -411,7 +407,7 @@ public class TestCubeMetastoreClient {
       expr1.setExpr("contact(countrydim.name");
       stateCountryExpr.addExpression(expr1);
       fail("Expected add expression to fail because of syntax error");
-    } catch (ParseException exc) {
+    } catch (LensException exc) {
       // Pass
     }
     city.alterExpression(stateCountryExpr);

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestExprColumn.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestExprColumn.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestExprColumn.java
index 8770f1a..0153b2d 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestExprColumn.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestExprColumn.java
@@ -28,7 +28,9 @@ import java.util.Iterator;
 import java.util.Map;
 import java.util.TimeZone;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.ExprColumn.ExprSpec;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 
@@ -142,7 +144,7 @@ public class TestExprColumn {
   }
 
   @Test
-  public void testExprColumnCreationErrors() {
+  public void testExprColumnCreationErrors() throws LensException {
     FieldSchema colSchema = new FieldSchema("errorColumn", "double", "multi exprcol");
 
     // no expression spec passed
@@ -157,16 +159,16 @@ public class TestExprColumn {
     try {
       ExprColumn col1 = new ExprColumn(colSchema, "NoExprInExprSpec", new ExprSpec(null, null, null));
       fail(col1 + " should not be created");
-    } catch (IllegalArgumentException e) {
-      assertTrue(e.getMessage().contains("No expression string specified for column errorColumn at index:0"));
+    } catch (NullPointerException e) {
+      // pass
     }
 
     // Parse error in expr passed in exprspec
     try {
       ExprColumn col1 = new ExprColumn(colSchema, "NoExprInExprSpec", new ExprSpec("(a+b", null, null));
       fail(col1 + " should not be created");
-    } catch (IllegalArgumentException e) {
-      assertTrue(e.getMessage().contains("Expression can't be parsed: (a+b"), e.getMessage());
+    } catch (LensException e) {
+      assertEquals(e.getErrorCode(), LensCubeErrorCode.EXPRESSION_NOT_PARSABLE.getLensErrorInfo().getErrorCode());
     }
 
     // Parse error in expr passed in exprspec
@@ -174,8 +176,8 @@ public class TestExprColumn {
       ExprColumn col1 = new ExprColumn(colSchema, "NoExprInExprSpec", new ExprSpec("a + b", null, null),
         new ExprSpec("(a+b", null, null));
       fail(col1 + " should not be created");
-    } catch (IllegalArgumentException e) {
-      assertTrue(e.getMessage().contains("Expression can't be parsed: (a+b"));
+    } catch (LensException e) {
+      assertEquals(e.getErrorCode(), LensCubeErrorCode.EXPRESSION_NOT_PARSABLE.getLensErrorInfo().getErrorCode());
     }
 
     // no expression passed in exprspec
@@ -183,8 +185,8 @@ public class TestExprColumn {
       ExprColumn col1 = new ExprColumn(colSchema, "NoExprInExprSpecAt1", new ExprSpec("a + b", null, null),
         new ExprSpec(null, null, null));
       fail(col1 + " should not be created");
-    } catch (IllegalArgumentException e) {
-      assertTrue(e.getMessage().contains("No expression string specified for column errorColumn at index:1"));
+    } catch (NullPointerException e) {
+      // pass
     }
 
     // startTime after endTime

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 2a50d74..3f01dbe 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -1561,7 +1561,7 @@ public class CubeTestSetup {
   }
 
   // DimWithTwoStorages
-  private void createCityTable(CubeMetastoreClient client) throws HiveException, ParseException {
+  private void createCityTable(CubeMetastoreClient client) throws HiveException, ParseException, LensException {
     Set<CubeDimAttribute> cityAttrs = new HashSet<CubeDimAttribute>();
     cityAttrs.add(new BaseDimAttribute(new FieldSchema("id", "int", "code")));
     cityAttrs.add(new BaseDimAttribute(new FieldSchema("name", "string", "city name")));

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 3be9406..0f05556 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -416,7 +416,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
       conf.setBoolean(CubeQueryConfUtil.ENABLE_STORAGES_UNION, true);
 
       hqlQuery = rewrite("select cityid as `City ID`, msr8, msr7 as `Third measure` "
-        + "from testCube where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
+        + "from testCube where cityid = 'a' and zipcode = 'b' and " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
 
       expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
         "SELECT testcube.alias0 as `City ID`, sum(testcube.alias1) + max(testcube.alias2), "
@@ -426,7 +426,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
         "select testcube.cityid as `alias0`, sum(testcube.msr2) as `alias1`, "
           + "max(testcube.msr3) as `alias2`, "
           + "sum(case when testcube.cityid = 'x' then testcube.msr21 else testcube.msr22 end) as `alias3`",
-        null, "group by testcube.cityid");
+        "testcube.alias0 = 'a' and testcube.zipcode = 'b'", "group by testcube.cityid");
 
       compareQueries(hqlQuery, expected);
 

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
index a5883f7..817c84c 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
@@ -316,7 +316,7 @@ public final class JAXBUtils {
     return xes;
   }
 
-  private static ExprSpec[] exprSpecFromXExprColumn(Collection<XExprSpec> xesList) {
+  private static ExprSpec[] exprSpecFromXExprColumn(Collection<XExprSpec> xesList) throws LensException {
     List<ExprSpec> esArray = new ArrayList<ExprSpec>(xesList.size());
     for (XExprSpec xes : xesList) {
       esArray.add(new ExprSpec(xes.getExpr(), getDateFromXML(xes.getStartTime()), getDateFromXML(xes.getEndTime())));
@@ -478,7 +478,7 @@ public final class JAXBUtils {
     return jc;
   }
 
-  public static ExprColumn hiveExprColumnFromXExprColumn(XExprColumn xe) {
+  public static ExprColumn hiveExprColumnFromXExprColumn(XExprColumn xe) throws LensException {
     ExprColumn ec = new ExprColumn(new FieldSchema(xe.getName(), xe.getType().toLowerCase(),
       xe.getDescription()),
       xe.getDisplayString(),
@@ -598,7 +598,7 @@ public final class JAXBUtils {
       return null;
     }
 
-    Storage storage = null;
+    Storage storage;
     try {
       Class<?> clazz = Class.forName(xs.getClassname());
       Constructor<?> constructor = clazz.getConstructor(String.class);
@@ -924,7 +924,7 @@ public final class JAXBUtils {
     return ret;
   }
 
-  public static Dimension dimensionFromXDimension(XDimension dimension) {
+  public static Dimension dimensionFromXDimension(XDimension dimension) throws LensException {
     Set<CubeDimAttribute> dims = new LinkedHashSet<CubeDimAttribute>();
     for (XDimAttribute xd : dimension.getAttributes().getDimAttribute()) {
       dims.add(hiveDimAttrFromXDimAttr(xd));


[21/51] [abbrv] lens git commit: LENS-913: Seeing DateUtil exception in all cube tests

Posted by de...@apache.org.
LENS-913: Seeing DateUtil exception in all cube tests


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/c73d5844
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/c73d5844
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/c73d5844

Branch: refs/heads/current-release-line
Commit: c73d5844b6e74eb3184b70aea283bf26b1bd267c
Parents: 71cf9ff
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Fri Jan 8 17:23:19 2016 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Fri Jan 8 17:23:19 2016 +0530

----------------------------------------------------------------------
 .../lens/cube/metadata/CubeFactTable.java       | 45 +++++++++++---------
 .../org/apache/lens/cube/metadata/DateUtil.java |  3 +-
 .../lens/cube/metadata/CubeFactTableTest.java   |  4 ++
 3 files changed, 30 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/c73d5844/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
index dd0adb7..b1fec8c 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
@@ -21,6 +21,7 @@ package org.apache.lens.cube.metadata;
 import java.util.*;
 
 import org.apache.lens.cube.metadata.UpdatePeriod.UpdatePeriodComparator;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -28,7 +29,9 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 
 import com.google.common.collect.Lists;
+import lombok.extern.slf4j.Slf4j;
 
+@Slf4j
 public class CubeFactTable extends AbstractCubeTable {
   private String cubeName;
   private final Map<String, Set<UpdatePeriod>> storageUpdatePeriods;
@@ -80,7 +83,7 @@ public class CubeFactTable extends AbstractCubeTable {
   }
 
   private static Map<String, Set<UpdatePeriod>> getUpdatePeriods(String name, Map<String, String> props) {
-    Map<String, Set<UpdatePeriod>> storageUpdatePeriods = new HashMap<String, Set<UpdatePeriod>>();
+    Map<String, Set<UpdatePeriod>> storageUpdatePeriods = new HashMap<>();
     String storagesStr = props.get(MetastoreUtil.getFactStorageListKey(name));
     if (!StringUtils.isBlank(storagesStr)) {
       String[] storages = storagesStr.split(",");
@@ -88,7 +91,7 @@ public class CubeFactTable extends AbstractCubeTable {
         String updatePeriodStr = props.get(MetastoreUtil.getFactUpdatePeriodKey(name, storage));
         if (StringUtils.isNotBlank(updatePeriodStr)) {
           String[] periods = updatePeriodStr.split(",");
-          Set<UpdatePeriod> updatePeriods = new TreeSet<UpdatePeriod>();
+          Set<UpdatePeriod> updatePeriods = new TreeSet<>();
           for (String period : periods) {
             updatePeriods.add(UpdatePeriod.valueOf(period));
           }
@@ -321,20 +324,28 @@ public class CubeFactTable extends AbstractCubeTable {
     getProperties().put(MetastoreConstants.FACT_AGGREGATED_PROPERTY, Boolean.toString(isAggregated));
   }
 
-  public Date getAbsoluteStartTime() {
+  public Date getDateFromProperty(String propKey, boolean relative, boolean start) {
+    String prop = getProperties().get(propKey);
     try {
-      return DateUtil.resolveAbsoluteDate(getProperties().get(MetastoreConstants.FACT_ABSOLUTE_START_TIME));
-    } catch (Exception e) {
-      return new Date(Long.MIN_VALUE);
+      if (StringUtils.isNotBlank(prop)) {
+        if (relative) {
+          return DateUtil.resolveRelativeDate(prop, now());
+        } else {
+          return DateUtil.resolveAbsoluteDate(prop);
+        }
+      }
+    } catch (LensException e) {
+      log.error("unable to parse {} {} date: {}", relative ? "relative" : "absolute", start ? "start" : "end", prop);
     }
+    return start ? DateUtil.MIN_DATE : DateUtil.MAX_DATE;
+  }
+
+  public Date getAbsoluteStartTime() {
+    return getDateFromProperty(MetastoreConstants.FACT_ABSOLUTE_START_TIME, false, true);
   }
 
   public Date getRelativeStartTime() {
-    try {
-      return DateUtil.resolveRelativeDate(getProperties().get(MetastoreConstants.FACT_RELATIVE_START_TIME), now());
-    } catch (Exception e) {
-      return new Date(Long.MIN_VALUE);
-    }
+    return getDateFromProperty(MetastoreConstants.FACT_RELATIVE_START_TIME, true, true);
   }
 
   public Date getStartTime() {
@@ -342,19 +353,11 @@ public class CubeFactTable extends AbstractCubeTable {
   }
 
   public Date getAbsoluteEndTime() {
-    try {
-      return DateUtil.resolveAbsoluteDate(getProperties().get(MetastoreConstants.FACT_ABSOLUTE_END_TIME));
-    } catch (Exception e) {
-      return new Date(Long.MAX_VALUE);
-    }
+    return getDateFromProperty(MetastoreConstants.FACT_ABSOLUTE_END_TIME, false, false);
   }
 
   public Date getRelativeEndTime() {
-    try {
-      return DateUtil.resolveRelativeDate(getProperties().get(MetastoreConstants.FACT_RELATIVE_END_TIME), now());
-    } catch (Exception e) {
-      return new Date(Long.MAX_VALUE);
-    }
+    return getDateFromProperty(MetastoreConstants.FACT_RELATIVE_END_TIME, true, false);
   }
 
   public Date getEndTime() {

http://git-wip-us.apache.org/repos/asf/lens/blob/c73d5844/lens-cube/src/main/java/org/apache/lens/cube/metadata/DateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/DateUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/DateUtil.java
index b76c567..b82cd95 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/DateUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/DateUtil.java
@@ -54,7 +54,8 @@ public final class DateUtil {
    * <NUM>UNIT or Hardcoded dates in DD-MM-YYYY hh:mm:ss,sss
    */
   public static final String UNIT;
-
+  public static final Date MAX_DATE = new Date(Long.MAX_VALUE);
+  public static final Date MIN_DATE = new Date(Long.MIN_VALUE);
   static {
     StringBuilder sb = new StringBuilder();
     String sep = "";

http://git-wip-us.apache.org/repos/asf/lens/blob/c73d5844/lens-cube/src/test/java/org/apache/lens/cube/metadata/CubeFactTableTest.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/CubeFactTableTest.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/CubeFactTableTest.java
index 25eaaef..0935509 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/CubeFactTableTest.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/CubeFactTableTest.java
@@ -18,6 +18,8 @@
  */
 package org.apache.lens.cube.metadata;
 
+import static org.mockito.Matchers.anyBoolean;
+import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 import static org.testng.Assert.assertEquals;
@@ -66,6 +68,8 @@ public class CubeFactTableTest {
 
     when(cubeFactTable.getProperties()).thenReturn(properties);
 
+    when(cubeFactTable.getDateFromProperty(anyString(), anyBoolean(), anyBoolean())).thenCallRealMethod();
+
     when(cubeFactTable.getRelativeStartTime()).thenCallRealMethod();
     when(cubeFactTable.getAbsoluteStartTime()).thenCallRealMethod();
 


[26/51] [abbrv] lens git commit: LENS-735 : Remove accepting TableReferences for ReferenceDimAttribute

Posted by de...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
index 1a83d09..33ec9d9 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
@@ -28,7 +28,9 @@ import org.apache.lens.cube.error.ColUnAvailableInTimeRange;
 import org.apache.lens.cube.error.ColUnAvailableInTimeRangeException;
 import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.*;
+import org.apache.lens.cube.metadata.join.JoinPath;
 import org.apache.lens.cube.parse.DenormalizationResolver.ReferencedQueriedColumn;
+import org.apache.lens.cube.parse.join.AutoJoinContext;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
@@ -197,14 +199,14 @@ class TimerangeResolver implements ContextRewriter {
         if (!column.isColumnAvailableInTimeRange(range)) {
           log.info("Timerange queried is not in column life for {}, Removing join paths containing the column", column);
           // Remove join paths containing this column
-          Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> allPaths = joinContext.getAllPaths();
+          Map<Aliased<Dimension>, List<JoinPath>> allPaths = joinContext.getAllPaths();
 
           for (Aliased<Dimension> dimension : allPaths.keySet()) {
-            List<SchemaGraph.JoinPath> joinPaths = allPaths.get(dimension);
-            Iterator<SchemaGraph.JoinPath> joinPathIterator = joinPaths.iterator();
+            List<JoinPath> joinPaths = allPaths.get(dimension);
+            Iterator<JoinPath> joinPathIterator = joinPaths.iterator();
 
             while (joinPathIterator.hasNext()) {
-              SchemaGraph.JoinPath path = joinPathIterator.next();
+              JoinPath path = joinPathIterator.next();
               if (path.containsColumnOfTable(col, (AbstractCubeTable) cubeql.getCube())) {
                 log.info("Removing join path: {} as columns :{} is not available in the range", path, col);
                 joinPathIterator.remove();

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
new file mode 100644
index 0000000..993955a
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
@@ -0,0 +1,719 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse.join;
+
+import java.util.*;
+
+import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.cube.metadata.*;
+import org.apache.lens.cube.metadata.join.JoinPath;
+import org.apache.lens.cube.metadata.join.TableRelationship;
+import org.apache.lens.cube.parse.*;
+import org.apache.lens.server.api.error.LensException;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.ql.parse.JoinType;
+
+import lombok.Getter;
+import lombok.Setter;
+import lombok.extern.slf4j.Slf4j;
+
+/**
+ * Store join chain information resolved by join resolver
+ */
+@Slf4j
+public class AutoJoinContext {
+  // Map of a joined table to list of all possible paths from that table to
+  // the target
+  private final Map<Aliased<Dimension>, List<JoinPath>> allPaths;
+  private Set<Dimension> requiredDimensions;
+  @Getter
+  // Map of joined table to the join type (if provided by user)
+  private final Map<AbstractCubeTable, JoinType> tableJoinTypeMap;
+
+  // True if joins were resolved automatically
+  private boolean joinsResolved;
+  // Target table for the auto join resolver
+  private final AbstractCubeTable autoJoinTarget;
+  // Configuration string to control join type
+  private String joinTypeCfg;
+
+  // Map of a joined table to its columns which are part of any of the join
+  // paths. This is used in candidate table resolver
+  @Getter
+  private Map<Dimension, Map<AbstractCubeTable, List<String>>> joinPathFromColumns = new HashMap<>();
+
+  @Getter
+  private Map<Dimension, Map<AbstractCubeTable, List<String>>> joinPathToColumns = new HashMap<>();
+
+  // there can be separate join clause for each fact in-case of multi fact queries
+  @Getter
+  Map<CandidateFact, JoinClause> factClauses = new HashMap<>();
+  @Getter
+  @Setter
+  JoinClause minCostClause;
+  private final boolean flattenBridgeTables;
+  private final String bridgeTableFieldAggr;
+
+  public AutoJoinContext(Map<Aliased<Dimension>, List<JoinPath>> allPaths,
+                         Set<Dimension> requiredDimensions,
+                         Map<AbstractCubeTable, JoinType> tableJoinTypeMap,
+                         AbstractCubeTable autoJoinTarget, String joinTypeCfg, boolean joinsResolved,
+                         boolean flattenBridgeTables, String bridgeTableFieldAggr) {
+    this.allPaths = allPaths;
+    this.requiredDimensions = requiredDimensions;
+    initJoinPathColumns();
+    this.tableJoinTypeMap = tableJoinTypeMap;
+    this.autoJoinTarget = autoJoinTarget;
+    this.joinTypeCfg = joinTypeCfg;
+    this.joinsResolved = joinsResolved;
+    this.flattenBridgeTables = flattenBridgeTables;
+    this.bridgeTableFieldAggr = bridgeTableFieldAggr;
+    log.debug("All join paths:{}", allPaths);
+    log.debug("Join path from columns:{}", joinPathFromColumns);
+    log.debug("Join path to columns:{}", joinPathToColumns);
+  }
+
+  public AbstractCubeTable getAutoJoinTarget() {
+    return autoJoinTarget;
+  }
+
+  private JoinClause getJoinClause(CandidateFact fact) {
+    if (fact == null || !factClauses.containsKey(fact)) {
+      return minCostClause;
+    }
+    return factClauses.get(fact);
+  }
+
+  // Populate map of tables to their columns which are present in any of the
+  // join paths
+  private void initJoinPathColumns() {
+    for (List<JoinPath> paths : allPaths.values()) {
+      for (int i = 0; i < paths.size(); i++) {
+        JoinPath jp = paths.get(i);
+        jp.initColumnsForTable();
+      }
+    }
+    refreshJoinPathColumns();
+  }
+
+  public void refreshJoinPathColumns() {
+    joinPathFromColumns.clear();
+    joinPathToColumns.clear();
+    for (Map.Entry<Aliased<Dimension>, List<JoinPath>> joinPathEntry : allPaths.entrySet()) {
+      List<JoinPath> joinPaths = joinPathEntry.getValue();
+      Map<AbstractCubeTable, List<String>> fromColPaths = joinPathFromColumns.get(joinPathEntry.getKey().getObject());
+      Map<AbstractCubeTable, List<String>> toColPaths = joinPathToColumns.get(joinPathEntry.getKey().getObject());
+      if (fromColPaths == null) {
+        fromColPaths = new HashMap<>();
+        joinPathFromColumns.put(joinPathEntry.getKey().getObject(), fromColPaths);
+      }
+
+      if (toColPaths == null) {
+        toColPaths = new HashMap<>();
+        joinPathToColumns.put(joinPathEntry.getKey().getObject(), toColPaths);
+      }
+      populateJoinPathCols(joinPaths, fromColPaths, toColPaths);
+    }
+  }
+
+  private void populateJoinPathCols(List<JoinPath> joinPaths,
+    Map<AbstractCubeTable, List<String>> fromPathColumns, Map<AbstractCubeTable, List<String>> toPathColumns) {
+    for (JoinPath path : joinPaths) {
+      for (TableRelationship edge : path.getEdges()) {
+        AbstractCubeTable fromTable = edge.getFromTable();
+        String fromColumn = edge.getFromColumn();
+        List<String> columnsOfFromTable = fromPathColumns.get(fromTable);
+        if (columnsOfFromTable == null) {
+          columnsOfFromTable = new ArrayList<>();
+          fromPathColumns.put(fromTable, columnsOfFromTable);
+        }
+        columnsOfFromTable.add(fromColumn);
+
+        // Similarly populate for the 'to' table
+        AbstractCubeTable toTable = edge.getToTable();
+        String toColumn = edge.getToColumn();
+        List<String> columnsOfToTable = toPathColumns.get(toTable);
+        if (columnsOfToTable == null) {
+          columnsOfToTable = new ArrayList<>();
+          toPathColumns.put(toTable, columnsOfToTable);
+        }
+        columnsOfToTable.add(toColumn);
+      }
+    }
+  }
+
+  public void removeJoinedTable(Dimension dim) {
+    allPaths.remove(Aliased.create(dim));
+    joinPathFromColumns.remove(dim);
+  }
+
+  public String getFromString(String fromTable, CandidateFact fact, Set<Dimension> qdims,
+    Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext cubeql) throws LensException {
+    String fromString = fromTable;
+    log.info("All paths dump:{}", cubeql.getAutoJoinCtx().getAllPaths());
+    if (qdims == null || qdims.isEmpty()) {
+      return fromString;
+    }
+    // Compute the merged join clause string for the min cost joinClause
+    String clause = getMergedJoinClause(cubeql, cubeql.getAutoJoinCtx().getJoinClause(fact), dimsToQuery);
+
+    fromString += clause;
+    return fromString;
+  }
+
+  // Some refactoring needed to account for multiple join paths
+  public String getMergedJoinClause(CubeQueryContext cubeql, JoinClause joinClause,
+                                    Map<Dimension, CandidateDim> dimsToQuery) {
+    Set<String> clauses = new LinkedHashSet<>();
+    String joinTypeStr = "";
+    JoinType joinType = JoinType.INNER;
+
+    if (StringUtils.isNotBlank(joinTypeCfg)) {
+      joinType = JoinType.valueOf(joinTypeCfg.toUpperCase());
+      joinTypeStr = JoinUtils.getJoinTypeStr(joinType);
+    }
+
+    Iterator<JoinTree> iter = joinClause.getJoinTree().dft();
+    boolean hasBridgeTable = false;
+    boolean initedBridgeClauses = false;
+    StringBuilder bridgeSelectClause = new StringBuilder();
+    StringBuilder bridgeFromClause = new StringBuilder();
+    StringBuilder bridgeFilterClause = new StringBuilder();
+    StringBuilder bridgeJoinClause = new StringBuilder();
+    StringBuilder bridgeGroupbyClause = new StringBuilder();
+
+    while (iter.hasNext()) {
+      JoinTree cur = iter.next();
+      TableRelationship rel = cur.parentRelationship;
+      String toAlias, fromAlias;
+      fromAlias = cur.parent.getAlias();
+      toAlias = cur.getAlias();
+      hasBridgeTable = flattenBridgeTables && (hasBridgeTable || rel.isMapsToMany());
+      // We have to push user specified filters for the joined tables
+      String userFilter = null;
+      // Partition condition on the tables also needs to be pushed depending
+      // on the join
+      String storageFilter = null;
+
+      if (JoinType.INNER == joinType || JoinType.LEFTOUTER == joinType || JoinType.LEFTSEMI == joinType) {
+        // For inner and left joins push filter of right table
+        storageFilter = getStorageFilter(dimsToQuery, rel.getToTable(), toAlias);
+        dimsToQuery.get(rel.getToTable()).setWhereClauseAdded(toAlias);
+      } else if (JoinType.RIGHTOUTER == joinType) {
+        // For right outer joins, push filters of left table
+        if (rel.getFromTable() instanceof Dimension) {
+          storageFilter = getStorageFilter(dimsToQuery, rel.getFromTable(), fromAlias);
+          dimsToQuery.get(rel.getFromTable()).setWhereClauseAdded(fromAlias);
+        }
+      } else if (JoinType.FULLOUTER == joinType) {
+        // For full outer we need to push filters of both left and right
+        // tables in the join clause
+        String leftFilter = null, rightFilter = null;
+        String leftStorageFilter = null, rightStorgeFilter = null;
+
+        if (rel.getFromTable() instanceof Dimension) {
+          leftStorageFilter = getStorageFilter(dimsToQuery, rel.getFromTable(), fromAlias);
+          if (StringUtils.isNotBlank((leftStorageFilter))) {
+            dimsToQuery.get(rel.getFromTable()).setWhereClauseAdded(fromAlias);
+          }
+        }
+
+        rightStorgeFilter = getStorageFilter(dimsToQuery, rel.getToTable(), toAlias);
+        if (StringUtils.isNotBlank(rightStorgeFilter)) {
+          if (StringUtils.isNotBlank((leftStorageFilter))) {
+            leftStorageFilter += " and ";
+          }
+          dimsToQuery.get(rel.getToTable()).setWhereClauseAdded(toAlias);
+        }
+
+        userFilter = (leftFilter == null ? "" : leftFilter) + (rightFilter == null ? "" : rightFilter);
+        storageFilter =
+          (leftStorageFilter == null ? "" : leftStorageFilter)
+            + (rightStorgeFilter == null ? "" : rightStorgeFilter);
+      }
+      StringBuilder clause = new StringBuilder();
+
+      // if a bridge table is present in the path
+      if (hasBridgeTable) {
+        // if any relation has bridge table, the clause becomes the following :
+        // join (" select " + joinkey + " aggr over fields from bridge table + from bridgeTable + [where user/storage
+        // filters] + groupby joinkey) on joincond"
+        // Or
+        // " join (select " + joinkey + " aggr over fields from table reached through bridge table + from bridge table
+        // join <next tables> on join condition + [and user/storage filters] + groupby joinkey) on joincond
+        if (!initedBridgeClauses) {
+          // we just found a bridge table in the path we need to initialize the clauses for subquery required for
+          // aggregating fields of bridge table
+          // initialize select clause with join key
+          bridgeSelectClause.append(" (select ").append(toAlias).append(".").append(rel.getToColumn()).append(" as ")
+          .append(rel.getToColumn());
+          // group by join key
+          bridgeGroupbyClause.append(" group by ").append(toAlias).append(".").append(rel.getToColumn());
+          // from clause with bridge table
+          bridgeFromClause.append(" from ").append(dimsToQuery.get(rel.getToTable()).getStorageString(toAlias));
+          // we need to initialize filter clause with user filter clause or storage filter if applicable
+          if (StringUtils.isNotBlank(userFilter)) {
+            bridgeFilterClause.append(userFilter);
+          }
+          if (StringUtils.isNotBlank(storageFilter)) {
+            if (StringUtils.isNotBlank(bridgeFilterClause.toString())) {
+              bridgeFilterClause.append(" and ");
+            }
+            bridgeFilterClause.append(storageFilter);
+          }
+          // initialize final join clause
+          bridgeJoinClause.append(" on ").append(fromAlias).append(".")
+            .append(rel.getFromColumn()).append(" = ").append("%s")
+            .append(".").append(rel.getToColumn());
+          initedBridgeClauses = true;
+        } else {
+          // if bridge clauses are already inited, this is a next table getting joined with bridge table
+          // we will append a simple join clause
+          bridgeFromClause.append(joinTypeStr).append(" join ");
+          bridgeFromClause.append(dimsToQuery.get(rel.getToTable()).getStorageString(toAlias));
+          bridgeFromClause.append(" on ").append(fromAlias).append(".")
+            .append(rel.getFromColumn()).append(" = ").append(toAlias)
+            .append(".").append(rel.getToColumn());
+
+          if (StringUtils.isNotBlank(userFilter)) {
+            bridgeFromClause.append(" and ").append(userFilter);
+          }
+          if (StringUtils.isNotBlank(storageFilter)) {
+            bridgeFromClause.append(" and ").append(storageFilter);
+          }
+        }
+        if (cubeql.getTblAliasToColumns().get(toAlias) != null
+          && !cubeql.getTblAliasToColumns().get(toAlias).isEmpty()) {
+          // there are fields selected from this table after seeing bridge table in path
+          // we should make subQuery for this selection
+          clause.append(joinTypeStr).append(" join ");
+          clause.append(bridgeSelectClause.toString());
+          for (String col : cubeql.getTblAliasToColumns().get(toAlias)) {
+            clause.append(",").append(bridgeTableFieldAggr).append("(").append(toAlias)
+              .append(".").append(col)
+              .append(")")
+              .append(" as ").append(col);
+          }
+          String bridgeFrom = bridgeFromClause.toString();
+          clause.append(bridgeFrom);
+          String bridgeFilter = bridgeFilterClause.toString();
+          if (StringUtils.isNotBlank(bridgeFilter)) {
+            if (bridgeFrom.contains(" join ")) {
+              clause.append(" and ");
+            } else {
+              clause.append(" where");
+            }
+            clause.append(bridgeFilter);
+          }
+          clause.append(bridgeGroupbyClause.toString());
+          clause.append(") ").append(toAlias);
+          clause.append(String.format(bridgeJoinClause.toString(), toAlias));
+          clauses.add(clause.toString());
+        }
+        if (cur.getSubtrees().isEmpty()) {
+          // clear bridge flags and builders, as there are no more clauses in this tree.
+          hasBridgeTable = false;
+          initedBridgeClauses = false;
+          bridgeSelectClause.setLength(0);
+          bridgeFromClause.setLength(0);
+          bridgeFilterClause.setLength(0);
+          bridgeJoinClause.setLength(0);
+          bridgeGroupbyClause.setLength(0);
+        }
+      } else {
+        // Simple join clause is :
+        // joinType + " join " + destTable + " on " + joinCond + [" and" + userFilter] + ["and" + storageFilter]
+        clause.append(joinTypeStr).append(" join ");
+        //Add storage table name followed by alias
+        clause.append(dimsToQuery.get(rel.getToTable()).getStorageString(toAlias));
+        clause.append(" on ").append(fromAlias).append(".")
+          .append(rel.getFromColumn()).append(" = ").append(toAlias)
+          .append(".").append(rel.getToColumn());
+
+        if (StringUtils.isNotBlank(userFilter)) {
+          clause.append(" and ").append(userFilter);
+        }
+        if (StringUtils.isNotBlank(storageFilter)) {
+          clause.append(" and ").append(storageFilter);
+        }
+        clauses.add(clause.toString());
+      }
+    }
+    return StringUtils.join(clauses, "");
+  }
+
+  public Set<Dimension> getDimsOnPath(Map<Aliased<Dimension>, List<TableRelationship>> joinChain,
+    Set<Dimension> qdims) {
+    Set<Dimension> dimsOnPath = new HashSet<>();
+    for (Map.Entry<Aliased<Dimension>, List<TableRelationship>> entry : joinChain.entrySet()) {
+      List<TableRelationship> chain = entry.getValue();
+      Dimension table = entry.getKey().getObject();
+
+      // check if join with this dimension is required
+      if (!qdims.contains(table)) {
+        continue;
+      }
+
+      for (int i = chain.size() - 1; i >= 0; i--) {
+        TableRelationship rel = chain.get(i);
+        dimsOnPath.add((Dimension) rel.getToTable());
+      }
+    }
+    return dimsOnPath;
+  }
+
+  private String getStorageFilter(Map<Dimension, CandidateDim> dimsToQuery, AbstractCubeTable table, String alias) {
+    String whereClause = "";
+    if (dimsToQuery != null && dimsToQuery.get(table) != null) {
+      if (StringUtils.isNotBlank(dimsToQuery.get(table).getWhereClause())) {
+        whereClause = dimsToQuery.get(table).getWhereClause();
+        if (alias != null) {
+          whereClause = StorageUtil.getWhereClause(whereClause, alias);
+        }
+      }
+    }
+    return whereClause;
+  }
+
  /**
   * Whether automatic join resolution has been performed for this context.
   *
   * @return the joinsResolved flag
   */
  public boolean isJoinsResolved() {
    return joinsResolved;
  }
+
+  // Includes both queried join paths and optional join paths
+  public Set<String> getAllJoinPathColumnsOfTable(AbstractCubeTable table) {
+    Set<String> allPaths = new HashSet<>();
+    for (Map<AbstractCubeTable, List<String>> optPaths : joinPathFromColumns.values()) {
+      if (optPaths.get(table) != null) {
+        allPaths.addAll(optPaths.get(table));
+      }
+    }
+
+    for (Map<AbstractCubeTable, List<String>> optPaths : joinPathToColumns.values()) {
+      if (optPaths.get(table) != null) {
+        allPaths.addAll(optPaths.get(table));
+      }
+    }
+
+    return allPaths;
+  }
+
+  public void pruneAllPaths(CubeInterface cube, final Set<CandidateFact> cfacts,
+    final Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
+    // Remove join paths which cannot be satisfied by the resolved candidate
+    // fact and dimension tables
+    if (cfacts != null) {
+      // include columns from all picked facts
+      Set<String> factColumns = new HashSet<>();
+      for (CandidateFact cFact : cfacts) {
+        factColumns.addAll(cFact.getColumns());
+      }
+
+      for (List<JoinPath> paths : allPaths.values()) {
+        for (int i = 0; i < paths.size(); i++) {
+          JoinPath jp = paths.get(i);
+          List<String> cubeCols = jp.getColumnsForTable((AbstractCubeTable) cube);
+          if (cubeCols != null && !factColumns.containsAll(cubeCols)) {
+            // This path requires some columns from the cube which are not
+            // present in the candidate fact
+            // Remove this path
+            log.info("Removing join path:{} as columns :{} dont exist", jp, cubeCols);
+            paths.remove(i);
+            i--;
+          }
+        }
+      }
+      pruneEmptyPaths(allPaths);
+    }
+    pruneAllPaths(dimsToQuery);
+  }
+
+  /**
+   * Prunes allPaths by removing paths which contain columns that are not present in any candidate dims.
+   *
+   * @param candidateDims candidate dimensions
+   */
+  public void pruneAllPathsForCandidateDims(Map<Dimension, Set<CandidateDim>> candidateDims) throws LensException {
+    Map<Dimension, Set<String>> dimColumns = new HashMap<>();
+    // populate all columns present in candidate dims for each dimension
+    for (Map.Entry<Dimension, Set<CandidateDim>> entry : candidateDims.entrySet()) {
+      Dimension dim = entry.getKey();
+      Set<String> allColumns = new HashSet<>();
+      for (CandidateDim cdim : entry.getValue()) {
+        allColumns.addAll(cdim.getColumns());
+      }
+      dimColumns.put(dim, allColumns);
+    }
+    for (List<JoinPath> paths : allPaths.values()) {
+      for (int i = 0; i < paths.size(); i++) {
+        JoinPath jp = paths.get(i);
+        for (AbstractCubeTable refTable : jp.getAllTables()) {
+          List<String> cols = jp.getColumnsForTable(refTable);
+          if (refTable instanceof Dimension) {
+            if (cols != null && (dimColumns.get(refTable) == null || !dimColumns.get(refTable).containsAll(cols))) {
+              // This path requires some columns from the cube which are not present in any candidate dim
+              // Remove this path
+              log.info("Removing join path:{} as columns :{} don't exist", jp, cols);
+              paths.remove(i);
+              i--;
+              break;
+            }
+          }
+        }
+      }
+    }
+    pruneEmptyPaths(allPaths);
+  }
+
+  private void pruneEmptyPaths(Map<Aliased<Dimension>, List<JoinPath>> allPaths) throws LensException {
+    Iterator<Map.Entry<Aliased<Dimension>, List<JoinPath>>> iter = allPaths.entrySet().iterator();
+    Set<Dimension> noPathDims = new HashSet<>();
+    while (iter.hasNext()) {
+      Map.Entry<Aliased<Dimension>, List<JoinPath>> entry = iter.next();
+      if (entry.getValue().isEmpty()) {
+        noPathDims.add(entry.getKey().getObject());
+        iter.remove();
+      }
+    }
+    noPathDims.retainAll(requiredDimensions);
+
+    if (!noPathDims.isEmpty()) {
+      throw new LensException(LensCubeErrorCode.NO_JOIN_PATH.getLensErrorInfo(), autoJoinTarget.getName(),
+        noPathDims.toString());
+    }
+  }
+
+  private Map<Aliased<Dimension>, List<JoinPath>> pruneFactPaths(CubeInterface cube,
+    final CandidateFact cFact) throws LensException {
+    Map<Aliased<Dimension>, List<JoinPath>> prunedPaths = new HashMap<>();
+    // Remove join paths which cannot be satisfied by the candidate fact
+    for (Map.Entry<Aliased<Dimension>, List<JoinPath>> ppaths : allPaths.entrySet()) {
+      prunedPaths.put(ppaths.getKey(), new ArrayList<>(ppaths.getValue()));
+      List<JoinPath> paths = prunedPaths.get(ppaths.getKey());
+      for (int i = 0; i < paths.size(); i++) {
+        JoinPath jp = paths.get(i);
+        List<String> cubeCols = jp.getColumnsForTable((AbstractCubeTable) cube);
+        if (cubeCols != null && !cFact.getColumns().containsAll(cubeCols)) {
+          // This path requires some columns from the cube which are not
+          // present in the candidate fact
+          // Remove this path
+          log.info("Removing join path:{} as columns :{} don't exist", jp, cubeCols);
+          paths.remove(i);
+          i--;
+        }
+      }
+    }
+    pruneEmptyPaths(prunedPaths);
+    return prunedPaths;
+  }
+
+  private void pruneAllPaths(final Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
+    // Remove join paths which cannot be satisfied by the resolved dimension
+    // tables
+    if (dimsToQuery != null && !dimsToQuery.isEmpty()) {
+      for (CandidateDim candidateDim : dimsToQuery.values()) {
+        Set<String> dimCols = candidateDim.getTable().getAllFieldNames();
+        for (List<JoinPath> paths : allPaths.values()) {
+          for (int i = 0; i < paths.size(); i++) {
+            JoinPath jp = paths.get(i);
+            List<String> candidateDimCols = jp.getColumnsForTable(candidateDim.getBaseTable());
+            if (candidateDimCols != null && !dimCols.containsAll(candidateDimCols)) {
+              // This path requires some columns from the dimension which are
+              // not present in the candidate dim
+              // Remove this path
+              log.info("Removing join path:{} as columns :{} dont exist", jp, candidateDimCols);
+              paths.remove(i);
+              i--;
+            }
+          }
+        }
+      }
+      pruneEmptyPaths(allPaths);
+    }
+  }
+
+  /**
+   * There can be multiple join paths between a dimension and the target. Set of all possible join clauses is the
+   * cartesian product of join paths of all dimensions
+   */
+  private Iterator<JoinClause> getJoinClausesForAllPaths(final CandidateFact fact,
+    final Set<Dimension> qDims, final CubeQueryContext cubeql) throws LensException {
+    Map<Aliased<Dimension>, List<JoinPath>> allPaths;
+    // if fact is passed only look at paths possible from fact to dims
+    if (fact != null) {
+      allPaths = pruneFactPaths(cubeql.getCube(), fact);
+    } else {
+      allPaths = new LinkedHashMap<>(this.allPaths);
+    }
+    // prune allPaths with qdims
+    pruneAllPathsWithQueriedDims(allPaths, qDims);
+
+    // Number of paths in each path set
+    final int[] groupSizes = new int[allPaths.values().size()];
+    // Total number of elements in the cartesian product
+    int numSamples = 1;
+    // All path sets
+    final List<List<JoinPath>> pathSets = new ArrayList<>();
+    // Dimension corresponding to the path sets
+    final List<Aliased<Dimension>> dimensions = new ArrayList<>(groupSizes.length);
+
+    int i = 0;
+    for (Map.Entry<Aliased<Dimension>, List<JoinPath>> entry : allPaths.entrySet()) {
+      dimensions.add(entry.getKey());
+      List<JoinPath> group = entry.getValue();
+      pathSets.add(group);
+      groupSizes[i] = group.size();
+      numSamples *= groupSizes[i];
+      i++;
+    }
+
+    final int[] selection = new int[groupSizes.length];
+    final int MAX_SAMPLE_COUNT = numSamples;
+
+    // Return a lazy iterator over all possible join chains
+    return new Iterator<JoinClause>() {
+      int sample = 0;
+
+      @Override
+      public boolean hasNext() {
+        return sample < MAX_SAMPLE_COUNT;
+      }
+
+      @Override
+      public JoinClause next() {
+        Map<Aliased<Dimension>, List<TableRelationship>> chain = new LinkedHashMap<>();
+        //generate next permutation.
+        for (int i = groupSizes.length - 1, base = sample; i >= 0; base /= groupSizes[i], i--) {
+          selection[i] = base % groupSizes[i];
+        }
+        for (int i = 0; i < selection.length; i++) {
+          int selectedPath = selection[i];
+          List<TableRelationship> path = pathSets.get(i).get(selectedPath).getEdges();
+          chain.put(dimensions.get(i), path);
+        }
+
+        Set<Dimension> dimsOnPath = getDimsOnPath(chain, qDims);
+
+        sample++;
+        // Cost of join = number of tables joined in the clause
+        return new JoinClause(cubeql, chain, dimsOnPath);
+      }
+
+      @Override
+      public void remove() {
+        throw new UnsupportedOperationException("Cannot remove elements!");
+      }
+    };
+  }
+
+  /**
+   * Given allPaths, it will remove entries where key is a non-join chain dimension and not contained in qdims
+   *
+   * @param allPaths All join paths
+   * @param qDims queried dimensions
+   */
+  private void pruneAllPathsWithQueriedDims(Map<Aliased<Dimension>, List<JoinPath>> allPaths,
+    Set<Dimension> qDims) {
+    Iterator<Map.Entry<Aliased<Dimension>, List<JoinPath>>> iterator = allPaths.entrySet().iterator();
+    while (iterator.hasNext()) {
+      Map.Entry<Aliased<Dimension>, List<JoinPath>> cur = iterator.next();
+      if (!qDims.contains(cur.getKey().getObject())) {
+        log.info("removing from allPaths: {}", cur);
+        iterator.remove();
+      }
+    }
+  }
+
+  public Set<Dimension> pickOptionalTables(final CandidateFact fact,
+    Set<Dimension> qdims, CubeQueryContext cubeql) throws LensException {
+    // Find the min cost join clause and add dimensions in the clause as optional dimensions
+    Set<Dimension> joiningOptionalTables = new HashSet<>();
+    if (qdims == null) {
+      return joiningOptionalTables;
+    }
+    // find least cost path
+    Iterator<JoinClause> itr = getJoinClausesForAllPaths(fact, qdims, cubeql);
+    JoinClause minCostClause = null;
+    while (itr.hasNext()) {
+      JoinClause clause = itr.next();
+      if (minCostClause == null || minCostClause.getCost() > clause.getCost()) {
+        minCostClause = clause;
+      }
+    }
+
+    if (minCostClause == null) {
+      throw new LensException(LensCubeErrorCode.NO_JOIN_PATH.getLensErrorInfo(),
+          qdims.toString(), autoJoinTarget.getName());
+    }
+
+    log.info("Fact: {} minCostClause:{}", fact, minCostClause);
+    if (fact != null) {
+      cubeql.getAutoJoinCtx().getFactClauses().put(fact, minCostClause);
+    } else {
+      cubeql.getAutoJoinCtx().setMinCostClause(minCostClause);
+    }
+    for (Dimension dim : minCostClause.getDimsInPath()) {
+      if (!qdims.contains(dim)) {
+        joiningOptionalTables.add(dim);
+      }
+    }
+
+    minCostClause.initChainColumns();
+    // prune candidate dims of joiningOptionalTables wrt joining columns
+    for (Dimension dim : joiningOptionalTables) {
+      for (Iterator<CandidateDim> i = cubeql.getCandidateDimTables().get(dim).iterator(); i.hasNext();) {
+        CandidateDim cDim = i.next();
+        if (!cDim.getColumns().containsAll(minCostClause.chainColumns.get(dim))) {
+          i.remove();
+          log.info("Not considering dimTable:{} as its columns are not part of any join paths. Join columns:{}",
+            cDim.getTable(), minCostClause.chainColumns.get(dim));
+          cubeql.addDimPruningMsgs(dim, cDim.getTable(),
+            CandidateTablePruneCause.noColumnPartOfAJoinPath(minCostClause.chainColumns.get(dim)));
+        }
+      }
+      if (cubeql.getCandidateDimTables().get(dim).size() == 0) {
+        throw new LensException(LensCubeErrorCode.NO_DIM_HAS_COLUMN.getLensErrorInfo(), dim.getName(),
+          minCostClause.chainColumns.get(dim).toString());
+      }
+    }
+
+    return joiningOptionalTables;
+  }
+
  /**
   * @return all resolved join paths, keyed by aliased destination dimension
   */
  public Map<Aliased<Dimension>, List<JoinPath>> getAllPaths() {
    return allPaths;
  }
+
+  public boolean isReachableDim(Dimension dim) {
+    Aliased<Dimension> aliased = Aliased.create(dim);
+    return isReachableDim(aliased);
+  }
+
+  public boolean isReachableDim(Dimension dim, String alias) {
+    Aliased<Dimension> aliased = Aliased.create(dim, alias);
+    return isReachableDim(aliased);
+  }
+
+  private boolean isReachableDim(Aliased<Dimension> aliased) {
+    return allPaths.containsKey(aliased) && !allPaths.get(aliased).isEmpty();
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinClause.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinClause.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinClause.java
new file mode 100644
index 0000000..acc9d5c
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinClause.java
@@ -0,0 +1,139 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse.join;
+
+import java.util.*;
+
+import org.apache.lens.cube.metadata.AbstractCubeTable;
+import org.apache.lens.cube.metadata.Dimension;
+import org.apache.lens.cube.metadata.join.TableRelationship;
+import org.apache.lens.cube.parse.Aliased;
+import org.apache.lens.cube.parse.CubeQueryContext;
+
+import lombok.Getter;
+import lombok.ToString;
+
+@ToString
+public class JoinClause implements Comparable<JoinClause> {
+  private final int cost;
+  // all dimensions in path except target
+  @Getter
+  private final Set<Dimension> dimsInPath;
+  private CubeQueryContext cubeql;
+  private final Map<Aliased<Dimension>, List<TableRelationship>> chain;
+  @Getter
+  private final JoinTree joinTree;
+  transient Map<AbstractCubeTable, Set<String>> chainColumns = new HashMap<>();
+
+  public JoinClause(CubeQueryContext cubeql, Map<Aliased<Dimension>,
+    List<TableRelationship>> chain, Set<Dimension> dimsInPath) {
+    this.cubeql = cubeql;
+    this.chain = chain;
+    this.joinTree = mergeJoinChains(chain);
+    this.cost = joinTree.getNumEdges();
+    this.dimsInPath = dimsInPath;
+  }
+
+  void initChainColumns() {
+    for (List<TableRelationship> path : chain.values()) {
+      for (TableRelationship edge : path) {
+        Set<String> fcols = chainColumns.get(edge.getFromTable());
+        if (fcols == null) {
+          fcols = new HashSet<>();
+          chainColumns.put(edge.getFromTable(), fcols);
+        }
+        fcols.add(edge.getFromColumn());
+
+        Set<String> tocols = chainColumns.get(edge.getToTable());
+        if (tocols == null) {
+          tocols = new HashSet<>();
+          chainColumns.put(edge.getToTable(), tocols);
+        }
+        tocols.add(edge.getToColumn());
+      }
+    }
+  }
+
+  public int getCost() {
+    return cost;
+  }
+
+  @Override
+  public int compareTo(JoinClause joinClause) {
+    return cost - joinClause.getCost();
+  }
+
+  /**
+   * Takes chains and merges them in the form of a tree. If two chains have some common path till some table and
+   * bifurcate from there, then in the chain, both paths will have the common path but the resultant tree will have
+   * single path from root(cube) to that table and paths will bifurcate from there.
+   * <p/>
+   * For example, citystate   =   [basecube.cityid=citydim.id], [citydim.stateid=statedim.id]
+   *              cityzip     =   [basecube.cityid=citydim.id], [citydim.zipcode=zipdim.code]
+   * <p/>
+   * Without merging, the behaviour is like this:
+   * <p/>
+   * <p/>
+   *                  (basecube.cityid=citydim.id)          (citydim.stateid=statedim.id)
+   *                  _____________________________citydim____________________________________statedim
+   *                 |
+   *   basecube------|
+   *                 |_____________________________citydim____________________________________zipdim
+   *
+   *                  (basecube.cityid=citydim.id)          (citydim.zipcode=zipdim.code)
+   *
+   * <p/>
+   * Merging will result in a tree like following
+   * <p/>                                                  (citydim.stateid=statedim.id)
+   * <p/>                                                ________________________________ statedim
+   *             (basecube.cityid=citydim.id)           |
+   * basecube-------------------------------citydim---- |
+   *                                                    |________________________________  zipdim
+   *
+   *                                                       (citydim.zipcode=zipdim.code)
+   *
+   * <p/>
+   * Doing this will reduce the number of joins wherever possible.
+   *
+   * @param chain Joins in Linear format.
+   * @return Joins in Tree format
+   */
+  public JoinTree mergeJoinChains(Map<Aliased<Dimension>, List<TableRelationship>> chain) {
+    Map<String, Integer> aliasUsage = new HashMap<>();
+    JoinTree root = JoinTree.createRoot();
+    for (Map.Entry<Aliased<Dimension>, List<TableRelationship>> entry : chain.entrySet()) {
+      JoinTree current = root;
+      // Last element in this list is link from cube to first dimension
+      for (int i = entry.getValue().size() - 1; i >= 0; i--) {
+        // Adds a child if needed, or returns a child already existing corresponding to the given link.
+        current = current.addChild(entry.getValue().get(i), cubeql, aliasUsage);
+      }
+      // This is a destination table. Decide alias separately. e.g. chainname
+      // nullcheck is necessary because dimensions can be destinations too. In that case getAlias() == null
+      if (entry.getKey().getAlias() != null) {
+        current.setAlias(entry.getKey().getAlias());
+      }
+    }
+    if (root.getSubtrees().size() > 0) {
+      root.setAlias(cubeql.getAliasForTableName(
+        root.getSubtrees().keySet().iterator().next().getFromTable().getName()));
+    }
+    return root;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinTree.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinTree.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinTree.java
new file mode 100644
index 0000000..197847c
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinTree.java
@@ -0,0 +1,164 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse.join;
+
+import java.util.*;
+
+import org.apache.lens.cube.metadata.AbstractCubeTable;
+import org.apache.lens.cube.metadata.join.TableRelationship;
+import org.apache.lens.cube.parse.CubeQueryContext;
+
+import org.apache.hadoop.hive.ql.parse.JoinType;
+
+import lombok.Data;
+import lombok.EqualsAndHashCode;
+import lombok.ToString;
+
+@Data
+@ToString(exclude = "parent")
+@EqualsAndHashCode(exclude = "parent")
+public class JoinTree {
+  //parent of the node
+  JoinTree parent;
+  // current table is parentRelationship.destTable;
+  TableRelationship parentRelationship;
+  // Alias for the join clause
+  String alias;
+  private Map<TableRelationship, JoinTree> subtrees = new LinkedHashMap<>();
+  // Number of nodes from root to this node. depth of root is 0. Unused for now.
+  private int depthFromRoot;
+  // join type of the current table.
+  JoinType joinType;
+
+  public static JoinTree createRoot() {
+    return new JoinTree(null, null, 0);
+  }
+
+  public JoinTree(JoinTree parent, TableRelationship tableRelationship,
+                  int depthFromRoot) {
+    this.parent = parent;
+    this.parentRelationship = tableRelationship;
+    this.depthFromRoot = depthFromRoot;
+  }
+
+  public JoinTree addChild(TableRelationship tableRelationship,
+                           CubeQueryContext query, Map<String, Integer> aliasUsage) {
+    if (getSubtrees().get(tableRelationship) == null) {
+      JoinTree current = new JoinTree(this, tableRelationship,
+        this.depthFromRoot + 1);
+      // Set alias. Need to compute only when new node is being created.
+      // The following code ensures that For intermediate tables, aliases are given
+      // in the order cityDim, cityDim_0, cityDim_1, ...
+      // And for destination tables, an alias will be decided from here but might be
+      // overridden outside this function.
+      AbstractCubeTable destTable = tableRelationship.getToTable();
+      current.setAlias(query.getAliasForTableName(destTable.getName()));
+      if (aliasUsage.get(current.getAlias()) == null) {
+        aliasUsage.put(current.getAlias(), 0);
+      } else {
+        aliasUsage.put(current.getAlias(), aliasUsage.get(current.getAlias()) + 1);
+        current.setAlias(current.getAlias() + "_" + (aliasUsage.get(current.getAlias()) - 1));
+      }
+      getSubtrees().put(tableRelationship, current);
+    }
+    return getSubtrees().get(tableRelationship);
+  }
+
+  // Recursive computation of number of edges.
+  public int getNumEdges() {
+    int ret = 0;
+    for (JoinTree tree : getSubtrees().values()) {
+      ret += 1;
+      ret += tree.getNumEdges();
+    }
+    return ret;
+  }
+
+  public boolean isLeaf() {
+    return getSubtrees().isEmpty();
+  }
+
+  // Breadth First Traversal. Unused currently.
+  public Iterator<JoinTree> bft() {
+    return new Iterator<JoinTree>() {
+      List<JoinTree> remaining = new ArrayList<JoinTree>() {
+        {
+          addAll(getSubtrees().values());
+        }
+      };
+
+      @Override
+      public boolean hasNext() {
+        return remaining.isEmpty();
+      }
+
+      @Override
+      public JoinTree next() {
+        JoinTree retVal = remaining.remove(0);
+        remaining.addAll(retVal.getSubtrees().values());
+        return retVal;
+      }
+
+      @Override
+      public void remove() {
+        throw new RuntimeException("Not implemented");
+      }
+    };
+  }
+
+  // Depth first traversal of the tree. Used in forming join string.
+  public Iterator<JoinTree> dft() {
+    return new Iterator<JoinTree>() {
+      Stack<JoinTree> joinTreeStack = new Stack<JoinTree>() {
+        {
+          addAll(getSubtrees().values());
+        }
+      };
+
+      @Override
+      public boolean hasNext() {
+        return !joinTreeStack.isEmpty();
+      }
+
+      @Override
+      public JoinTree next() {
+        JoinTree retVal = joinTreeStack.pop();
+        joinTreeStack.addAll(retVal.getSubtrees().values());
+        return retVal;
+      }
+
+      @Override
+      public void remove() {
+        throw new RuntimeException("Not implemented");
+      }
+    };
+  }
+
+  public Set<JoinTree> leaves() {
+    Set<JoinTree> leaves = new HashSet<>();
+    Iterator<JoinTree> dft = dft();
+    while (dft.hasNext()) {
+      JoinTree cur = dft.next();
+      if (cur.isLeaf()) {
+        leaves.add(cur);
+      }
+    }
+    return leaves;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinUtils.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinUtils.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinUtils.java
new file mode 100644
index 0000000..4efa67b
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinUtils.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse.join;
+
+import org.apache.hadoop.hive.ql.parse.JoinType;
+
+public class JoinUtils {
+
+  private JoinUtils() {
+  }
+
+  public static String getJoinTypeStr(JoinType joinType) {
+    if (joinType == null) {
+      return "";
+    }
+    switch (joinType) {
+    case FULLOUTER:
+      return " full outer";
+    case INNER:
+      return " inner";
+    case LEFTOUTER:
+      return " left outer";
+    case LEFTSEMI:
+      return " left semi";
+    case UNIQUE:
+      return " unique";
+    case RIGHTOUTER:
+      return " right outer";
+    default:
+      return "";
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
index 1638825..0c4871c 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
@@ -30,7 +30,7 @@ import java.util.*;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.ExprColumn.ExprSpec;
-import org.apache.lens.cube.metadata.ReferencedDimAtrribute.ChainRefCol;
+import org.apache.lens.cube.metadata.ReferencedDimAttribute.ChainRefCol;
 import org.apache.lens.cube.metadata.timeline.EndsAndHolesPartitionTimeline;
 import org.apache.lens.cube.metadata.timeline.PartitionTimeline;
 import org.apache.lens.cube.metadata.timeline.StoreAllPartitionTimeline;
@@ -147,7 +147,7 @@ public class TestCubeMetastoreClient {
     CubeMetastoreClient.close();
   }
 
-  private static void defineCube(String cubeName, String cubeNameWithProps, String derivedCubeName,
+  private static void defineCube(final String cubeName, String cubeNameWithProps, String derivedCubeName,
     String derivedCubeNameWithProps) throws LensException {
     cubeMeasures = new HashSet<>();
     cubeMeasures.add(new ColumnMeasure(
@@ -177,21 +177,16 @@ public class TestCubeMetastoreClient {
     }
     cubeDimensions = new HashSet<>();
     List<CubeDimAttribute> locationHierarchy = new ArrayList<>();
-    locationHierarchy.add(new ReferencedDimAtrribute(new FieldSchema("zipcode", "int", "zip"), "Zip refer",
-      new TableReference("zipdim", "zipcode")));
-    locationHierarchy.add(new ReferencedDimAtrribute(new FieldSchema("cityid", "int", "city"), "City refer",
-      new TableReference("citydim", "id")));
-    locationHierarchy.add(new ReferencedDimAtrribute(new FieldSchema("stateid", "int", "state"), "State refer",
-      new TableReference("statedim", "id")));
-    locationHierarchy.add(new ReferencedDimAtrribute(new FieldSchema("countryid", "int", "country"), "Country refer",
-      new TableReference("countrydim", "id")));
+    locationHierarchy.add(new BaseDimAttribute(new FieldSchema("zipcode", "int", "zip")));
+    locationHierarchy.add(new BaseDimAttribute(new FieldSchema("cityid", "int", "city")));
+    locationHierarchy.add(new BaseDimAttribute(new FieldSchema("stateid", "int", "state")));
+    locationHierarchy.add(new BaseDimAttribute(new FieldSchema("countryid", "int", "country")));
     List<String> regions = Arrays.asList("APAC", "EMEA", "USA");
     locationHierarchy.add(new BaseDimAttribute(new FieldSchema("regionname", "string", "region"), "regionname", null,
       null, null, null, regions));
     cubeDimensions.add(new HierarchicalDimAttribute("location", "location hierarchy", locationHierarchy));
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("dim1", "string", "basedim")));
-    cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("dim2", "id", "ref dim"), "Dim2 refer",
-      new TableReference("testdim2", "id")));
+    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("dim2", "id", "ref dim"), "Dim2 refer", null, null, null));
     Set<CubeDimAttribute> dummyDimAttributes = Sets.newHashSet();
     for (int i = 0; i < 5000; i++) {
       dummyDimAttributes.add(new BaseDimAttribute(new FieldSchema("dummy_dim" + i, "string", "dummy dim " + i),
@@ -224,15 +219,14 @@ public class TestCubeMetastoreClient {
       "SUBSTR EXPR", expr1, expr2));
 
     List<CubeDimAttribute> locationHierarchyWithStartTime = new ArrayList<>();
-    locationHierarchyWithStartTime.add(new ReferencedDimAtrribute(new FieldSchema("zipcode2", "int", "zip"),
-      "Zip refer2", new TableReference("zipdim", "zipcode"), NOW, NOW,
-      100.0, true, 1000L));
-    locationHierarchyWithStartTime.add(new ReferencedDimAtrribute(new FieldSchema("cityid2", "int", "city"),
-      "City refer2", new TableReference("citydim", "id"), NOW, null, null));
-    locationHierarchyWithStartTime.add(new ReferencedDimAtrribute(new FieldSchema("stateid2", "int", "state"),
-      "state refer2", new TableReference("statedim", "id"), NOW, null, 100.0));
-    locationHierarchyWithStartTime.add(new ReferencedDimAtrribute(new FieldSchema("countryid2", "int", "country"),
-      "Country refer2", new TableReference("countrydim", "id"), null, null, null));
+    locationHierarchyWithStartTime.add(new BaseDimAttribute(new FieldSchema("zipcode2", "int", "zip"),
+      "Zip refer2", NOW, NOW, 100.0, 1000L));
+    locationHierarchyWithStartTime.add(new BaseDimAttribute(new FieldSchema("cityid2", "int", "city"),
+      "City refer2", NOW, null, null));
+    locationHierarchyWithStartTime.add(new BaseDimAttribute(new FieldSchema("stateid2", "int", "state"),
+      "state refer2", NOW, null, 100.0));
+    locationHierarchyWithStartTime.add(new BaseDimAttribute(new FieldSchema("countryid2", "int", "country"),
+      "Country refer2", null, null, null));
     locationHierarchyWithStartTime.add(new BaseDimAttribute(new FieldSchema("regionname2", "string", "region"),
       "regionname2", null, null, null, null, regions));
 
@@ -240,19 +234,10 @@ public class TestCubeMetastoreClient {
       .add(new HierarchicalDimAttribute("location2", "localtion hierarchy2", locationHierarchyWithStartTime));
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("dim1startTime", "string", "basedim"),
       "Dim With starttime", NOW, null, 100.0));
-    cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("dim2start", "string", "ref dim"),
-      "Dim2 with starttime", new TableReference("testdim2", "id"),
-      NOW, NOW, 100.0));
+    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("dim2start", "string", "ref dim"),
+      "Dim2 with starttime", NOW, NOW, 100.0));
 
-    List<TableReference> multiRefs = new ArrayList<>();
-    multiRefs.add(new TableReference("testdim2", "id"));
-    multiRefs.add(new TableReference("testdim3", "id"));
-    multiRefs.add(new TableReference("testdim4", "id"));
-
-    cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("dim3", "string", "multi ref dim"), "Dim3 refer",
-      multiRefs));
-    cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("dim3start", "string", "multi ref dim"),
-      "Dim3 with starttime", multiRefs, NOW, null, 100.0));
+    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("dim3", "string", "multi ref dim")));
 
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("region", "string", "region dim"), "region", null, null,
       null, null, regions));
@@ -278,7 +263,81 @@ public class TestCubeMetastoreClient {
     chain.add(new TableReference("citydim", "id"));
     cityChain.addPath(chain);
     joinChains.add(cityChain);
-    cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("zipcityname", "string", "zip city name"),
+    joinChains.add(new JoinChain("cubeState", "cube-state", "state thru cube") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "stateid"));
+            add(new TableReference("statedim", "id"));
+          }
+        });
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "stateid2"));
+            add(new TableReference("statedim", "id"));
+          }
+        });
+      }
+    });
+    joinChains.add(new JoinChain("cubeCountry", "cube-country", "country thru cube") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "countryid"));
+            add(new TableReference("countrydim", "id"));
+          }
+        });
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "countryid2"));
+            add(new TableReference("countrydim", "id"));
+          }
+        });
+      }
+    });
+    joinChains.add(new JoinChain("dim2chain", "cube-dim2", "state thru cube") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "dim2"));
+            add(new TableReference("testdim2", "id"));
+          }
+        });
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "dim2start"));
+            add(new TableReference("testdim2", "id"));
+          }
+        });
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "dim3"));
+            add(new TableReference("testdim2", "id"));
+          }
+        });
+      }
+    });
+    joinChains.add(new JoinChain("dim3chain", "cube-dim3", "state thru cube") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "dim3"));
+            add(new TableReference("testdim3", "id"));
+          }
+        });
+      }
+    });
+    joinChains.add(new JoinChain("dim4chain", "cube-dim4", "state thru cube") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "dim3"));
+            add(new TableReference("testdim4", "id"));
+          }
+        });
+      }
+    });
+    cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("zipcityname", "string", "zip city name"),
       "Zip city name", "cityFromZip", "name", null, null, null));
     cubeMeasures.addAll(dummyMeasure);
     cubeDimensions.addAll(dummyDimAttributes);
@@ -302,45 +361,118 @@ public class TestCubeMetastoreClient {
   }
 
   private static void defineUberDims() throws LensException {
+    Map<String, String> dimProps = new HashMap<>();
     // Define zip dimension
     zipAttrs.add(new BaseDimAttribute(new FieldSchema("zipcode", "int", "code")));
     zipAttrs.add(new BaseDimAttribute(new FieldSchema("f1", "string", "field1")));
     zipAttrs.add(new BaseDimAttribute(new FieldSchema("f2", "string", "field1")));
-    List<TableReference> stateRefs = Lists.newArrayList(new TableReference("statedim", "id"),
-      new TableReference("stateWeatherDim", "id"));
-    zipAttrs.add(new ReferencedDimAtrribute(new FieldSchema("stateid", "int", "state id"), "State refer", stateRefs));
-    zipAttrs.add(new ReferencedDimAtrribute(new FieldSchema("cityid", "int", "city id"), "City refer",
-      new TableReference("citydim", "id")));
-    zipAttrs.add(new ReferencedDimAtrribute(new FieldSchema("countryid", "int", "country id"), "Country refer",
-      new TableReference("countrydim", "id")));
-    zipDim = new Dimension("zipdim", zipAttrs);
+    zipAttrs.add(new BaseDimAttribute(new FieldSchema("stateid", "int", "state id"), "State refer", null, null, null));
+    zipAttrs.add(new BaseDimAttribute(new FieldSchema("cityid", "int", "city id"), "City refer", null, null, null));
+    zipAttrs.add(new BaseDimAttribute(new FieldSchema("countryid", "int", "country id"), "Country refer", null, null,
+      null));
+    zipAttrs.add(new ReferencedDimAttribute(new FieldSchema("statename", "name", "state name"), "State Name",
+      "zipstate", "name", null, null, null));
+
+    Set<JoinChain> joinChains = new HashSet<>();
+    joinChains.add(new JoinChain("zipCity", "zip-city", "city thru zip") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference("zipdim", "cityid"));
+            add(new TableReference("citydim", "id"));
+          }
+        });
+      }
+    });
+    joinChains.add(new JoinChain("zipState", "zip-state", "state thru zip") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference("zipdim", "stateid"));
+            add(new TableReference("statedim", "id"));
+          }
+        });
+      }
+    });
+    joinChains.add(new JoinChain("zipCountry", "zip-country", "country thru zip") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference("zipdim", "countryid"));
+            add(new TableReference("countrydim", "id"));
+          }
+        });
+      }
+    });
+    zipDim = new Dimension("zipdim", zipAttrs, null, joinChains, dimProps, 0L);
 
     // Define city table
+    joinChains = new HashSet<>();
+    dimProps = new HashMap<>();
     cityAttrs.add(new BaseDimAttribute(new FieldSchema("id", "int", "code")));
     cityAttrs.add(new BaseDimAttribute(new FieldSchema("name", "string", "city name")));
-    cityAttrs.add(new ReferencedDimAtrribute(new FieldSchema("stateid", "int", "state id"), "State refer",
-      new TableReference("statedim", "id")));
+    cityAttrs.add(new BaseDimAttribute(new FieldSchema("stateid", "int", "state id"), "State refer", null, null, null));
+    cityAttrs.add(new ReferencedDimAttribute(new FieldSchema("statename", "name", "state name"), "State Name",
+      "citystate", "name", null, null, null));
     dimExpressions.add(new ExprColumn(new FieldSchema("stateAndCountry", "String", "state and country together"),
-      "State and Country", new ExprSpec("concat(statedim.name, \":\", countrydim.name)", null, null),
+      "State and Country", new ExprSpec("concat(cityState.name, \":\", cityCountry.name)", null, null),
       new ExprSpec("state_and_country", null, null)));
     dimExpressions.add(new ExprColumn(new FieldSchema("CityAddress", "string", "city with state and city and zip"),
-      "City Address", "concat(citydim.name, \":\", statedim.name, \":\", countrydim.name, \":\", zipcode.code)"));
-    Map<String, String> dimProps = getHashMap(getDimTimedDimensionKey("citydim"), getDatePartitionKey());
-    cityDim = new Dimension("citydim", cityAttrs, dimExpressions, dimProps, 0L);
+      "City Address", "concat(citydim.name, \":\", cityState.name, \":\", cityCountry.name, \":\", zipcode.code)"));
+    dimProps.put(MetastoreUtil.getDimTimedDimensionKey("citydim"), TestCubeMetastoreClient.getDatePartitionKey());
+
+
+    joinChains.add(new JoinChain("cityState", "city-state", "state thru city") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference("citydim", "stateid"));
+            add(new TableReference("statedim", "id"));
+          }
+        });
+      }
+    });
+    joinChains.add(new JoinChain("cityCountry", "city-state", "country thru city") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference("citydim", "stateid"));
+            add(new TableReference("statedim", "id"));
+            add(new TableReference("statedim", "countryid"));
+            add(new TableReference("countrydim", "id"));
+          }
+        });
+      }
+    });
+    cityDim = new Dimension("citydim", cityAttrs, dimExpressions, joinChains, dimProps, 0L);
 
     // Define state table
+    joinChains = new HashSet<>();
+    dimProps = new HashMap<>();
     stateAttrs.add(new BaseDimAttribute(new FieldSchema("id", "int", "state id"), "State ID", null, null, null));
     stateAttrs.add(new BaseDimAttribute(new FieldSchema("name", "string", "state name")));
     stateAttrs.add(new BaseDimAttribute(new FieldSchema("capital", "string", "state capital")));
-    stateAttrs.add(new ReferencedDimAtrribute(new FieldSchema("countryid", "int", "country id"), "Country refer",
-      new TableReference("countrydim", "id")));
+    stateAttrs.add(new BaseDimAttribute(new FieldSchema("countryid", "int", "country id"), "Country refer", null,
+      null, null));
+    stateAttrs.add(new ReferencedDimAttribute(new FieldSchema("countryname", "name", "country name"), "country Name",
+      "statecountry", "name", null, null, null));
+    joinChains.add(new JoinChain("stateCountry", "state country", "country thru state") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference("statedim", "countryid"));
+            add(new TableReference("countrydim", "id"));
+          }
+        });
+      }
+    });
     stateDim = new Dimension("statedim", stateAttrs);
 
     countryAttrs.add(new BaseDimAttribute(new FieldSchema("id", "int", "country id")));
     countryAttrs.add(new BaseDimAttribute(new FieldSchema("name", "string", "country name")));
     countryAttrs.add(new BaseDimAttribute(new FieldSchema("capital", "string", "country capital")));
     countryAttrs.add(new BaseDimAttribute(new FieldSchema("region", "string", "region name")));
-    countryDim = new Dimension("countrydim", stateAttrs);
+    countryDim = new Dimension("countrydim", countryAttrs);
 
   }
 
@@ -380,9 +512,9 @@ public class TestCubeMetastoreClient {
     assertTrue(client.tableExists(stateDim.getName()));
     assertTrue(client.tableExists(countryDim.getName()));
 
-    validateDim(zipDim, zipAttrs, "zipcode", "stateid");
-    validateDim(cityDim, cityAttrs, "id", "stateid");
-    validateDim(stateDim, stateAttrs, "id", "countryid");
+    validateDim(zipDim, zipAttrs, "zipcode", "statename");
+    validateDim(cityDim, cityAttrs, "id", "statename");
+    validateDim(stateDim, stateAttrs, "id", "countryname");
     validateDim(countryDim, countryAttrs, "id", null);
 
     // validate expression in citydim
@@ -396,7 +528,7 @@ public class TestCubeMetastoreClient {
 
     ExprColumn stateCountryExpr = new ExprColumn(new FieldSchema("stateAndCountry", "String",
       "state and country together with hiphen as separator"), "State and Country",
-      "concat(statedim.name, \"-\", countrydim.name)");
+      "concat(citystate.name, \"-\", citycountry.name)");
     ExprSpec expr1 = new ExprSpec();
     expr1.setExpr("concat(countrydim.name, \"-\", countrydim.name)");
     stateCountryExpr.addExpression(expr1);
@@ -420,13 +552,12 @@ public class TestCubeMetastoreClient {
     ExprColumn stateAndCountryActual = city.getExpressionByName("stateAndCountry");
     assertNotNull(stateAndCountryActual.getExpressions());
     assertEquals(2, stateAndCountryActual.getExpressions().size());
-    assertTrue(stateAndCountryActual.getExpressions().contains("concat(statedim.name, \"-\", countrydim.name)"));
-    assertTrue(stateAndCountryActual.getExpressions()
-      .contains("concat(countrydim.name, \"-\", countrydim.name)"));
+    assertTrue(stateAndCountryActual.getExpressions().contains("concat(citystate.name, \"-\", citycountry.name)"));
+    assertTrue(stateAndCountryActual.getExpressions().contains("concat(countrydim.name, \"-\", countrydim.name)"));
 
     assertNotNull(city.getExpressionByName("stateAndCountry"));
     assertEquals(city.getExpressionByName("stateAndCountry").getExpr(),
-      "concat(statedim.name, \"-\", countrydim.name)");
+      "concat(citystate.name, \"-\", citycountry.name)");
 
     stateAndCountryActual.removeExpression("concat(countrydim.name, \"-\", countrydim.name)");
     city.alterExpression(stateAndCountryActual);
@@ -434,7 +565,6 @@ public class TestCubeMetastoreClient {
     Dimension cityAltered = client.getDimension(city.getName());
     assertEquals(1, cityAltered.getExpressionByName("stateAndCountry").getExpressions().size());
 
-
     List<TableReference> chain = new ArrayList<>();
     chain.add(new TableReference("zipdim", "cityid"));
     chain.add(new TableReference("citydim", "id"));
@@ -449,12 +579,11 @@ public class TestCubeMetastoreClient {
     Dimension toAlter = new Dimension(tbl);
     toAlter.alterAttribute(new BaseDimAttribute(new FieldSchema("newZipDim", "int", "new dim added"), null, null, null,
       null, 1000L));
-    toAlter.alterAttribute(new ReferencedDimAtrribute(new FieldSchema("newRefDim", "int", "new ref-dim added"),
-      "New city ref", new TableReference("citydim", "id")));
+    toAlter.alterAttribute(new ReferencedDimAttribute(new FieldSchema("newRefDim", "int", "new ref-dim added"),
+      "New city ref", "cubecity", "name", null, null, null));
     toAlter.alterAttribute(new BaseDimAttribute(new FieldSchema("f2", "varchar", "modified field")));
-    List<TableReference> stateRefs = Lists.newArrayList(new TableReference("statedim", "id"));
-    toAlter.alterAttribute(new ReferencedDimAtrribute(new FieldSchema("stateid", "int", "state id"), "State refer",
-      stateRefs));
+    toAlter.alterAttribute(new BaseDimAttribute(new FieldSchema("stateid", "int", "state id"), "State refer altered",
+      null, null, null));
     toAlter.removeAttribute("f1");
     toAlter.getProperties().put("alter.prop", "altered");
     toAlter.alterExpression(new ExprColumn(new FieldSchema("formattedcode", "string", "formatted zipcode"),
@@ -464,7 +593,6 @@ public class TestCubeMetastoreClient {
     client.alterDimension(zipDim.getName(), toAlter);
     Dimension altered = client.getDimension(zipDim.getName());
 
-
     assertEquals(toAlter, altered);
     assertNotNull(altered.getAttributeByName("newZipDim"));
     assertNotNull(altered.getAttributeByName("newRefDim"));
@@ -481,26 +609,24 @@ public class TestCubeMetastoreClient {
     assertEquals((((BaseDimAttribute) newzipdim).getNumOfDistinctValues().get()), Long.valueOf(1000));
 
     CubeDimAttribute newrefdim = altered.getAttributeByName("newRefDim");
-    assertTrue(newrefdim instanceof ReferencedDimAtrribute);
-    assertEquals(((ReferencedDimAtrribute) newrefdim).getReferences().size(), 1);
-    assertEquals(((ReferencedDimAtrribute) newrefdim).getReferences().get(0).getDestTable(), cityDim.getName());
-    assertEquals(((ReferencedDimAtrribute) newrefdim).getReferences().get(0).getDestColumn(), "id");
+    assertTrue(newrefdim instanceof ReferencedDimAttribute);
+    assertEquals(((ReferencedDimAttribute) newrefdim).getChainRefColumns().size(), 1);
+    assertEquals(((ReferencedDimAttribute) newrefdim).getChainRefColumns().get(0).getChainName(), "cubecity");
+    assertEquals(((ReferencedDimAttribute) newrefdim).getChainRefColumns().get(0).getRefColumn(), "name");
 
     CubeDimAttribute f2 = altered.getAttributeByName("f2");
     assertTrue(f2 instanceof BaseDimAttribute);
     assertEquals(((BaseDimAttribute) f2).getType(), "varchar");
 
     CubeDimAttribute stateid = altered.getAttributeByName("stateid");
-    assertTrue(stateid instanceof ReferencedDimAtrribute);
-    assertEquals(((ReferencedDimAtrribute) stateid).getReferences().size(), 1);
-    assertEquals(((ReferencedDimAtrribute) stateid).getReferences().get(0).getDestTable(), stateDim.getName());
-    assertEquals(((ReferencedDimAtrribute) stateid).getReferences().get(0).getDestColumn(), "id");
+    assertTrue(stateid instanceof BaseDimAttribute);
+    assertEquals(stateid.getDisplayString(), "State refer altered");
 
     assertEquals(altered.getProperties().get("alter.prop"), "altered");
 
     assertEquals(altered.getChainByName("stateFromZip"), zipState);
 
-    assertEquals(altered.getJoinChains().size(), 1);
+    assertEquals(altered.getJoinChains().size(), 4);
     JoinChain zipchain = altered.getChainByName("stateFromZip");
     assertEquals(zipchain.getDisplayString(), "Zip State");
     assertEquals(zipchain.getDescription(), "zip State desc");
@@ -518,14 +644,14 @@ public class TestCubeMetastoreClient {
     Table dimTbl = client.getHiveTable(udim.getName());
     assertTrue(client.isDimension(dimTbl));
     Dimension dim = new Dimension(dimTbl);
-    assertTrue(udim.equals(dim));
+    assertTrue(udim.equals(dim), "Equals failed for " + dim.getName());
     assertTrue(udim.equals(client.getDimension(udim.getName())));
     assertEquals(dim.getAttributes().size(), attrs.size());
     assertNotNull(dim.getAttributeByName(basedim));
     assertTrue(dim.getAttributeByName(basedim) instanceof BaseDimAttribute);
     if (referdim != null) {
       assertNotNull(dim.getAttributeByName(referdim));
-      assertTrue(dim.getAttributeByName(referdim) instanceof ReferencedDimAtrribute);
+      assertTrue(dim.getAttributeByName(referdim) instanceof ReferencedDimAttribute);
     }
     assertEquals(udim.getAttributeNames().size() + udim.getExpressionNames().size(), dim.getAllFieldNames().size());
   }
@@ -575,7 +701,7 @@ public class TestCubeMetastoreClient {
     assertTrue(cube2.getJoinChainNames().contains("cityfromzip"));
     assertTrue(cube2.getJoinChainNames().contains("city"));
     assertFalse(cube2.getJoinChains().isEmpty());
-    assertEquals(cube2.getJoinChains().size(), 2);
+    assertEquals(cube2.getJoinChains().size(), 7);
     JoinChain zipchain = cube2.getChainByName("cityfromzip");
     assertEquals(zipchain.getDisplayString(), "Zip City");
     assertEquals(zipchain.getDescription(), "zip city desc");
@@ -598,7 +724,7 @@ public class TestCubeMetastoreClient {
     assertEquals(citychain.getPaths().get(0).getReferences().get(0).toString(), "testmetastorecube.cityid");
     assertEquals(citychain.getPaths().get(0).getReferences().get(1).toString(), "citydim.id");
     assertNotNull(cube2.getDimAttributeByName("zipcityname"));
-    ChainRefCol zipCityChain = ((ReferencedDimAtrribute) cube2.getDimAttributeByName("zipcityname"))
+    ChainRefCol zipCityChain = ((ReferencedDimAttribute) cube2.getDimAttributeByName("zipcityname"))
       .getChainRefColumns().get(0);
     assertEquals(zipCityChain.getChainName(), "cityfromzip");
     assertEquals(zipCityChain.getRefColumn(), "name");


[43/51] [abbrv] lens git commit: LENS-928 : DELETE request on /resultset API gives 500 in case of InMemory Result

Posted by de...@apache.org.
LENS-928 : DELETE request on /resultset API gives 500 in case of InMemory Result


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/a5091fe0
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/a5091fe0
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/a5091fe0

Branch: refs/heads/current-release-line
Commit: a5091fe0c2b351bd5d8643de9edaac23f2bd3793
Parents: 2f0e5fd
Author: Deepak Barr <de...@apache.org>
Authored: Tue Feb 2 16:36:07 2016 +0530
Committer: Deepak Kumar Barr <de...@apache.org>
Committed: Tue Feb 2 16:36:07 2016 +0530

----------------------------------------------------------------------
 .../src/main/java/org/apache/lens/driver/es/ESDriver.java       | 4 +---
 .../main/java/org/apache/lens/server/api/driver/LensDriver.java | 2 +-
 .../org/apache/lens/server/query/QueryExecutionServiceImpl.java | 5 ++++-
 .../java/org/apache/lens/server/query/TestQueryService.java     | 4 ++++
 4 files changed, 10 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/a5091fe0/lens-driver-es/src/main/java/org/apache/lens/driver/es/ESDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-es/src/main/java/org/apache/lens/driver/es/ESDriver.java b/lens-driver-es/src/main/java/org/apache/lens/driver/es/ESDriver.java
index d166e43..8a4f410 100644
--- a/lens-driver-es/src/main/java/org/apache/lens/driver/es/ESDriver.java
+++ b/lens-driver-es/src/main/java/org/apache/lens/driver/es/ESDriver.java
@@ -195,10 +195,8 @@ public class ESDriver extends AbstractLensDriver {
 
   @Override
   public void closeResultSet(QueryHandle handle) throws LensException {
-    try {
+    if (resultSetMap.containsKey(handle)) {
       resultSetMap.remove(handle);
-    } catch (NullPointerException e) {
-      throw new LensException("The query does not exist or was already purged", e);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/a5091fe0/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java
index 3d38ddd..ed97673 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java
@@ -151,7 +151,7 @@ public interface LensDriver extends Externalizable {
   LensResultSet fetchResultSet(QueryContext context) throws LensException;
 
   /**
-   * Close the resultset for the query.
+   * Close the resultset for the query. Closing an already closed resultset should not result in failures.
    *
    * @param handle The query handle
    * @throws LensException the lens exception

http://git-wip-us.apache.org/repos/asf/lens/blob/a5091fe0/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
index 19077d2..e61398b 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
@@ -2100,7 +2100,10 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
       acquire(sessionHandle);
       resultSets.remove(queryHandle);
       // Ask driver to close result set
-      getQueryContext(queryHandle).getSelectedDriver().closeResultSet(queryHandle);
+      QueryContext ctx=getQueryContext(queryHandle);
+      if (null != ctx) {
+        ctx.getSelectedDriver().closeResultSet(queryHandle);
+      }
     } finally {
       release(sessionHandle);
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/a5091fe0/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index 5d949d2..ef8c1aa 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -984,6 +984,10 @@ public class TestQueryService extends LensJerseyTest {
     validateInmemoryResult(resultset);
 
     validNotFoundForHttpResult(target(), lensSessionId, handle);
+    waitForPurge(0, queryService.finishedQueries);
+    APIResult result=target.path(handle.toString()).path("resultset")
+      .queryParam("sessionid", lensSessionId).request().delete(APIResult.class);
+    assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
   }
 
   @Test


[24/51] [abbrv] lens git commit: LENS-735 : Remove accepting TableReferences for ReferenceDimAttribute

Posted by de...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 61fb73c..4810559 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -135,7 +135,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
   @Test
   public void testCubeQuery() throws Exception {
     CubeQueryContext rewrittenQuery =
-      rewriteCtx("cube select" + " SUM(msr2) from testCube where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
+      rewriteCtx("cube select SUM(msr2) from testCube where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
     String expected =
       getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
         getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
@@ -151,7 +151,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, false);
     conf.set(DRIVER_SUPPORTED_STORAGES, "C1,C2,C4");
     CubeQueryContext cubeQueryContext =
-      rewriteCtx("cube select" + " SUM(msr2) from testCube where " + THIS_YEAR_RANGE, conf);
+      rewriteCtx("cube select SUM(msr2) from testCube where " + THIS_YEAR_RANGE, conf);
     PruneCauses<CubeFactTable> pruneCause = cubeQueryContext.getFactPruningMsgs();
     int lessDataCauses = 0;
     for (Map.Entry<CubeFactTable, List<CandidateTablePruneCause>> entry : pruneCause.entrySet()) {
@@ -167,7 +167,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
   @Test
   public void testLightestFactFirst() throws Exception {
     // testFact is lighter than testFact2.
-    String hqlQuery = rewrite("cube select" + " SUM(msr2) from testCube where " + TWO_DAYS_RANGE, getConfWithStorages(
+    String hqlQuery = rewrite("cube select SUM(msr2) from testCube where " + TWO_DAYS_RANGE, getConfWithStorages(
       "C2"));
     String expected =
       getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
@@ -202,7 +202,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
   @Test
   public void testDerivedCube() throws ParseException, LensException, HiveException, ClassNotFoundException {
     CubeQueryContext rewrittenQuery =
-      rewriteCtx("cube select" + " SUM(msr2) from derivedCube where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
+      rewriteCtx("cube select SUM(msr2) from derivedCube where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
     String expected =
       getExpectedQuery(DERIVED_CUBE_NAME, "select sum(derivedCube.msr2) FROM ", null, null,
         getWhereForDailyAndHourly2days(DERIVED_CUBE_NAME, "C2_testfact"));
@@ -211,7 +211,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     assertNotNull(rewrittenQuery.getNonExistingParts());
 
     LensException th = getLensExceptionInRewrite(
-      "select SUM(msr4) from derivedCube" + " where " + TWO_DAYS_RANGE, getConf());
+      "select SUM(msr4) from derivedCube where " + TWO_DAYS_RANGE, getConf());
     assertEquals(th.getErrorCode(), LensCubeErrorCode.COLUMN_NOT_FOUND.getLensErrorInfo().getErrorCode());
 
     // test join
@@ -219,27 +219,28 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.setBoolean(DISABLE_AUTO_JOINS, false);
     String hqlQuery;
 
-    hqlQuery = rewrite("cube select" + " testdim2.name, SUM(msr2) from derivedCube where " + TWO_DAYS_RANGE, conf);
+    /*
+    Accessing join chains from derived cubes are not supported yet.
+    hqlQuery = rewrite("cube select dim2chain.name, SUM(msr2) from derivedCube where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(DERIVED_CUBE_NAME, "select testdim2.name, sum(derivedCube.msr2) FROM ", " JOIN "
-          + getDbName() + "c1_testdim2tbl testdim2 ON derivedCube.dim2 = "
-          + " testdim2.id and (testdim2.dt = 'latest') ", null, "group by (testdim2.name)", null,
+      getExpectedQuery(DERIVED_CUBE_NAME, "select dim2chain.name, sum(derivedCube.msr2) FROM ", " JOIN "
+          + getDbName() + "c1_testdim2tbl dim2chain ON derivedCube.dim2 = "
+          + " dim2chain.id and (dim2chain.dt = 'latest') ", null, "group by (dim2chain.name)", null,
         getWhereForDailyAndHourly2days(DERIVED_CUBE_NAME, "c1_summary2"));
     compareQueries(hqlQuery, expected);
 
     // Test that explicit join query passes with join resolver disabled
     conf.setBoolean(DISABLE_AUTO_JOINS, true);
-    List<String> joinWhereConds = new ArrayList<String>();
-    joinWhereConds.add(StorageUtil.getWherePartClause("dt", "testdim2", StorageConstants.getPartitionsForLatest()));
     hqlQuery =
-      rewrite("cube select" + " testdim2.name, SUM(msr2) from derivedCube "
-        + " inner join testdim2 on derivedCube.dim2 = testdim2.id " + "where " + TWO_DAYS_RANGE, conf);
+      rewrite("cube select citydim.name, SUM(msr2) from derivedCube "
+        + " inner join citydim on derivedCube.cityid = citydim.id where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(DERIVED_CUBE_NAME, "select testdim2.name, sum(derivedCube.msr2) FROM ",
-        " inner JOIN " + getDbName() + "c1_testdim2tbl testdim2 ON derivedCube.dim2 = " + " testdim2.id ", null,
-        "group by (testdim2.name)", joinWhereConds,
+      getExpectedQuery(DERIVED_CUBE_NAME, "select citydim.name, sum(derivedCube.msr2) FROM ",
+        " inner JOIN " + getDbName()
+        + "c1_citytable citydim ON derivedCube.cityid = citydim.id  and (citydim.dt = 'latest')", null,
+        "group by (citydim.name)", null,
         getWhereForDailyAndHourly2days(DERIVED_CUBE_NAME, "c1_summary2"));
-    compareQueries(hqlQuery, expected);
+    compareQueries(hqlQuery, expected);*/
   }
 
   @Test
@@ -619,24 +620,24 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.setBoolean(DISABLE_AUTO_JOINS, false);
     String hql, expected;
     hql = rewrite(
-      "select countrydim.name, msr2 from" + " testCube" + " where countrydim.region = 'asia' and "
+      "select cubecountry.name, msr2 from" + " testCube" + " where cubecountry.region = 'asia' and "
         + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(TEST_CUBE_NAME, "select countrydim.name, sum(testcube.msr2)" + " FROM ", " JOIN " + getDbName()
-          + "c3_countrytable_partitioned countrydim on testcube.countryid=countrydim.id and countrydim.dt='latest'",
-        "countrydim.region='asia'",
-        " group by countrydim.name ", null,
+      getExpectedQuery(TEST_CUBE_NAME, "select cubecountry.name, sum(testcube.msr2)" + " FROM ", " JOIN " + getDbName()
+          + "c3_countrytable_partitioned cubecountry on testcube.countryid=cubecountry.id and cubecountry.dt='latest'",
+        "cubecountry.region='asia'",
+        " group by cubecountry.name ", null,
         getWhereForHourly2days(TEST_CUBE_NAME, "C3_testfact2_raw"));
     compareQueries(hql, expected);
     hql = rewrite(
-      "select statedim.name, statedim.countryid, msr2 from" + " testCube" + " where statedim.countryid = 5 and "
+      "select cubestate.name, cubestate.countryid, msr2 from" + " testCube" + " where cubestate.countryid = 5 and "
         + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(TEST_CUBE_NAME, "select statedim.name, statedim.countryid, sum(testcube.msr2)" + " FROM ",
+      getExpectedQuery(TEST_CUBE_NAME, "select cubestate.name, cubestate.countryid, sum(testcube.msr2)" + " FROM ",
         " JOIN " + getDbName()
-          + "c3_statetable_partitioned statedim ON" + " testCube.stateid = statedim.id and statedim.dt = 'latest'",
-        "statedim.countryid=5",
-        " group by statedim.name, statedim.countryid", null,
+          + "c3_statetable_partitioned cubestate ON" + " testCube.stateid = cubestate.id and cubestate.dt = 'latest'",
+        "cubestate.countryid=5",
+        " group by cubestate.name, cubestate.countryid", null,
         getWhereForHourly2days(TEST_CUBE_NAME, "C3_testfact2_raw"));
     compareQueries(hql, expected);
   }
@@ -649,12 +650,9 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String hqlQuery =
       rewrite("select SUM(msr2) from testCube" + " join citydim on testCube.cityid = citydim.id" + " where "
         + TWO_DAYS_RANGE, conf);
-    List<String> joinWhereConds = new ArrayList<String>();
-    //    joinWhereConds.add(StorageUtil.getWherePartClause("dt", "citydim", StorageConstants.getPartitionsForLatest
-    // ()));
     String expected =
       getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2)" + " FROM ", " INNER JOIN " + getDbName()
-          + "c2_citytable citydim ON" + " testCube.cityid = citydim.id", null, null, joinWhereConds,
+          + "c2_citytable citydim ON" + " testCube.cityid = citydim.id", null, null, null,
         getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
@@ -673,16 +671,15 @@ public class TestCubeRewriter extends TestQueryRewrite {
       rewrite("select statedim.name, SUM(msr2) from" + " testCube" + " join citydim on testCube.cityid = citydim.id"
         + " left outer join statedim on statedim.id = citydim.stateid"
         + " right outer join zipdim on citydim.zipcode = zipdim.code" + " where " + TWO_DAYS_RANGE, getConf());
-    joinWhereConds = new ArrayList<>();
-    joinWhereConds.add(StorageUtil.getWherePartClause("dt", "citydim", StorageConstants.getPartitionsForLatest()));
-    joinWhereConds.add(StorageUtil.getWherePartClause("dt", "zipdim", StorageConstants.getPartitionsForLatest()));
     expected =
       getExpectedQuery(TEST_CUBE_NAME,
         "select statedim.name," + " sum(testcube.msr2) FROM ", "INNER JOIN " + getDbName()
-          + "c1_citytable citydim ON" + " testCube.cityid = citydim.id LEFT OUTER JOIN " + getDbName()
+          + "c1_citytable citydim ON testCube.cityid = citydim.id and citydim.dt='latest' LEFT OUTER JOIN "
+          + getDbName()
           + "c1_statetable statedim" + " ON statedim.id = citydim.stateid AND "
           + "(statedim.dt = 'latest') RIGHT OUTER JOIN " + getDbName() + "c1_ziptable"
-          + " zipdim ON citydim.zipcode = zipdim.code", null, " group by" + " statedim.name ", joinWhereConds,
+          + " zipdim ON citydim.zipcode = zipdim.code and zipdim.dt='latest'", null, " group by" + " statedim.name ",
+        null,
         getWhereForHourly2days(TEST_CUBE_NAME, "C1_testfact2"));
     compareQueries(hqlQuery, expected);
 
@@ -691,14 +688,12 @@ public class TestCubeRewriter extends TestQueryRewrite {
       rewrite("select st.name, SUM(msr2) from" + " testCube TC" + " join citydim CT on TC.cityid = CT.id"
         + " left outer join statedim ST on ST.id = CT.stateid"
         + " right outer join zipdim ZT on CT.zipcode = ZT.code" + " where " + TWO_DAYS_RANGE, getConf());
-    joinWhereConds = new ArrayList<String>();
-    joinWhereConds.add(StorageUtil.getWherePartClause("dt", "ct", StorageConstants.getPartitionsForLatest()));
-    joinWhereConds.add(StorageUtil.getWherePartClause("dt", "zt", StorageConstants.getPartitionsForLatest()));
     expected =
       getExpectedQuery("tc", "select st.name," + " sum(tc.msr2) FROM ", " INNER JOIN " + getDbName()
-          + "c1_citytable ct ON" + " tc.cityid = ct.id LEFT OUTER JOIN " + getDbName() + "c1_statetable st"
+          + "c1_citytable ct ON" + " tc.cityid = ct.id and ct.dt='latest' LEFT OUTER JOIN "
+          + getDbName() + "c1_statetable st"
           + " ON st.id = ct.stateid and (st.dt = 'latest') " + "RIGHT OUTER JOIN " + getDbName() + "c1_ziptable"
-          + " zt ON ct.zipcode = zt.code", null, " group by" + " st.name ", joinWhereConds,
+          + " zt ON ct.zipcode = zt.code and zt.dt='latest'", null, " group by" + " st.name ", null,
         getWhereForHourly2days("tc", "C1_testfact2"));
     compareQueries(hqlQuery, expected);
 
@@ -727,7 +722,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     LensException th = getLensExceptionInRewrite(
       "select name, SUM(msr2) from testCube" + " join citydim" + " where " + TWO_DAYS_RANGE
         + " group by name", getConf());
-    assertEquals(th.getErrorCode(), LensCubeErrorCode.NO_JOIN_CONDITION_AVAIABLE.getLensErrorInfo().getErrorCode());
+    assertEquals(th.getErrorCode(), LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE.getLensErrorInfo().getErrorCode());
   }
 
   @Test
@@ -779,13 +774,10 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String hqlQuery =
       rewrite("select name, SUM(msr2) from" + " testCube join citydim on testCube.cityid = citydim.id where "
         + TWO_DAYS_RANGE, conf);
-    List<String> joinWhereConds = new ArrayList<String>();
-    //    joinWhereConds.add(StorageUtil.getWherePartClause("dt", "citydim", StorageConstants.getPartitionsForLatest
-    // ()));
     String expected =
       getExpectedQuery(TEST_CUBE_NAME, "select citydim.name," + " sum(testcube.msr2) FROM ", "INNER JOIN " + getDbName()
           + "c2_citytable citydim ON" + " testCube.cityid = citydim.id", null, " group by citydim.name ",
-        joinWhereConds, getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
+        null, getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
@@ -860,21 +852,21 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.setBoolean(DISABLE_AUTO_JOINS, false);
     conf.set(DRIVER_SUPPORTED_STORAGES, "C1, C2");
     hqlQuery =
-      rewrite("SELECT citydim.name AS g1," + " CASE  WHEN citydim.name=='NULL'  THEN 'NULL' "
-        + " WHEN citydim.name=='X'  THEN 'X-NAME' " + " WHEN citydim.name=='Y'  THEN 'Y-NAME' "
-        + " ELSE 'DEFAULT'   END  AS g2, " + " statedim.name AS g3," + " statedim.id AS g4, "
-        + " zipdim.code!=1  AND " + " ((zipdim.f1==\"xyz\"  AND  (zipdim.f2 >= \"3\"  AND "
-        + "  zipdim.f2 !=\"NULL\"  AND  zipdim.f2 != \"uk\")) "
-        + "  OR (zipdim.f2==\"adc\"  AND  zipdim.f1==\"js\" "
-        + "  AND  ( citydim.name == \"X\"  OR  citydim.name == \"Y\" )) "
-        + " OR ((zipdim.f1==\"api\"  OR  zipdim.f1==\"uk\"  OR  (zipdim.f1==\"adc\"  AND  zipdim.f1!=\"js\"))"
-        + "  AND  citydim.id==12) ) AS g5," + " zipdim.code==1  AND "
-        + " ((zipdim.f1==\"xyz\"  AND  (zipdim.f2 >= \"3\"  AND "
-        + "  zipdim.f2 !=\"NULL\"  AND  zipdim.f2 != \"uk\")) "
-        + " OR (zipdim.f2==\"adc\"  AND  zipdim.f1==\"js\" "
-        + " AND  ( citydim.name == \"X\"  OR  citydim.name == \"Y\" )) "
-        + "  OR ((zipdim.f1==\"api\"  OR  zipdim.f1==\"uk\"  OR  (zipdim.f1==\"adc\"  AND  zipdim.f1!=\"js\"))"
-        + "    AND  citydim.id==12) ) AS g6, " + "  zipdim.f1 AS g7, "
+      rewrite("SELECT cubecity.name AS g1," + " CASE  WHEN cubecity.name=='NULL'  THEN 'NULL' "
+        + " WHEN cubecity.name=='X'  THEN 'X-NAME' " + " WHEN cubecity.name=='Y'  THEN 'Y-NAME' "
+        + " ELSE 'DEFAULT'   END  AS g2, " + " cubestate.name AS g3," + " cubestate.id AS g4, "
+        + " cubezip.code!=1  AND " + " ((cubezip.f1==\"xyz\"  AND  (cubezip.f2 >= \"3\"  AND "
+        + "  cubezip.f2 !=\"NULL\"  AND  cubezip.f2 != \"uk\")) "
+        + "  OR (cubezip.f2==\"adc\"  AND  cubezip.f1==\"js\" "
+        + "  AND  ( cubecity.name == \"X\"  OR  cubecity.name == \"Y\" )) "
+        + " OR ((cubezip.f1==\"api\"  OR  cubezip.f1==\"uk\"  OR  (cubezip.f1==\"adc\"  AND  cubezip.f1!=\"js\"))"
+        + "  AND  cubecity.id==12) ) AS g5," + " cubezip.code==1  AND "
+        + " ((cubezip.f1==\"xyz\"  AND  (cubezip.f2 >= \"3\"  AND "
+        + "  cubezip.f2 !=\"NULL\"  AND  cubezip.f2 != \"uk\")) "
+        + " OR (cubezip.f2==\"adc\"  AND  cubezip.f1==\"js\" "
+        + " AND  ( cubecity.name == \"X\"  OR  cubecity.name == \"Y\" )) "
+        + "  OR ((cubezip.f1==\"api\"  OR  cubezip.f1==\"uk\"  OR  (cubezip.f1==\"adc\"  AND  cubezip.f1!=\"js\"))"
+        + "    AND  cubecity.id==12) ) AS g6, " + "  cubezip.f1 AS g7, "
         + "  format_number(SUM(msr1),\"##################.###\") AS a1,"
         + "  format_number(SUM(msr2),\"##################.###\") AS a2, "
         + "  format_number(SUM(msr3),\"##################.###\") AS a3, "
@@ -884,35 +876,38 @@ public class TestCubeRewriter extends TestQueryRewrite {
         + "  FROM testCube where " + TWO_DAYS_RANGE + " HAVING (SUM(msr1) >=1000)  AND (SUM(msr2)>=0.01)", conf);
     String actualExpr =
       ""
-        + " join " + getDbName() + "c1_statetable statedim on testcube.stateid=statedim.id and (statedim.dt='latest')"
-        + " join " + getDbName() + "c1_ziptable zipdim on testcube.zipcode = zipdim.code and (zipdim.dt = 'latest')  "
-        + " join " + getDbName() + "c1_citytable citydim on testcube.cityid = citydim.id and (citydim.dt = 'latest')"
+        + " join " + getDbName() + "c1_statetable cubestate on testcube.stateid=cubestate.id and "
+        + "(cubestate.dt='latest')"
+        + " join " + getDbName()
+        + "c1_ziptable cubezip on testcube.zipcode = cubezip.code and (cubezip.dt = 'latest')  "
+        + " join " + getDbName()
+        + "c1_citytable cubecity on testcube.cityid = cubecity.id and (cubecity.dt = 'latest')"
         + "";
     expected =
       getExpectedQuery(
         TEST_CUBE_NAME,
-        "SELECT ( citydim.name ) as `g1` ,"
-          + "  case  when (( citydim.name ) ==  'NULL' ) then  'NULL'  when (( citydim.name ) ==  'X' )"
-          + " then  'X-NAME'  when (( citydim.name ) ==  'Y' ) then  'Y-NAME'"
-          + "  else  'DEFAULT'  end  as `g2` , ( statedim.name ) as `g3` , ( statedim.id ) as `g4` ,"
-          + " ((( zipdim.code ) !=  1 ) and ((((( zipdim.f1 ) ==  \"xyz\" )"
-          + " and (((( zipdim.f2 ) >=  \"3\" ) and (( zipdim.f2 ) !=  \"NULL\" ))"
-          + " and (( zipdim.f2 ) !=  \"uk\" ))) or (((( zipdim.f2 ) ==  \"adc\" )"
-          + " and (( zipdim.f1 ) ==  \"js\" ))"
-          + " and ((( citydim.name ) ==  \"X\" ) or (( citydim.name ) ==  \"Y\" ))))"
-          + " or ((((( zipdim.f1 ) ==  \"api\" )"
-          + " or (( zipdim.f1 ) ==  \"uk\" )) or ((( zipdim.f1 ) ==  \"adc\" )"
-          + " and (( zipdim.f1 ) !=  \"js\" )))"
-          + " and (( citydim.id ) ==  12 )))) as `g5` , ((( zipdim.code ) ==  1 )"
-          + " and ((((( zipdim.f1 ) ==  \"xyz\" ) and (((( zipdim.f2 ) >=  \"3\" )"
-          + " and (( zipdim.f2 ) !=  \"NULL\" ))"
-          + " and (( zipdim.f2 ) !=  \"uk\" ))) or (((( zipdim.f2 ) ==  \"adc\" )"
-          + " and (( zipdim.f1 ) ==  \"js\" ))"
-          + " and ((( citydim.name ) ==  \"X\" ) or (( citydim.name ) ==  \"Y\" ))))"
-          + " or ((((( zipdim.f1 ) ==  \"api\" )"
-          + " or (( zipdim.f1 ) ==  \"uk\" )) or ((( zipdim.f1 ) ==  \"adc\" )"
-          + " and (( zipdim.f1 ) !=  \"js\" )))"
-          + " and (( citydim.id ) ==  12 )))) as `g6` , ( zipdim.f1 ) as `g7` ,"
+        "SELECT ( cubecity.name ) as `g1` ,"
+          + "  case  when (( cubecity.name ) ==  'NULL' ) then  'NULL'  when (( cubecity.name ) ==  'X' )"
+          + " then  'X-NAME'  when (( cubecity.name ) ==  'Y' ) then  'Y-NAME'"
+          + "  else  'DEFAULT'  end  as `g2` , ( cubestate.name ) as `g3` , ( cubestate.id ) as `g4` ,"
+          + " ((( cubezip.code ) !=  1 ) and ((((( cubezip.f1 ) ==  \"xyz\" )"
+          + " and (((( cubezip.f2 ) >=  \"3\" ) and (( cubezip.f2 ) !=  \"NULL\" ))"
+          + " and (( cubezip.f2 ) !=  \"uk\" ))) or (((( cubezip.f2 ) ==  \"adc\" )"
+          + " and (( cubezip.f1 ) ==  \"js\" ))"
+          + " and ((( cubecity.name ) ==  \"X\" ) or (( cubecity.name ) ==  \"Y\" ))))"
+          + " or ((((( cubezip.f1 ) ==  \"api\" )"
+          + " or (( cubezip.f1 ) ==  \"uk\" )) or ((( cubezip.f1 ) ==  \"adc\" )"
+          + " and (( cubezip.f1 ) !=  \"js\" )))"
+          + " and (( cubecity.id ) ==  12 )))) as `g5` , ((( cubezip.code ) ==  1 )"
+          + " and ((((( cubezip.f1 ) ==  \"xyz\" ) and (((( cubezip.f2 ) >=  \"3\" )"
+          + " and (( cubezip.f2 ) !=  \"NULL\" ))"
+          + " and (( cubezip.f2 ) !=  \"uk\" ))) or (((( cubezip.f2 ) ==  \"adc\" )"
+          + " and (( cubezip.f1 ) ==  \"js\" ))"
+          + " and ((( cubecity.name ) ==  \"X\" ) or (( cubecity.name ) ==  \"Y\" ))))"
+          + " or ((((( cubezip.f1 ) ==  \"api\" )"
+          + " or (( cubezip.f1 ) ==  \"uk\" )) or ((( cubezip.f1 ) ==  \"adc\" )"
+          + " and (( cubezip.f1 ) !=  \"js\" )))"
+          + " and (( cubecity.id ) ==  12 )))) as `g6` , ( cubezip.f1 ) as `g7` ,"
           + " format_number(sum(( testcube.msr1 )),  \"##################.###\" ) as `a1` ,"
           + " format_number(sum(( testcube.msr2 )),  \"##################.###\" ) as `a2` ,"
           + " format_number(sum(( testcube.msr3 )),  \"##################.###\" ) as `a3`, "
@@ -923,47 +918,48 @@ public class TestCubeRewriter extends TestQueryRewrite {
           + "  FROM ",
         actualExpr,
         null,
-        " GROUP BY ( citydim.name ), case  when (( citydim.name ) ==  'NULL' ) "
-          + "then  'NULL'  when (( citydim.name ) ==  'X' ) then  'X-NAME'  when (( citydim.name ) ==  'Y' )"
-          + " then  'Y-NAME'  else  'DEFAULT'  end, ( statedim.name ), ( statedim.id ),"
-          + " ((( zipdim.code ) !=  1 ) and ((((( zipdim.f1 ) ==  \"xyz\" ) and (((( zipdim.f2 ) >=  \"3\" )"
-          + " and (( zipdim.f2 ) !=  \"NULL\" )) and (( zipdim.f2 ) !=  \"uk\" ))) or (((( zipdim.f2 ) ==  \"adc\" )"
-          + " and (( zipdim.f1 ) ==  \"js\" )) and ((( citydim.name ) ==  \"X\" ) or (( citydim.name ) ==  \"Y\" ))))"
-          + " or ((((( zipdim.f1 ) ==  \"api\" ) or (( zipdim.f1 ) ==  \"uk\" )) or ((( zipdim.f1 ) ==  \"adc\" )"
-          + " and (( zipdim.f1 ) !=  \"js\" ))) and (( citydim.id ) ==  12 )))), ((( zipdim.code ) ==  1 ) and"
-          + " ((((( zipdim.f1 ) ==  \"xyz\" ) and (((( zipdim.f2 ) >=  \"3\" ) and (( zipdim.f2 ) !=  \"NULL\" ))"
-          + " and (( zipdim.f2 ) !=  \"uk\" ))) or (((( zipdim.f2 ) ==  \"adc\" ) and (( zipdim.f1 ) ==  \"js\" ))"
-          + " and ((( citydim.name ) ==  \"X\" ) or (( citydim.name ) ==  \"Y\" )))) or ((((( zipdim.f1 ) ==  \"api\" )"
-          + " or (( zipdim.f1 ) ==  \"uk\" )) or ((( zipdim.f1 ) ==  \"adc\" ) and (( zipdim.f1 ) !=  \"js\" )))"
-          + " and (( citydim.id ) ==  12 )))), ( zipdim.f1 ) HAVING ((sum(( testcube.msr1 )) >=  1000 ) "
+        " GROUP BY ( cubecity.name ), case  when (( cubecity.name ) ==  'NULL' ) "
+          + "then  'NULL'  when (( cubecity.name ) ==  'X' ) then  'X-NAME'  when (( cubecity.name ) ==  'Y' )"
+          + " then  'Y-NAME'  else  'DEFAULT'  end, ( cubestate.name ), ( cubestate.id ),"
+          + " ((( cubezip.code ) !=  1 ) and ((((( cubezip.f1 ) ==  \"xyz\" ) and (((( cubezip.f2 ) >=  \"3\" )"
+          + " and (( cubezip.f2 ) !=  \"NULL\" )) and (( cubezip.f2 ) !=  \"uk\" ))) or (((( cubezip.f2 ) ==  \"adc\" )"
+          + " and (( cubezip.f1 ) ==  \"js\" )) and ((( cubecity.name ) ==  \"X\" ) or (( cubecity.name ) ==  \"Y\""
+          + " ))))"
+          + " or ((((( cubezip.f1 ) ==  \"api\" ) or (( cubezip.f1 ) ==  \"uk\" )) or ((( cubezip.f1 ) ==  \"adc\" )"
+          + " and (( cubezip.f1 ) !=  \"js\" ))) and (( cubecity.id ) ==  12 )))), ((( cubezip.code ) ==  1 ) and"
+          + " ((((( cubezip.f1 ) ==  \"xyz\" ) and (((( cubezip.f2 ) >=  \"3\" ) and (( cubezip.f2 ) !=  \"NULL\" ))"
+          + " and (( cubezip.f2 ) !=  \"uk\" ))) or (((( cubezip.f2 ) ==  \"adc\" ) and (( cubezip.f1 ) ==  \"js\" ))"
+          + " and ((( cubecity.name ) ==  \"X\" ) or (( cubecity.name ) ==  \"Y\" )))) or ((((( cubezip.f1 )==\"api\" )"
+          + " or (( cubezip.f1 ) ==  \"uk\" )) or ((( cubezip.f1 ) ==  \"adc\" ) and (( cubezip.f1 ) !=  \"js\" )))"
+          + " and (( cubecity.id ) ==  12 )))), ( cubezip.f1 ) HAVING ((sum(( testcube.msr1 )) >=  1000 ) "
           + "and (sum(( testcube.msr2 )) >=  0.01 ))",
         null, getWhereForHourly2days("c1_testfact2_raw"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite(
-        "SELECT citydim.name AS g1,"
-          + " CASE  WHEN citydim.name=='NULL'  THEN 'NULL' "
-          + " WHEN citydim.name=='X'  THEN 'X-NAME' "
-          + " WHEN citydim.name=='Y'  THEN 'Y-NAME' "
+        "SELECT cubecity.name AS g1,"
+          + " CASE  WHEN cubecity.name=='NULL'  THEN 'NULL' "
+          + " WHEN cubecity.name=='X'  THEN 'X-NAME' "
+          + " WHEN cubecity.name=='Y'  THEN 'Y-NAME' "
           + " ELSE 'DEFAULT'   END  AS g2, "
-          + " statedim.name AS g3,"
-          + " statedim.id AS g4, "
-          + " zipdim.code!=1  AND "
-          + " ((zipdim.f1==\"xyz\"  AND  (zipdim.f2 >= \"3\"  AND "
-          + "  zipdim.f2 !=\"NULL\"  AND  zipdim.f2 != \"uk\")) "
-          + "  OR (zipdim.f2==\"adc\"  AND  zipdim.f1==\"js\" "
-          + "  AND  ( citydim.name == \"X\"  OR  citydim.name == \"Y\" )) "
-          + " OR ((zipdim.f1==\"api\"  OR  zipdim.f1==\"uk\"  OR  (zipdim.f1==\"adc\"  AND  zipdim.f1!=\"js\"))"
-          + "  AND  citydim.id==12) ) AS g5,"
-          + " zipdim.code==1  AND "
-          + " ((zipdim.f1==\"xyz\"  AND  (zipdim.f2 >= \"3\"  AND "
-          + "  zipdim.f2 !=\"NULL\"  AND  zipdim.f2 != \"uk\")) "
-          + " OR (zipdim.f2==\"adc\"  AND  zipdim.f1==\"js\" "
-          + " AND  ( citydim.name == \"X\"  OR  citydim.name == \"Y\" )) "
-          + "  OR ((zipdim.f1==\"api\"  OR  zipdim.f1==\"uk\"  OR  (zipdim.f1==\"adc\"  AND  zipdim.f1!=\"js\"))"
-          + "    AND  citydim.id==12) ) AS g6, "
-          + "  zipdim.f1 AS g7, "
+          + " cubestate.name AS g3,"
+          + " cubestate.id AS g4, "
+          + " cubezip.code!=1  AND "
+          + " ((cubezip.f1==\"xyz\"  AND  (cubezip.f2 >= \"3\"  AND "
+          + "  cubezip.f2 !=\"NULL\"  AND  cubezip.f2 != \"uk\")) "
+          + "  OR (cubezip.f2==\"adc\"  AND  cubezip.f1==\"js\" "
+          + "  AND  ( cubecity.name == \"X\"  OR  cubecity.name == \"Y\" )) "
+          + " OR ((cubezip.f1==\"api\"  OR  cubezip.f1==\"uk\"  OR  (cubezip.f1==\"adc\"  AND  cubezip.f1!=\"js\"))"
+          + "  AND  cubecity.id==12) ) AS g5,"
+          + " cubezip.code==1  AND "
+          + " ((cubezip.f1==\"xyz\"  AND  (cubezip.f2 >= \"3\"  AND "
+          + "  cubezip.f2 !=\"NULL\"  AND  cubezip.f2 != \"uk\")) "
+          + " OR (cubezip.f2==\"adc\"  AND  cubezip.f1==\"js\" "
+          + " AND  ( cubecity.name == \"X\"  OR  cubecity.name == \"Y\" )) "
+          + "  OR ((cubezip.f1==\"api\"  OR  cubezip.f1==\"uk\"  OR  (cubezip.f1==\"adc\"  AND  cubezip.f1!=\"js\"))"
+          + "    AND  cubecity.id==12) ) AS g6, "
+          + "  cubezip.f1 AS g7, "
           + "  format_number(SUM(msr1),\"##################.###\") AS a1,"
           + "  format_number(SUM(msr2),\"##################.###\") AS a2, "
           + "  format_number(SUM(msr3),\"##################.###\") AS a3, "
@@ -972,20 +968,20 @@ public class TestCubeRewriter extends TestQueryRewrite {
           + " format_number(SUM(msr1)-(SUM(msr2)+SUM(msr3)),\"##################.###\") AS a6"
           + "  FROM testCube where "
           + TWO_DAYS_RANGE
-          + " group by citydim.name, CASE WHEN citydim.name=='NULL' THEN 'NULL'"
-          + " WHEN citydim.name=='X' THEN 'X-NAME' WHEN citydim.name=='Y' THEN 'Y-NAME'"
-          + " ELSE 'DEFAULT'   END, statedim.name, statedim.id,  zipdim.code!=1  AND"
-          + " ((zipdim.f1==\"xyz\"  AND  (zipdim.f2 >= \"3\"  AND zipdim.f2 !=\"NULL\"  AND  zipdim.f2 != \"uk\"))"
-          + " OR (zipdim.f2==\"adc\"  AND  zipdim.f1==\"js\""
-          + " AND ( citydim.name == \"X\"  OR  citydim.name == \"Y\" ))"
-          + " OR ((zipdim.f1==\"api\"  OR  zipdim.f1==\"uk\"  OR  (zipdim.f1==\"adc\"  AND  zipdim.f1!=\"js\"))"
-          + " AND  citydim.id==12) ),"
-          + " zipdim.code==1  AND  ((zipdim.f1==\"xyz\" AND ( zipdim.f2 >= \"3\"  AND zipdim.f2 !=\"NULL\""
-          + " AND  zipdim.f2 != \"uk\"))"
-          + " OR (zipdim.f2==\"adc\"  AND  zipdim.f1==\"js\""
-          + " AND  ( citydim.name == \"X\"  OR  citydim.name == \"Y\" ))"
-          + " OR ((zipdim.f1=\"api\"  OR  zipdim.f1==\"uk\" OR (zipdim.f1==\"adc\"  AND  zipdim.f1!=\"js\")) AND"
-          + " citydim.id==12))," + " zipdim.f1 " + "HAVING (SUM(msr1) >=1000)  AND (SUM(msr2)>=0.01)", conf);
+          + " group by cubecity.name, CASE WHEN cubecity.name=='NULL' THEN 'NULL'"
+          + " WHEN cubecity.name=='X' THEN 'X-NAME' WHEN cubecity.name=='Y' THEN 'Y-NAME'"
+          + " ELSE 'DEFAULT'   END, cubestate.name, cubestate.id,  cubezip.code!=1  AND"
+          + " ((cubezip.f1==\"xyz\"  AND  (cubezip.f2 >= \"3\"  AND cubezip.f2 !=\"NULL\"  AND  cubezip.f2 != \"uk\"))"
+          + " OR (cubezip.f2==\"adc\"  AND  cubezip.f1==\"js\""
+          + " AND ( cubecity.name == \"X\"  OR  cubecity.name == \"Y\" ))"
+          + " OR ((cubezip.f1==\"api\"  OR  cubezip.f1==\"uk\"  OR  (cubezip.f1==\"adc\"  AND  cubezip.f1!=\"js\"))"
+          + " AND  cubecity.id==12) ),"
+          + " cubezip.code==1  AND  ((cubezip.f1==\"xyz\" AND ( cubezip.f2 >= \"3\"  AND cubezip.f2 !=\"NULL\""
+          + " AND  cubezip.f2 != \"uk\"))"
+          + " OR (cubezip.f2==\"adc\"  AND  cubezip.f1==\"js\""
+          + " AND  ( cubecity.name == \"X\"  OR  cubecity.name == \"Y\" ))"
+          + " OR ((cubezip.f1=\"api\"  OR  cubezip.f1==\"uk\" OR (cubezip.f1==\"adc\"  AND  cubezip.f1!=\"js\")) AND"
+          + " cubecity.id==12))," + " cubezip.f1 " + "HAVING (SUM(msr1) >=1000)  AND (SUM(msr2)>=0.01)", conf);
     compareQueries(hqlQuery, expected);
   }
 
@@ -1570,18 +1566,16 @@ public class TestCubeRewriter extends TestQueryRewrite {
   public void testJoinPathColumnLifeValidation() throws Exception {
     HiveConf testConf = new HiveConf(new HiveConf(getConf(), HiveConf.class));
     testConf.setBoolean(DISABLE_AUTO_JOINS, false);
-    System.out.println("@@ Joins disabled? " + testConf.get(DISABLE_AUTO_JOINS));
     // Set column life of dim2 column in testCube
     CubeMetastoreClient client = CubeMetastoreClient.getInstance(testConf);
     Cube cube = (Cube) client.getCube(TEST_CUBE_NAME);
 
-    ReferencedDimAtrribute col = (ReferencedDimAtrribute) cube.getColumnByName("cdim2");
+    BaseDimAttribute col = (BaseDimAttribute) cube.getColumnByName("cdim2");
     assertNotNull(col);
 
-    final String query = "SELECT cycledim1.name, msr2 FROM testCube where " + TWO_DAYS_RANGE;
+    final String query = "SELECT cdimChain.name, msr2 FROM testCube where " + TWO_DAYS_RANGE;
     try {
       CubeQueryContext context = rewriteCtx(query, testConf);
-      System.out.println("TestJoinPathTimeRange: " + context.toHQL());
       fail("Expected query to fail because of invalid column life");
     } catch (LensException exc) {
       assertEquals(exc.getErrorCode(), LensCubeErrorCode.NO_JOIN_PATH.getLensErrorInfo().getErrorCode());
@@ -1595,10 +1589,9 @@ public class TestCubeRewriter extends TestQueryRewrite {
     Date oneWeekBack = DateUtils.addDays(TWODAYS_BACK, -7);
 
     // Alter cube.dim2 with an invalid column life
-    ReferencedDimAtrribute newDim2 =
-      new ReferencedDimAtrribute(new FieldSchema(col.getName(), "string", "invalid col"), col.getDisplayString(),
-        col.getReferences(), oneWeekBack, null,
-        col.getCost());
+    BaseDimAttribute newDim2 =
+      new BaseDimAttribute(new FieldSchema(col.getName(), "string", "invalid col"), col.getDisplayString(),
+        oneWeekBack, null, col.getCost(), null);
     cube.alterDimension(newDim2);
     client.alterCube(TEST_CUBE_NAME, cube);
     String hql = rewrite(query, testConf);
@@ -1678,7 +1671,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
   @Test
   public void testSelectDimonlyJoinOnCube() throws Exception {
-    String query = "SELECT count (distinct citydim.name) from testCube where " + TWO_DAYS_RANGE;
+    String query = "SELECT count (distinct cubecity.name) from testCube where " + TWO_DAYS_RANGE;
     Configuration conf = new Configuration(getConf());
     conf.setBoolean(DISABLE_AUTO_JOINS, false);
     String hql = rewrite(query, conf);

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
index af9daad..a8390ef 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
@@ -65,12 +65,12 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
         null);
     TestCubeRewriter.compareQueries(hqlQuery, expecteddim2big1);
     // with another table
-    hqlQuery = rewrite("select dim2big1, citydim.name, max(msr3)," + " msr2 from testCube" + " where "
+    hqlQuery = rewrite("select dim2big1, cubecity.name, max(msr3)," + " msr2 from testCube" + " where "
       + TWO_DAYS_RANGE_IT, conf);
     String expecteddim2big1WithAnotherTable = getExpectedQuery(cubeName,
-      "select testcube.dim2big1, citydim.name, max(testcube.msr3), sum(testcube.msr2) FROM ", " JOIN "
-        + getDbName() + "c1_citytable citydim " + "on testcube.cityid = citydim.id and citydim.dt = 'latest' ", null,
-      " group by testcube.dim2big1, citydim.name", null,
+      "select testcube.dim2big1, cubecity.name, max(testcube.msr3), sum(testcube.msr2) FROM ", " JOIN "
+        + getDbName() + "c1_citytable cubecity " + "on testcube.cityid = cubecity.id and cubecity.dt = 'latest' ", null,
+      " group by testcube.dim2big1, cubecity.name", null,
       getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary4"),
       null);
     TestCubeRewriter.compareQueries(hqlQuery, expecteddim2big1WithAnotherTable);
@@ -84,70 +84,102 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
 
     Configuration conf2 = new Configuration(conf);
     conf2.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
-    hqlQuery = rewrite("select testdim3.name, dim2big1, max(msr3)," + " msr2 from testCube" + " where "
+    hqlQuery = rewrite("select dim3chain.name, dim2big1, max(msr3), msr2 from testCube where "
       + TWO_DAYS_RANGE_IT, conf2);
     String expected =
       getExpectedQuery(cubeName,
-        "select testdim3.name, testcube.dim2big1, max(testcube.msr3), sum(testcube.msr2) FROM ", " JOIN "
-          + getDbName() + "c2_testdim2tbl3 testdim2 " + "on testcube.dim2big1 = testdim2.bigid1" + " join "
-          + getDbName() + "c2_testdim3tbl testdim3 on " + "testdim2.testdim3id = testdim3.id", null,
-        " group by testdim3.name, (testcube.dim2big1)", null,
+        "select dim3chain.name, testcube.dim2big1, max(testcube.msr3), sum(testcube.msr2) FROM ", " JOIN "
+          + getDbName() + "c2_testdim2tbl3 dim2chain " + "on testcube.dim2big1 = dim2chain.bigid1" + " join "
+          + getDbName() + "c2_testdim3tbl dim3chain on " + "dim2chain.testdim3id = dim3chain.id", null,
+        " group by dim3chain.name, (testcube.dim2big1)", null,
         getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary4"),
         null);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
-    hqlQuery = rewrite("select dim2big1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT, conf2);
+    hqlQuery = rewrite("select dim2big1, max(msr3), msr2 from testCube where " + TWO_DAYS_RANGE_IT, conf2);
     TestCubeRewriter.compareQueries(hqlQuery, expecteddim2big1);
-    hqlQuery = rewrite("select dim2big2, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT, conf2);
+    hqlQuery = rewrite("select dim2big2, max(msr3), msr2 from testCube where " + TWO_DAYS_RANGE_IT, conf2);
     TestCubeRewriter.compareQueries(hqlQuery, expecteddim2big2);
   }
 
   @Test
-  public void testDenormsWithJoins() throws ParseException, LensException, HiveException, ClassNotFoundException {
+  public void testDenormsWithJoins() throws Exception {
     // all following queries use joins to get denorm fields
     Configuration tconf = new Configuration(this.conf);
     tconf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
-    String hqlQuery = rewrite("select dim2big1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE,
+    String hqlQuery = rewrite("select dim2big1, max(msr3), msr2 from testCube where " + TWO_DAYS_RANGE,
       tconf);
     String expected =
-      getExpectedQuery(cubeName, "select testdim2.bigid1, max(testcube.msr3), sum(testcube.msr2) FROM ", " JOIN "
-          + getDbName() + "c1_testdim2tbl2 testdim2 ON testcube.dim2 = "
-          + " testdim2.id and (testdim2.dt = 'latest') ", null, "group by (testdim2.bigid1)", null,
+      getExpectedQuery(cubeName, "select dim2chain.bigid1, max(testcube.msr3), sum(testcube.msr2) FROM ", " JOIN "
+          + getDbName() + "c1_testdim2tbl2 dim2chain ON testcube.dim2 = "
+          + " dim2chain.id and (dim2chain.dt = 'latest') ", null, "group by (dim2chain.bigid1)", null,
         getWhereForDailyAndHourly2days(cubeName, "c1_summary2"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
 
-    hqlQuery =
-      rewrite("select testdim2.name, dim2big1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE, tconf);
-    expected =
+  @Test
+  public void testDenormsWithJoinsWithChainFieldSelected() throws Exception {
+    Configuration tconf = new Configuration(this.conf);
+    tconf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
+    String hqlQuery = rewrite("select dim2chain.name, dim2big1, max(msr3), msr2 from testCube where " + TWO_DAYS_RANGE,
+      tconf);
+    String expected =
       getExpectedQuery(cubeName,
-        "select testdim2.name, testdim2.bigid1, max(testcube.msr3), sum(testcube.msr2) FROM ", " JOIN "
-          + getDbName() + "c1_testdim2tbl2 testdim2 ON testcube.dim2 = "
-          + " testdim2.id and (testdim2.dt = 'latest') ", null, "group by testdim2.name, testdim2.bigid1", null,
+        "select dim2chain.name, dim2chain.bigid1, max(testcube.msr3), sum(testcube.msr2) FROM ", " JOIN "
+          + getDbName() + "c1_testdim2tbl2 dim2chain ON testcube.dim2 = "
+          + " dim2chain.id and (dim2chain.dt = 'latest') ", null, "group by dim2chain.name, dim2chain.bigid1", null,
         getWhereForDailyAndHourly2days(cubeName, "c1_summary2"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
 
-    hqlQuery =
-      rewrite("select testdim2.name, dim2big1, max(msr3)," + " msr2 from testCube left outer join testdim2"
-        + " where " + TWO_DAYS_RANGE, tconf);
-    expected =
+  @Test
+  public void testDenormsWithJoinsWithChainFieldSelectedAndJoinTypeSpecified() throws Exception {
+    Configuration tconf = new Configuration(this.conf);
+    tconf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
+    tconf.set(CubeQueryConfUtil.JOIN_TYPE_KEY, "LEFTOUTER");
+    String hqlQuery = rewrite("select dim2chain.name, dim2big1, max(msr3), msr2 from testCube where " + TWO_DAYS_RANGE,
+      tconf);
+    String expected =
       getExpectedQuery(cubeName,
-        "select testdim2.name, testdim2.bigid1, max(testcube.msr3), sum(testcube.msr2) FROM ", " left outer JOIN "
-          + getDbName() + "c1_testdim2tbl2 testdim2 ON testcube.dim2 = "
-          + " testdim2.id and (testdim2.dt = 'latest') ", null, "group by testdim2.name, testdim2.bigid1", null,
+        "select dim2chain.name, dim2chain.bigid1, max(testcube.msr3), sum(testcube.msr2) FROM ", " LEFT OUTER JOIN "
+          + getDbName() + "c1_testdim2tbl2 dim2chain ON testcube.dim2 = "
+          + " dim2chain.id and (dim2chain.dt = 'latest') ", null, "group by dim2chain.name, dim2chain.bigid1", null,
         getWhereForDailyAndHourly2days(cubeName, "c1_summary2"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
 
-    hqlQuery =
-      rewrite("select testdim3.name, dim2big1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE, tconf);
-    expected =
+  @Test
+  public void testDenormsWithJoinsWithExplicitJoinSpecified() throws Exception {
+    Configuration tconf = new Configuration(this.conf);
+    tconf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
+    // With explicit join specified, automatic join resolver is disabled.
+    // thus querying denorm variables will fail
+    getLensExceptionInRewrite("select testdim2.name, dim2big1, max(msr3), msr2 from testCube left outer join testdim2"
+      + " on testcube.dim2 = testdim2.id where " + TWO_DAYS_RANGE, tconf);
+  }
+
+  @Test
+  public void testDenormsWithJoinsWithMergableChains() throws Exception {
+    Configuration tconf = new Configuration(this.conf);
+    tconf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
+    String hqlQuery =
+      rewrite("select dim3chain.name, dim2big1, max(msr3), msr2 from testCube where " + TWO_DAYS_RANGE,
+        tconf);
+    String expected =
       getExpectedQuery(cubeName,
-        "select testdim3.name, testdim2.bigid1, max(testcube.msr3), sum(testcube.msr2) FROM ", " JOIN "
-          + getDbName() + "c1_testdim2tbl3 testdim2 "
-          + "on testcube.dim2 = testdim2.id AND (testdim2.dt = 'latest')" + " join " + getDbName()
-          + "c1_testdim3tbl testdim3 on " + "testdim2.testdim3id = testdim3.id AND (testdim3.dt = 'latest')",
-        null, " group by testdim3.name, (testdim2.bigid1)", null,
+        "select dim3chain.name, dim2chain.bigid1, max(testcube.msr3), sum(testcube.msr2) FROM ", " JOIN "
+          + getDbName() + "c1_testdim2tbl3 dim2chain "
+          + "on testcube.dim2 = dim2chain.id AND (dim2chain.dt = 'latest')" + " join " + getDbName()
+          + "c1_testdim3tbl dim3chain on " + "dim2chain.testdim3id = dim3chain.id AND (dim3chain.dt = 'latest')",
+        null, " group by dim3chain.name, (dim2chain.bigid1)", null,
         getWhereForDailyAndHourly2days(cubeName, "c1_summary2"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
+
+  @Test
+  public void testDenormsWithJoinsWithNoCandidateStorages() throws Exception {
+    Configuration tconf = new Configuration(this.conf);
+    tconf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
     LensException e = getLensExceptionInRewrite(
       "select dim2big2, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE, tconf);
     NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) e;
@@ -208,9 +240,9 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
     String hqlQuery = rewrite("select substrdim2big1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE,
       tconf);
     String expected =
-      getExpectedQuery(cubeName, "select substr(testdim2.bigid1, 5), max(testcube.msr3), sum(testcube.msr2) FROM ",
-        " JOIN " + getDbName() + "c1_testdim2tbl2 testdim2 ON testcube.dim2 = "
-          + " testdim2.id and (testdim2.dt = 'latest') ", null, "group by substr(testdim2.bigid1, 5)", null,
+      getExpectedQuery(cubeName, "select substr(dim2chain.bigid1, 5), max(testcube.msr3), sum(testcube.msr2) FROM ",
+        " JOIN " + getDbName() + "c1_testdim2tbl2 dim2chain ON testcube.dim2 = "
+          + " dim2chain.id and (dim2chain.dt = 'latest') ", null, "group by substr(dim2chain.bigid1, 5)", null,
         getWhereForDailyAndHourly2days(cubeName, "c1_summary2"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
   }
@@ -219,15 +251,15 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
   public void testDimensionQuery() throws Exception {
     String hqlQuery = rewrite("select citydim.name, citydim.statename from" + " citydim", conf);
 
-    String joinExpr = " join " + getDbName() + "c1_statetable statedim on"
-        + " citydim.stateid = statedim.id and (statedim.dt = 'latest')";
-    String expected = getExpectedQuery("citydim", "SELECT citydim.name, statedim.name FROM ", joinExpr, null, null,
+    String joinExpr = " join " + getDbName() + "c1_statetable citystate on"
+        + " citydim.stateid = citystate.id and (citystate.dt = 'latest')";
+    String expected = getExpectedQuery("citydim", "SELECT citydim.name, citystate.name FROM ", joinExpr, null, null,
         "c1_citytable", true);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("select citydim.statename, citydim.name  from" + " citydim", conf);
 
-    expected = getExpectedQuery("citydim", "SELECT statedim.name, citydim.name FROM ", joinExpr, null, null,
+    expected = getExpectedQuery("citydim", "SELECT citystate.name, citydim.name FROM ", joinExpr, null, null,
         "c1_citytable", true);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
@@ -261,9 +293,9 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
     tConf.set(CubeQueryConfUtil.getValidStorageTablesKey("testFact2"), "C1_testFact2");
     String hqlQuery = rewrite("select test_time_dim2, msr2 from testcube where " + TWO_DAYS_RANGE, tConf);
     String expected =
-      getExpectedQuery(cubeName, "select timehourchain.full_hour, sum(testcube.msr2) FROM ", " join " + getDbName()
-        + "c4_hourDimTbl timehourchain on testcube.test_time_dim_hour_id2  = timehourchain.id", null,
-        " group by timehourchain . full_hour ", null,
+      getExpectedQuery(cubeName, "select timehourchain2.full_hour, sum(testcube.msr2) FROM ", " join " + getDbName()
+        + "c4_hourDimTbl timehourchain2 on testcube.test_time_dim_hour_id2  = timehourchain2.id", null,
+        " group by timehourchain2 . full_hour ", null,
         getWhereForHourly2days("c1_testfact2"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
   }
@@ -276,9 +308,9 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
     tConf.set(CubeQueryConfUtil.getValidStorageTablesKey("testFact"), "C1_testFact");
     String hqlQuery = rewrite("select test_time_dim2, msr2 from testcube where " + TWO_DAYS_RANGE, tConf);
     String expected =
-      getExpectedQuery(cubeName, "select timedatechain.full_date, sum(testcube.msr2) FROM ", " join " + getDbName()
-        + "c4_dayDimTbl timedatechain on testcube.test_time_dim_day_id2  = timedatechain.id", null,
-        " group by timedatechain . full_date ", null,
+      getExpectedQuery(cubeName, "select timedatechain2.full_date, sum(testcube.msr2) FROM ", " join " + getDbName()
+        + "c4_dayDimTbl timedatechain2 on testcube.test_time_dim_day_id2  = timedatechain2.id", null,
+        " group by timedatechain2 . full_date ", null,
         getWhereForDailyAndHourly2days(cubeName, "c1_testfact"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
   }
@@ -302,7 +334,7 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
     Configuration tConf = new Configuration(conf);
     tConf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "");
     CubeQueryContext cubeql = rewriteCtx("select citydim.zipcode, citydim.statename from" + " citydim", tConf);
-    Set<String> candidateDims = new HashSet<String>();
+    Set<String> candidateDims = new HashSet<>();
     for (CandidateDim cdim : cubeql.getCandidateDims().get(cubeql.getMetastoreClient().getDimension("citydim"))) {
       candidateDims.add(cdim.getName());
     }
@@ -314,10 +346,10 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
   public void testDimensionQueryWithExpressionHavingDenormColumn() throws Exception {
     String hqlQuery = rewrite("select citydim.name, citydim.citystate from" + " citydim", conf);
     String joinExpr =
-      " join " + getDbName() + "c1_statetable statedim on"
-        + " citydim.stateid = statedim.id and (statedim.dt = 'latest')";
+      " join " + getDbName() + "c1_statetable citystate on"
+        + " citydim.stateid = citystate.id and (citystate.dt = 'latest')";
     String expected =
-      getExpectedQuery("citydim", "SELECT citydim.name, concat(citydim.name, \":\", statedim.name) FROM ",
+      getExpectedQuery("citydim", "SELECT citydim.name, concat(citydim.name, \":\", citystate.name) FROM ",
         joinExpr, null, null, "c1_citytable", true);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java
index f48e1b7..669a8e9 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java
@@ -50,7 +50,7 @@ public class TestExpressionContext extends TestQueryRewrite {
   public void testNestedExpressions() throws Exception {
     CubeQueryContext nestedExprQL = rewriteCtx("select nestedexpr from testCube where " + TWO_DAYS_RANGE, conf);
     Assert.assertNotNull(nestedExprQL.getExprCtx());
-    List<String> expectedExpressions = new ArrayList<String>();
+    List<String> expectedExpressions = new ArrayList<>();
     expectedExpressions.add("avg(( testcube . roundedmsr2 ))");
     expectedExpressions.add("avg(( testcube . equalsums ))");
     expectedExpressions.add(" case  when (( testcube . substrexpr ) =  'xyz' ) then avg(( testcube . msr5 ))"
@@ -60,14 +60,14 @@ public class TestExpressionContext extends TestQueryRewrite {
     expectedExpressions.add("avg(((( testcube . msr3 ) + ( testcube . msr2 )) /  100 ))");
     expectedExpressions.add(" case  when (substr(( testcube . dim1 ),  3 ) =  'xyz' ) then avg(( testcube . msr5 ))"
       + " when (substr(( testcube . dim1 ),  3 ) =  'abc' ) then (avg(( testcube . msr4 )) /  100 ) end ");
-    expectedExpressions.add(" case  when (substr(ascii(( testdim2 . name )),  3 ) =  'xyz' ) then"
-      + " avg(( testcube . msr5 )) when (substr(ascii(( testdim2 . name )),  3 ) =  'abc' ) then"
+    expectedExpressions.add(" case  when (substr(ascii(( dim2chain . name )),  3 ) =  'xyz' ) then"
+      + " avg(( testcube . msr5 )) when (substr(ascii(( dim2chain . name )),  3 ) =  'abc' ) then"
       + " (avg(( testcube . msr4 )) /  100 ) end ");
     expectedExpressions.add(" case  when (substr(( testcube . dim1 ),  3 ) =  'xyz' ) then avg((( testcube . msr2 )"
       + " + ( testcube . msr3 ))) when (substr(( testcube . dim1 ),  3 ) =  'abc' ) then"
       + " (avg(( testcube . msr4 )) /  100 ) end ");
-    expectedExpressions.add(" case  when (substr(ascii(( testdim2 . name )),  3 ) =  'xyz' ) then"
-      + " avg((( testcube . msr2 ) + ( testcube . msr3 ))) when (substr(ascii(( testdim2 . name )),  3 ) =  'abc' )"
+    expectedExpressions.add(" case  when (substr(ascii(( dim2chain . name )),  3 ) =  'xyz' ) then"
+      + " avg((( testcube . msr2 ) + ( testcube . msr3 ))) when (substr(ascii(( dim2chain . name )),  3 ) =  'abc' )"
       + " then (avg(( testcube . msr4 )) /  100 ) end ");
     expectedExpressions.add(" case  when (( testcube . substrexpr ) =  'xyz' ) then avg((( testcube . msr2 )"
       + " + ( testcube . msr3 ))) when (( testcube . substrexpr ) =  'abc' ) then (avg(( testcube . msr4 )) /  100 )"
@@ -75,11 +75,11 @@ public class TestExpressionContext extends TestQueryRewrite {
     expectedExpressions.add(" case  when (substr(( testcube . dim1 ),  3 ) =  'xyz' ) then avg((( testcube . msr2 )"
       + " + ( testcube . msr3 ))) when (substr(( testcube . dim1 ),  3 ) =  'abc' ) then"
       + " (avg(( testcube . msr4 )) /  100 ) end ");
-    expectedExpressions.add(" case  when (substr(ascii(( testdim2 . name )),  3 ) =  'xyz' ) then"
-      + " avg((( testcube . msr2 ) + ( testcube . msr3 ))) when (substr(ascii(( testdim2 . name )),  3 ) =  'abc' )"
+    expectedExpressions.add(" case  when (substr(ascii(( dim2chain . name )),  3 ) =  'xyz' ) then"
+      + " avg((( testcube . msr2 ) + ( testcube . msr3 ))) when (substr(ascii(( dim2chain . name )),  3 ) =  'abc' )"
       + " then (avg(( testcube . msr4 )) /  100 ) end ");
 
-    List<String> actualExpressions = new ArrayList<String>();
+    List<String> actualExpressions = new ArrayList<>();
     for (ExprSpecContext esc : nestedExprQL.getExprCtx().getExpressionContext("nestedexpr", "testcube").getAllExprs()) {
       actualExpressions.add(HQLParser.getString(esc.getFinalAST()));
     }
@@ -92,14 +92,14 @@ public class TestExpressionContext extends TestQueryRewrite {
     CubeQueryContext nestedExprQL = rewriteCtx("select nestedExprWithTimes from testCube where " + TWO_DAYS_RANGE,
       conf);
     Assert.assertNotNull(nestedExprQL.getExprCtx());
-    List<String> expectedExpressions = new ArrayList<String>();
+    List<String> expectedExpressions = new ArrayList<>();
     expectedExpressions.add("avg(( testcube . roundedmsr2 ))");
     expectedExpressions.add("avg(( testcube . equalsums ))");
     expectedExpressions.add("avg(round((( testcube . msr2 ) /  1000 )))");
     expectedExpressions.add("avg((( testcube . msr3 ) + ( testcube . msr4 )))");
     expectedExpressions.add("avg(((( testcube . msr3 ) + ( testcube . msr2 )) /  100 ))");
 
-    List<String> actualExpressions = new ArrayList<String>();
+    List<String> actualExpressions = new ArrayList<>();
     for (ExprSpecContext esc : nestedExprQL.getExprCtx()
       .getExpressionContext("nestedexprwithtimes", "testcube").getAllExprs()) {
       actualExpressions.add(HQLParser.getString(esc.getFinalAST()));

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
index e77f919..6816fac 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
@@ -56,10 +56,10 @@ public class TestExpressionResolver extends TestQueryRewrite {
     Assert.assertEquals(th.getErrorCode(), LensCubeErrorCode.COLUMN_NOT_FOUND.getLensErrorInfo().getErrorCode());
 
     Assert.assertTrue(getLensExceptionErrorMessageInRewrite(
-        "select nocolexpr, SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf).contains("nonexist"));
+      "select nocolexpr, SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf).contains("nonexist"));
 
     Assert.assertTrue(getLensExceptionErrorMessageInRewrite(
-        "select invalidexpr, SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf).contains("invalidexpr"));
+      "select invalidexpr, SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf).contains("invalidexpr"));
 
     th = getLensExceptionInRewrite("select invalidexpr, " + "SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE,
         conf);
@@ -194,56 +194,30 @@ public class TestExpressionResolver extends TestQueryRewrite {
         conf);
 
     String join1 =
-      " join " + getDbName() + "c1_citytable citydim"
-        + " on testcube.cityid = citydim.id and (citydim.dt = 'latest') ";
+      " join " + getDbName() + "c1_citytable cubecity"
+        + " on testcube.cityid = cubecity.id and (cubecity.dt = 'latest') ";
     String join2 = " join" + getDbName()
-      + "c1_statetable statedim on" + " testcube.stateid = statedim.id and (statedim.dt = 'latest')";
-    String joinExpr;
+      + "c1_statetable cubestate on" + " testcube.stateid = cubestate.id and (cubestate.dt = 'latest')";
 
     String expected =
-      getExpectedQuery(cubeName, "select concat(citydim.name, \":\", statedim.name),"
+      getExpectedQuery(cubeName, "select concat(cubecity.name, \":\", cubestate.name),"
         + " avg(testcube.msr1 + testcube.msr2) FROM ", join2 + join1, null, " and substr(testcube.dim1, 3) != 'XYZ'"
-          + " group by concat(citydim.name, \":\", statedim.name)", null, getWhereForHourly2days("C1_testfact2_raw"));
+          + " group by concat(cubecity.name, \":\", cubestate.name)", null, getWhereForHourly2days("C1_testfact2_raw"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
   }
   @Test
   public void testExpressionInWhereWithJoinClausePassed() throws Exception {
-
-    String hqlQuery =
-      rewrite("select cityAndState, avgmsr from testCube tc" + " join citydim cd join statedim sd " + " where "
-        + TWO_DAYS_RANGE + " and substrexpr != 'XYZ'", conf);
-
-    String join1 = " inner join " + getDbName() + "c1_citytable cd" + " on tc.cityid = cd.id and (cd.dt = 'latest')";
-    String join2 = " inner join" + getDbName() + "c1_statetable sd on" + " tc.stateid = sd.id and (sd.dt = 'latest')";
-    String expected =
-      getExpectedQuery("tc", "select concat(cd.name, \":\", sd.name)," + " avg(tc.msr1 + tc.msr2) FROM ",
-        join2 + join1, null, " and substr(tc.dim1, 3) != 'XYZ'" + " group by concat(cd.name, \":\", sd.name)", null,
-        getWhereForHourly2days("tc", "C1_testfact2_raw"));
-    TestCubeRewriter.compareQueries(hqlQuery, expected);
+    assertLensExceptionInRewrite("select cityAndState, avgmsr from testCube tc join citydim cd join statedim sd where "
+      + TWO_DAYS_RANGE + " and substrexpr != 'XYZ'", conf, LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE);
   }
 
-  //@Test
+  @Test
   public void testExpressionInJoinClause() throws Exception {
     // expression in join clause
-    /*
-     * This is broken right now as partial join conditions
-     *  List<String> joinWhereConds = new ArrayList<String>();
-    joinWhereConds.add(StorageUtil.getWherePartClause("dt", "statedim", StorageConstants.getPartitionsForLatest()));
-    String hqlQuery =
-      rewrite("select cityAndState, avgmsr from testCube " + " join citydim on substrexpr != 'XYZ' where "
-        + TWO_DAYS_RANGE, conf);
-
-    String joinExpr =
-      "join" + getDbName() + "c1_statetable statedim on" + " testcube.stateid = statedim.id"
-        + " inner join " + getDbName() + "c1_citytable citydim" + " on testcube.cityid = citydim.id "
-        + " and substr(testcube.dim1, 3) != 'XYZ' and (citydim.dt = 'latest') ";
-    String expected =
-      getExpectedQuery(cubeName, "select concat(citydim.name, \":\", statedim.name),"
-          + " avg(testcube.msr1 + testcube.msr2) FROM ", joinExpr, null,
-        " group by concat(citydim.name, \":\", statedim.name)", joinWhereConds,
-        getWhereForHourly2days("C1_testfact2_raw"));
-    TestCubeRewriter.compareQueries(hqlQuery, expected);*/
+    assertLensExceptionInRewrite("select cityAndState, avgmsr from testCube join citydim on substrexpr != 'XYZ' where "
+      + TWO_DAYS_RANGE, conf, LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE);
   }
+
   @Test
   public void testExpressionInHaving() throws Exception {
     // expression with having clause
@@ -318,8 +292,9 @@ public class TestExpressionResolver extends TestQueryRewrite {
 
   @Test
   public void testExpressionFieldWithOtherFields() throws Exception {
-    // select with expression which requires dimension tables. And there is a candidate, which is removed because
-    // the other fields which require the dimension tables as expression ones, are not reachable and
+    // select with expression which requires dimension tables.
+    // There is a candidate, but it is removed because
+    // the dimension tables required by the expression are not reachable and
     // the expression is not evaluable on the candidate.
     LensException th =
       getLensExceptionInRewrite("select cityStateName, msr2expr, msr5, msr15 from testCube where "
@@ -360,15 +335,15 @@ public class TestExpressionResolver extends TestQueryRewrite {
 
     String joinExpr;
     String join1 =
-      " join " + getDbName() + "c1_ziptable zipdim on" + " citydim.zipcode = zipdim.code and (zipdim.dt = 'latest')";
-    String join2 = " join " + getDbName() + "c1_statetable statedim on"
-      + " citydim.stateid = statedim.id and (statedim.dt = 'latest')";
+      " join " + getDbName() + "c1_ziptable cityzip on" + " citydim.zipcode = cityzip.code and (cityzip.dt = 'latest')";
+    String join2 = " join " + getDbName() + "c1_statetable citystate on"
+      + " citydim.stateid = citystate.id and (citystate.dt = 'latest')";
     String join3 = " join " + getDbName()
-      + "c1_countrytable countrydim on" + " statedim.countryid = countrydim.id";
+      + "c1_countrytable citycountry on" + " citystate.countryid = citycountry.id";
     joinExpr = join2 + join3 + join1;
     String expected =
-      getExpectedQuery("citydim", "SELECT citydim.name, concat((citydim.name), \":\", (statedim.name ),"
-        + " \":\",(countrydim.name),  \":\" , ( zipdim . code )) FROM ", joinExpr, null, null, "c1_citytable", true);
+      getExpectedQuery("citydim", "SELECT citydim.name, concat((citydim.name), \":\", (citystate.name ),"
+        + " \":\",(citycountry.name),  \":\" , ( cityzip . code )) FROM ", joinExpr, null, null, "c1_citytable", true);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
   }
 
@@ -399,15 +374,15 @@ public class TestExpressionResolver extends TestQueryRewrite {
 
     String joinExpr;
     String join1 =
-      " join " + getDbName() + "c1_ziptable zipdim on" + " citydim.zipcode = zipdim.code and (zipdim.dt = 'latest')";
-    String join2 = " join " + getDbName() + "c1_statetable statedim on"
-      + " citydim.stateid = statedim.id and (statedim.dt = 'latest')";
+      " join " + getDbName() + "c1_ziptable cityzip on" + " citydim.zipcode = cityzip.code and (cityzip.dt = 'latest')";
+    String join2 = " join " + getDbName() + "c1_statetable citystate on"
+      + " citydim.stateid = citystate.id and (citystate.dt = 'latest')";
     String join3 = " join " + getDbName()
-      + "c1_countrytable countrydim on" + " statedim.countryid = countrydim.id";
+      + "c1_countrytable citycountry on" + " citystate.countryid = citycountry.id";
     joinExpr = join2 + join3 + join1;
     String expected =
-      getExpectedQuery("citydim", "SELECT citydim.name as `cname`, concat((citydim.name), \":\", (statedim.name ),"
-        + " \":\",(countrydim.name),  \":\" , ( zipdim . code )) as `caddr` FROM ", joinExpr, null, null,
+      getExpectedQuery("citydim", "SELECT citydim.name as `cname`, concat((citydim.name), \":\", (citystate.name ),"
+        + " \":\",(citycountry.name),  \":\" , ( cityzip . code )) as `caddr` FROM ", joinExpr, null, null,
         "c1_citytable", true);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
   }
@@ -418,14 +393,14 @@ public class TestExpressionResolver extends TestQueryRewrite {
 
     String joinExpr =
       ""
-        + " join " + getDbName() + "c1_statetable statedim on ct.stateid = statedim.id and (statedim.dt = 'latest')"
-        + " join " + getDbName() + "c1_countrytable countrydim on statedim.countryid = countrydim.id"
-        + " join " + getDbName() + "c1_ziptable zipdim on ct.zipcode = zipdim.code and (zipdim.dt = 'latest')"
+        + " join " + getDbName() + "c1_statetable citystate on ct.stateid = citystate.id and (citystate.dt = 'latest')"
+        + " join " + getDbName() + "c1_countrytable citycountry on citystate.countryid = citycountry.id"
+        + " join " + getDbName() + "c1_ziptable cityzip on ct.zipcode = cityzip.code and (cityzip.dt = 'latest')"
         + "";
 
     String expected =
-      getExpectedQuery("ct", "SELECT ct.name, concat((ct.name), \":\", (statedim.name ),"
-        + " \":\",(countrydim.name),  \":\" , ( zipdim . code )) FROM ", joinExpr, null, null, "c1_citytable", true);
+      getExpectedQuery("ct", "SELECT ct.name, concat((ct.name), \":\", (citystate.name ),"
+        + " \":\",(citycountry.name),  \":\" , ( cityzip . code )) FROM ", joinExpr, null, null, "c1_citytable", true);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
   }
 
@@ -444,14 +419,13 @@ public class TestExpressionResolver extends TestQueryRewrite {
 
     String joinExpr =
       ""
-        + " join " + getDbName() + "c1_statetable statedim on ct.stateid = statedim.id and (statedim.dt = 'latest')"
-        + " join " + getDbName() + "c1_countrytable countrydim on statedim.countryid = countrydim.id"
-        + " join " + getDbName() + "c1_ziptable zipdim on ct.zipcode = zipdim.code and (zipdim.dt = 'latest')"
-        + "";
+        + " join " + getDbName() + "c1_statetable citystate on ct.stateid = citystate.id and (citystate.dt = 'latest')"
+        + " join " + getDbName() + "c1_countrytable citycountry on citystate.countryid = citycountry.id"
+        + " join " + getDbName() + "c1_ziptable cityzip on ct.zipcode = cityzip.code and (cityzip.dt = 'latest')";
 
     String expected =
-      getExpectedQuery("ct", "SELECT ct.name as `cname`, concat((ct.name), \":\", (statedim.name ),"
-        + " \":\",(countrydim.name),  \":\" , ( zipdim . code )) as `caddr` FROM ", joinExpr, null, null,
+      getExpectedQuery("ct", "SELECT ct.name as `cname`, concat((ct.name), \":\", (citystate.name ),"
+        + " \":\",(citycountry.name),  \":\" , ( cityzip . code )) as `caddr` FROM ", joinExpr, null, null,
         "c1_citytable", true);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
   }


[14/51] [abbrv] lens git commit: LENS-552: Union support across storage tables in multi fact query

Posted by de...@apache.org.
LENS-552: Union support across storage tables in multi fact query


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/04f5a822
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/04f5a822
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/04f5a822

Branch: refs/heads/current-release-line
Commit: 04f5a8223f652baa5cfbebec7f8f9a2886df5076
Parents: bf1053b
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Thu Dec 24 12:39:33 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Thu Dec 24 12:39:33 2015 +0530

----------------------------------------------------------------------
 .../lens/cube/parse/AggregateResolver.java      |  12 +-
 .../apache/lens/cube/parse/AliasReplacer.java   |   5 +-
 .../apache/lens/cube/parse/AutoJoinContext.java |   2 +-
 .../apache/lens/cube/parse/CandidateFact.java   | 130 ++++---------------
 .../apache/lens/cube/parse/ColumnResolver.java  |   2 +-
 .../lens/cube/parse/CubeQueryContext.java       |  69 +++++-----
 .../lens/cube/parse/CubeQueryRewriter.java      |   2 +-
 .../apache/lens/cube/parse/DefaultQueryAST.java |  74 +++++++++++
 .../cube/parse/DenormalizationResolver.java     |  18 +--
 .../apache/lens/cube/parse/DimHQLContext.java   |   6 +-
 .../lens/cube/parse/DimOnlyHQLContext.java      |  15 +--
 .../lens/cube/parse/ExpressionResolver.java     |  24 ++--
 .../apache/lens/cube/parse/FactHQLContext.java  |  65 ----------
 .../apache/lens/cube/parse/GroupbyResolver.java |   2 -
 .../org/apache/lens/cube/parse/HQLParser.java   |   1 -
 .../apache/lens/cube/parse/JoinResolver.java    |  17 +--
 .../lens/cube/parse/MultiFactHQLContext.java    |  46 +++----
 .../org/apache/lens/cube/parse/QueryAST.java    |  86 ++++++++++++
 .../lens/cube/parse/SingleFactHQLContext.java   |  96 --------------
 .../parse/SingleFactMultiStorageHQLContext.java |  52 +++++---
 .../SingleFactSingleStorageHQLContext.java      |  85 ++++++++++++
 .../lens/cube/parse/StorageTableResolver.java   |  43 +++---
 .../apache/lens/cube/metadata/DateFactory.java  |  11 ++
 .../apache/lens/cube/parse/CubeTestSetup.java   |   2 +-
 .../lens/cube/parse/TestBaseCubeQueries.java    |  24 ++--
 .../lens/cube/parse/TestCubeRewriter.java       |  53 ++++++--
 26 files changed, 481 insertions(+), 461 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
index 39bd1cc..fd7036a 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
@@ -47,8 +47,6 @@ import lombok.extern.slf4j.Slf4j;
  */
 @Slf4j
 class AggregateResolver implements ContextRewriter {
-  public AggregateResolver(Configuration conf) {
-  }
 
   @Override
   public void rewriteContext(CubeQueryContext cubeql) throws LensException {
@@ -166,7 +164,7 @@ class AggregateResolver implements ContextRewriter {
     String colname;
 
     if (node.getToken().getType() == HiveParser.TOK_TABLE_OR_COL) {
-      colname = ((ASTNode) node.getChild(0)).getText();
+      colname = node.getChild(0).getText();
     } else {
       // node in 'alias.column' format
       ASTNode tabident = HQLParser.findNodeByPath(node, TOK_TABLE_OR_COL, Identifier);
@@ -193,15 +191,9 @@ class AggregateResolver implements ContextRewriter {
           throw new LensException(LensCubeErrorCode.NO_DEFAULT_AGGREGATE.getLensErrorInfo(), colname);
         }
         ASTNode fnroot = new ASTNode(new CommonToken(HiveParser.TOK_FUNCTION));
-        fnroot.setParent(node.getParent());
-
         ASTNode fnIdentNode = new ASTNode(new CommonToken(HiveParser.Identifier, aggregateFn));
-        fnIdentNode.setParent(fnroot);
         fnroot.addChild(fnIdentNode);
-
-        node.setParent(fnroot);
         fnroot.addChild(node);
-
         return fnroot;
       }
     } else {
@@ -224,7 +216,7 @@ class AggregateResolver implements ContextRewriter {
 
       String colname;
       if (node.getToken().getType() == HiveParser.TOK_TABLE_OR_COL) {
-        colname = ((ASTNode) node.getChild(0)).getText();
+        colname = node.getChild(0).getText();
       } else {
         // node in 'alias.column' format
         ASTNode colIdent = (ASTNode) node.getChild(1);

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
index 0656049..e629731 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
@@ -81,7 +81,7 @@ class AliasReplacer implements ContextRewriter {
 
     replaceAliases(cubeql.getWhereAST(), 0, colToTableAlias);
 
-    replaceAliases(cubeql.getJoinTree(), 0, colToTableAlias);
+    replaceAliases(cubeql.getJoinAST(), 0, colToTableAlias);
 
     // Update the aggregate expression set
     AggregateResolver.updateAggregates(cubeql.getSelectAST(), cubeql);
@@ -183,7 +183,6 @@ class AliasReplacer implements ContextRewriter {
         ASTNode aliasNode = (ASTNode) node.getChild(0);
         ASTNode newAliasIdent = new ASTNode(new CommonToken(HiveParser.Identifier, newAlias));
         aliasNode.setChild(0, newAliasIdent);
-        newAliasIdent.setParent(aliasNode);
       } else {
         // Just a column ref, we need to make it alias.col
         // '.' will become the parent node
@@ -192,9 +191,7 @@ class AliasReplacer implements ContextRewriter {
         ASTNode tabRefNode = new ASTNode(new CommonToken(HiveParser.TOK_TABLE_OR_COL, "TOK_TABLE_OR_COL"));
 
         tabRefNode.addChild(aliasIdentNode);
-        aliasIdentNode.setParent(tabRefNode);
         dot.addChild(tabRefNode);
-        tabRefNode.setParent(dot);
 
         ASTNode colIdentNode = new ASTNode(new CommonToken(HiveParser.Identifier, colName));
 

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/AutoJoinContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AutoJoinContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AutoJoinContext.java
index 9472506..7f13c6c 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AutoJoinContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AutoJoinContext.java
@@ -101,7 +101,7 @@ public class AutoJoinContext {
   }
 
   private JoinClause getJoinClause(CandidateFact fact) {
-    if (fact == null) {
+    if (fact == null || !factClauses.containsKey(fact)) {
       return minCostClause;
     }
     return factClauses.get(fact);

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
index 2338ba7..c305244 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
@@ -37,14 +37,14 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 import org.antlr.runtime.CommonToken;
 
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import lombok.Getter;
+import lombok.Setter;
 
 /**
  * Holds context of a candidate fact table.
  */
-public class CandidateFact implements CandidateTable {
+public class CandidateFact implements CandidateTable, QueryAST {
   final CubeFactTable fact;
   @Getter
   private Set<String> storageTables;
@@ -52,27 +52,32 @@ public class CandidateFact implements CandidateTable {
   private int numQueriedParts = 0;
   @Getter
   private final Set<FactPartition> partsQueried = Sets.newHashSet();
-  @Getter
-  private final Map<TimeRange, String> rangeToWhereClause = Maps.newHashMap();
 
   private CubeInterface baseTable;
+  @Getter @Setter
   private ASTNode selectAST;
+  @Getter @Setter
   private ASTNode whereAST;
-  private ASTNode groupbyAST;
+  @Getter @Setter
+  private ASTNode groupByAST;
+  @Getter @Setter
   private ASTNode havingAST;
-  private ASTNode joinTree;
+  @Getter @Setter
+  private ASTNode joinAST;
+  @Getter @Setter
+  private ASTNode orderByAST;
+  @Getter @Setter
+  private Integer limitValue;
   private List<TimeRangeNode> timenodes = Lists.newArrayList();
   private final List<Integer> selectIndices = Lists.newArrayList();
   private final List<Integer> dimFieldIndices = Lists.newArrayList();
   private Collection<String> columns;
   @Getter
-  private final Map<String, String> storgeWhereClauseMap = new HashMap<String, String>();
+  private final Map<String, String> storgeWhereClauseMap = new HashMap<>();
   @Getter
-  private final Map<TimeRange, Map<String, LinkedHashSet<FactPartition>>> rangeToStoragePartMap =
-    new HashMap<TimeRange, Map<String, LinkedHashSet<FactPartition>>>();
+  private final Map<TimeRange, Map<String, LinkedHashSet<FactPartition>>> rangeToStoragePartMap = new HashMap<>();
   @Getter
-  private final Map<TimeRange, Map<String, String>> rangeToStorageWhereMap =
-    new HashMap<TimeRange, Map<String, String>>();
+  private final Map<TimeRange, Map<String, String>> rangeToStorageWhereMap = new HashMap<>();
 
   CandidateFact(CubeFactTable fact, CubeInterface cube) {
     this.fact = fact;
@@ -114,57 +119,25 @@ public class CandidateFact implements CandidateTable {
     numQueriedParts += incr;
   }
 
-  private void updateTimeRanges(ASTNode root, ASTNode parent, int childIndex) throws LensException {
-    if (root == null) {
-      return;
-    } else if (root.getToken().getType() == TOK_FUNCTION) {
-      ASTNode fname = HQLParser.findNodeByPath(root, Identifier);
-      if (fname != null && CubeQueryContext.TIME_RANGE_FUNC.equalsIgnoreCase(fname.getText())) {
-        timenodes.add(new TimeRangeNode(root, parent, childIndex));
-      }
-    } else {
-      for (int i = 0; i < root.getChildCount(); i++) {
-        ASTNode child = (ASTNode) root.getChild(i);
-        updateTimeRanges(child, root, i);
-      }
-    }
-  }
-
   // copy ASTs from CubeQueryContext
   public void copyASTs(CubeQueryContext cubeql) throws LensException {
-    this.selectAST = HQLParser.copyAST(cubeql.getSelectAST());
-    this.whereAST = HQLParser.copyAST(cubeql.getWhereAST());
-    if (cubeql.getJoinTree() != null) {
-      this.joinTree = HQLParser.copyAST(cubeql.getJoinTree());
+    setSelectAST(HQLParser.copyAST(cubeql.getSelectAST()));
+    setWhereAST(HQLParser.copyAST(cubeql.getWhereAST()));
+    if (cubeql.getJoinAST() != null) {
+      setJoinAST(HQLParser.copyAST(cubeql.getJoinAST()));
     }
     if (cubeql.getGroupByAST() != null) {
-      this.groupbyAST = HQLParser.copyAST(cubeql.getGroupByAST());
+      setGroupByAST(HQLParser.copyAST(cubeql.getGroupByAST()));
     }
     if (cubeql.getHavingAST() != null) {
-      this.havingAST = HQLParser.copyAST(cubeql.getHavingAST());
+      setHavingAST(HQLParser.copyAST(cubeql.getHavingAST()));
     }
-    // copy timeranges
-    updateTimeRanges(this.whereAST, null, 0);
   }
 
   public String getWhereClause(String storageTable) {
     return getStorgeWhereClauseMap().get(storageTable);
   }
 
-  public void updateTimeranges(CubeQueryContext cubeql) throws LensException {
-    // Update WhereAST with range clause
-    // resolve timerange positions and replace it by corresponding where clause
-    for (int i = 0; i < cubeql.getTimeRanges().size(); i++) {
-      TimeRange range = cubeql.getTimeRanges().get(i);
-      String rangeWhere = rangeToWhereClause.get(range);
-      if (!StringUtils.isBlank(rangeWhere)) {
-        ASTNode rangeAST = HQLParser.parseExpr(rangeWhere);
-        rangeAST.setParent(timenodes.get(i).parent);
-        timenodes.get(i).parent.setChild(timenodes.get(i).childIndex, rangeAST);
-      }
-    }
-  }
-
   /**
    * Update the ASTs to include only the fields queried from this fact, in all the expressions
    *
@@ -322,54 +295,15 @@ public class CandidateFact implements CandidateTable {
     return null;
   }
 
-  public String getGroupbyTree() {
-    if (groupbyAST != null) {
-      return HQLParser.getString(groupbyAST);
+  @Override
+  public String getOrderByTree() {
+    if (orderByAST != null) {
+      return HQLParser.getString(orderByAST);
     }
     return null;
   }
 
-  /**
-   * @return the selectAST
-   */
-  public ASTNode getSelectAST() {
-    return selectAST;
-  }
-
-  /**
-   * @param selectAST the selectAST to set
-   */
-  public void setSelectAST(ASTNode selectAST) {
-    this.selectAST = selectAST;
-  }
-
-  /**
-   * @return the whereAST
-   */
-  public ASTNode getWhereAST() {
-    return whereAST;
-  }
 
-  /**
-   * @param whereAST the whereAST to set
-   */
-  public void setWhereAST(ASTNode whereAST) {
-    this.whereAST = whereAST;
-  }
-
-  /**
-   * @return the havingAST
-   */
-  public ASTNode getHavingAST() {
-    return havingAST;
-  }
-
-  /**
-   * @param havingAST the havingAST to set
-   */
-  public void setHavingAST(ASTNode havingAST) {
-    this.havingAST = havingAST;
-  }
 
   /**
    * @return the selectIndices
@@ -385,13 +319,9 @@ public class CandidateFact implements CandidateTable {
     return dimFieldIndices;
   }
 
-  public ASTNode getGroupByAST() {
-    return groupbyAST;
-  }
-
   public String getGroupByTree() {
-    if (groupbyAST != null) {
-      return HQLParser.getString(groupbyAST);
+    if (groupByAST != null) {
+      return HQLParser.getString(groupByAST);
     }
     return null;
   }
@@ -413,8 +343,4 @@ public class CandidateFact implements CandidateTable {
     }
     return timePartDimensions;
   }
-
-  public ASTNode getJoinTree() {
-    return joinTree;
-  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
index b95595a..75aa3f4 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
@@ -61,7 +61,7 @@ class ColumnResolver implements ContextRewriter {
     }
     getColsForSelectTree(cubeql);
     getColsForWhereTree(cubeql);
-    getColsForTree(cubeql, cubeql.getJoinTree(), cubeql);
+    getColsForTree(cubeql, cubeql.getJoinAST(), cubeql);
     getColsForTree(cubeql, cubeql.getGroupByAST(), cubeql);
     getColsForTree(cubeql, cubeql.getHavingAST(), cubeql);
     getColsForTree(cubeql, cubeql.getOrderByAST(), cubeql);

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index 4034a54..1fd1d17 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -21,9 +21,7 @@ package org.apache.lens.cube.parse;
 
 import static org.apache.lens.cube.parse.CubeQueryConfUtil.*;
 
-import static org.apache.hadoop.hive.ql.parse.HiveParser.Identifier;
-import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_TABLE_OR_COL;
-import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_TMP_FILE;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 
 import static com.google.common.base.Preconditions.checkArgument;
 
@@ -50,16 +48,11 @@ import org.codehaus.jackson.map.ObjectMapper;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
-
-import lombok.AllArgsConstructor;
-import lombok.Data;
-import lombok.Getter;
-import lombok.Setter;
-import lombok.ToString;
+import lombok.*;
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
-public class CubeQueryContext implements TrackQueriedColumns {
+public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
   public static final String TIME_RANGE_FUNC = "time_range_in";
   public static final String NOW = "now";
   public static final String DEFAULT_TABLE = "_default_";
@@ -653,7 +646,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
     return null;
   }
 
-  public ASTNode getJoinTree() {
+  public ASTNode getJoinAST() {
     return qb.getParseInfo().getJoinExpr();
   }
 
@@ -688,8 +681,8 @@ public class CubeQueryContext implements TrackQueriedColumns {
   }
 
   String getQBFromString(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
-    String fromString = null;
-    if (getJoinTree() == null) {
+    String fromString;
+    if (getJoinAST() == null) {
       if (cube != null) {
         fromString = fact.getStorageString(getAliasForTableName(cube.getName()));
       } else {
@@ -858,6 +851,23 @@ public class CubeQueryContext implements TrackQueriedColumns {
   @Getter private Collection<CandidateFact> pickedFacts;
   @Getter private Collection<CandidateDim> pickedDimTables;
 
+  private void addRangeClauses(CandidateFact fact) throws LensException {
+    if (fact != null) {
+      // resolve timerange positions and replace it by corresponding where clause
+      for (TimeRange range : getTimeRanges()) {
+        for (Map.Entry<String, String> entry : fact.getRangeToStorageWhereMap().get(range).entrySet()) {
+          String table = entry.getKey();
+          String rangeWhere = entry.getValue();
+          if (!StringUtils.isBlank(rangeWhere)) {
+            ASTNode rangeAST = HQLParser.parseExpr(rangeWhere);
+            range.getParent().setChild(range.getChildIndex(), rangeAST);
+          }
+          fact.getStorgeWhereClauseMap().put(table, getWhereTree());
+        }
+      }
+    }
+  }
+
   public String toHQL() throws LensException {
     Set<CandidateFact> cfacts = pickCandidateFactToQuery();
     Map<Dimension, CandidateDim> dimsToQuery = pickCandidateDimsToQuery(dimensions);
@@ -872,11 +882,13 @@ public class CubeQueryContext implements TrackQueriedColumns {
         // copy ASTs for each fact
         for (CandidateFact cfact : cfacts) {
           cfact.copyASTs(this);
-          cfact.updateTimeranges(this);
-          factDimMap.put(cfact, new HashSet<Dimension>(dimsToQuery.keySet()));
+          factDimMap.put(cfact, new HashSet<>(dimsToQuery.keySet()));
         }
-      } else {
-        SingleFactHQLContext.addRangeClauses(this, cfacts.iterator().next());
+      }
+    }
+    if (cfacts != null) {
+      for (CandidateFact fact : cfacts) {
+        addRangeClauses(fact);
       }
     }
 
@@ -884,7 +896,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
     Set<Dimension> exprDimensions = new HashSet<Dimension>();
     if (cfacts != null) {
       for (CandidateFact cfact : cfacts) {
-        Set<Dimension> factExprDimTables = exprCtx.rewriteExprCtx(cfact, dimsToQuery, cfacts.size() > 1);
+        Set<Dimension> factExprDimTables = exprCtx.rewriteExprCtx(cfact, dimsToQuery, cfacts.size() > 1 ? cfact : this);
         exprDimensions.addAll(factExprDimTables);
         if (cfacts.size() > 1) {
           factDimMap.get(cfact).addAll(factExprDimTables);
@@ -892,7 +904,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
       }
     } else {
       // dim only query
-      exprDimensions.addAll(exprCtx.rewriteExprCtx(null, dimsToQuery, false));
+      exprDimensions.addAll(exprCtx.rewriteExprCtx(null, dimsToQuery, this));
     }
     dimsToQuery.putAll(pickCandidateDimsToQuery(exprDimensions));
 
@@ -940,25 +952,22 @@ public class CubeQueryContext implements TrackQueriedColumns {
         }
       }
     }
-    hqlContext = createHQLContext(cfacts, dimsToQuery, factDimMap, this);
+    hqlContext = createHQLContext(cfacts, dimsToQuery, factDimMap);
     return hqlContext.toHQL();
   }
 
   private HQLContextInterface createHQLContext(Set<CandidateFact> facts, Map<Dimension, CandidateDim> dimsToQuery,
-    Map<CandidateFact, Set<Dimension>> factDimMap, CubeQueryContext query) throws LensException {
+    Map<CandidateFact, Set<Dimension>> factDimMap) throws LensException {
     if (facts == null || facts.size() == 0) {
-      return new DimOnlyHQLContext(dimsToQuery, query);
+      return new DimOnlyHQLContext(dimsToQuery, this, this);
     } else if (facts.size() == 1 && facts.iterator().next().getStorageTables().size() > 1) {
       //create single fact with multiple storage context
-      if (!conf.getBoolean(ENABLE_STORAGES_UNION, DEFAULT_ENABLE_STORAGES_UNION)) {
-        throw new LensException(LensCubeErrorCode.STORAGE_UNION_DISABLED.getLensErrorInfo());
-      }
-      return new SingleFactMultiStorageHQLContext(facts.iterator().next(), dimsToQuery, query);
+      return new SingleFactMultiStorageHQLContext(facts.iterator().next(), dimsToQuery, this, this);
     } else if (facts.size() == 1 && facts.iterator().next().getStorageTables().size() == 1) {
       // create single fact context
-      return new SingleFactHQLContext(facts.iterator().next(), dimsToQuery, query);
+      return new SingleFactSingleStorageHQLContext(facts.iterator().next(), dimsToQuery, this, this);
     } else {
-      return new MultiFactHQLContext(facts, dimsToQuery, factDimMap, query);
+      return new MultiFactHQLContext(facts, dimsToQuery, factDimMap, this);
     }
   }
 
@@ -979,10 +988,6 @@ public class CubeQueryContext implements TrackQueriedColumns {
     return tblAliasToColumns.get(getAliasForTableName(tblName));
   }
 
-  public void addColumnsQueried(AbstractCubeTable table, String column) {
-    addColumnsQueried(getAliasForTableName(table.getName()), column);
-  }
-
   public void addColumnsQueriedWithTimeDimCheck(String alias, String timeDimColumn) {
 
     if (!shouldReplaceTimeDimWithPart()) {

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
index e0759b0..c1fd0a5 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
@@ -148,7 +148,7 @@ public class CubeQueryRewriter {
     // Resolve candidate fact tables and dimension tables for columns queried
     rewriters.add(candidateTblResolver);
     // Resolve aggregations and generate base select tree
-    rewriters.add(new AggregateResolver(conf));
+    rewriters.add(new AggregateResolver());
     rewriters.add(new GroupbyResolver(conf));
     rewriters.add(new FieldValidator());
     // Resolve joins and generate base join tree

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java
new file mode 100644
index 0000000..0997f37
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+import org.apache.lens.server.api.error.LensException;
+
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+
+import lombok.AllArgsConstructor;
+import lombok.Data;
+
+@Data
+@AllArgsConstructor
+public class DefaultQueryAST implements QueryAST {
+  private ASTNode selectAST, whereAST, groupByAST, havingAST, joinAST, orderByAST;
+  private Integer limitValue;
+
+  public String getSelectTree() {
+    return HQLParser.getString(selectAST);
+  }
+
+  public String getWhereTree() {
+    if (whereAST != null) {
+      return HQLParser.getString(whereAST);
+    }
+    return null;
+  }
+
+  public String getGroupByTree() {
+    if (groupByAST != null) {
+      return HQLParser.getString(groupByAST);
+    }
+    return null;
+  }
+
+
+  public String getHavingTree() {
+    if (havingAST != null) {
+      return HQLParser.getString(havingAST);
+    }
+    return null;
+  }
+
+  @Override
+  public String getOrderByTree() {
+    if (orderByAST != null) {
+      return HQLParser.getString(orderByAST);
+    }
+    return null;
+  }
+
+  public static DefaultQueryAST fromCandidateFact(CandidateFact fact, String storageTable, QueryAST ast) throws
+    LensException {
+    return new DefaultQueryAST(ast.getSelectAST(),
+      HQLParser.parseExpr(fact.getWhereClause(storageTable.substring(storageTable.indexOf(".") + 1))),
+      ast.getGroupByAST(), ast.getHavingAST(), ast.getJoinAST(), ast.getOrderByAST(), ast.getLimitValue());
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
index a576f3a..5c8bd84 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
@@ -281,19 +281,15 @@ public class DenormalizationResolver implements ContextRewriter {
     }
 
     private void replaceReferencedColumns(CandidateFact cfact, boolean replaceFact) throws LensException {
+      QueryAST ast = cubeql;
       if (replaceFact
         && (tableToRefCols.get(cfact.getName()) != null && !tableToRefCols.get(cfact.getName()).isEmpty())) {
-        resolveClause(cubeql, cfact.getSelectAST());
-        resolveClause(cubeql, cfact.getWhereAST());
-        resolveClause(cubeql, cfact.getGroupByAST());
-        resolveClause(cubeql, cfact.getHavingAST());
-      } else {
-        resolveClause(cubeql, cubeql.getSelectAST());
-        resolveClause(cubeql, cubeql.getWhereAST());
-        resolveClause(cubeql, cubeql.getGroupByAST());
-        resolveClause(cubeql, cubeql.getHavingAST());
-
+        ast = cfact;
       }
+      resolveClause(cubeql, ast.getSelectAST());
+      resolveClause(cubeql, ast.getWhereAST());
+      resolveClause(cubeql, ast.getGroupByAST());
+      resolveClause(cubeql, ast.getHavingAST());
       resolveClause(cubeql, cubeql.getOrderByAST());
     }
 
@@ -320,11 +316,9 @@ public class DenormalizationResolver implements ContextRewriter {
         ASTNode newTableNode =
           new ASTNode(new CommonToken(HiveParser.Identifier, query.getAliasForTableName(refered.getDestTable())));
         tableNode.setChild(0, newTableNode);
-        newTableNode.setParent(tableNode);
 
         ASTNode newColumnNode = new ASTNode(new CommonToken(HiveParser.Identifier, refered.getRefColumn()));
         node.setChild(1, newColumnNode);
-        newColumnNode.setParent(node);
       } else {
         // recurse down
         for (int i = 0; i < node.getChildCount(); i++) {

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
index bcfc1f6..b253b94 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
@@ -44,7 +44,11 @@ abstract class DimHQLContext extends SimpleHQLContext {
   public CubeQueryContext getQuery() {
     return query;
   }
-
+  DimHQLContext(CubeQueryContext query, Map<Dimension, CandidateDim> dimsToQuery,
+    Set<Dimension> queriedDims, QueryAST ast) throws LensException {
+    this(query, dimsToQuery, queriedDims, ast.getSelectTree(), ast.getWhereTree(), ast.getGroupByTree(),
+      ast.getOrderByTree(), ast.getHavingTree(), ast.getLimitValue());
+  }
   DimHQLContext(CubeQueryContext query, Map<Dimension, CandidateDim> dimsToQuery,
     Set<Dimension> queriedDims, String select, String where,
     String groupby, String orderby, String having, Integer limit) throws LensException {

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
index 0c43d98..d22287b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
@@ -32,16 +32,15 @@ import org.apache.lens.server.api.error.LensException;
  */
 class DimOnlyHQLContext extends DimHQLContext {
 
-  DimOnlyHQLContext(Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query) throws LensException {
-    super(query, dimsToQuery, dimsToQuery.keySet(), query.getSelectTree(),
-      query.getWhereTree(), query.getGroupByTree(), query.getOrderByTree(),
-      query.getHavingTree(), query.getLimitValue());
+  DimOnlyHQLContext(Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query, QueryAST ast)
+    throws LensException {
+    this(dimsToQuery, dimsToQuery.keySet(), query, ast);
   }
 
-  DimOnlyHQLContext(Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query, String whereClause)
+  DimOnlyHQLContext(Map<Dimension, CandidateDim> dimsToQuery, Set<Dimension> dimsQueried,
+    CubeQueryContext query, QueryAST ast)
     throws LensException {
-    super(query, dimsToQuery, dimsToQuery.keySet(), query.getSelectTree(), whereClause, query.getGroupByTree(), query
-        .getOrderByTree(), query.getHavingTree(), query.getLimitValue());
+    super(query, dimsToQuery, dimsQueried, ast);
   }
 
   public String toHQL() throws LensException {
@@ -49,7 +48,7 @@ class DimOnlyHQLContext extends DimHQLContext {
   }
 
   protected String getFromTable() throws LensException {
-    if (query.getAutoJoinCtx() != null && query.getAutoJoinCtx().isJoinsResolved()) {
+    if (query.isAutoJoinResolved()) {
       return getDimsToQuery().get(query.getAutoJoinCtx().getAutoJoinTarget()).getStorageString(
         query.getAliasForTableName(query.getAutoJoinCtx().getAutoJoinTarget().getName()));
     } else {

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
index 776021d..26514d8 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
@@ -432,7 +432,7 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     public Set<Dimension> rewriteExprCtx(CandidateFact cfact, Map<Dimension, CandidateDim> dimsToQuery,
-      boolean replaceFact) throws LensException {
+      QueryAST queryAST) throws LensException {
       Set<Dimension> exprDims = new HashSet<Dimension>();
       if (!allExprsQueried.isEmpty()) {
         // pick expressions for fact
@@ -446,7 +446,7 @@ class ExpressionResolver implements ContextRewriter {
           }
         }
         // Replace picked expressions in all the base trees
-        replacePickedExpressions(cfact, replaceFact);
+        replacePickedExpressions(queryAST);
         for (Set<PickedExpression> peSet : pickedExpressions.values()) {
           for (PickedExpression pe : peSet) {
             exprDims.addAll(pe.pickedCtx.exprDims);
@@ -457,21 +457,13 @@ class ExpressionResolver implements ContextRewriter {
       return exprDims;
     }
 
-    private void replacePickedExpressions(CandidateFact cfact, boolean replaceFact)
+    private void replacePickedExpressions(QueryAST queryAST)
       throws LensException {
-      if (replaceFact) {
-        replaceAST(cubeql, cfact.getSelectAST());
-        replaceAST(cubeql, cfact.getWhereAST());
-        replaceAST(cubeql, cfact.getJoinTree());
-        replaceAST(cubeql, cfact.getGroupByAST());
-        replaceAST(cubeql, cfact.getHavingAST());
-      } else {
-        replaceAST(cubeql, cubeql.getSelectAST());
-        replaceAST(cubeql, cubeql.getWhereAST());
-        replaceAST(cubeql, cubeql.getJoinTree());
-        replaceAST(cubeql, cubeql.getGroupByAST());
-        replaceAST(cubeql, cubeql.getHavingAST());
-      }
+      replaceAST(cubeql, queryAST.getSelectAST());
+      replaceAST(cubeql, queryAST.getWhereAST());
+      replaceAST(cubeql, queryAST.getJoinAST());
+      replaceAST(cubeql, queryAST.getGroupByAST());
+      replaceAST(cubeql, queryAST.getHavingAST());
       replaceAST(cubeql, cubeql.getOrderByAST());
     }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java
deleted file mode 100644
index 6c44233..0000000
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.lens.cube.metadata.Dimension;
-import org.apache.lens.server.api.error.LensException;
-
-import lombok.extern.slf4j.Slf4j;
-
-/**
- * HQL context class which passes all query strings from the fact and works with required dimensions for the fact.
- */
-@Slf4j
-public class FactHQLContext extends DimHQLContext {
-
-  private final CandidateFact fact;
-  private final Set<Dimension> factDims;
-
-  FactHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, Set<Dimension> factDims,
-    CubeQueryContext query) throws LensException {
-    super(query, dimsToQuery, factDims, fact.getSelectTree(), fact.getWhereTree(), fact.getGroupByTree(), null, fact
-      .getHavingTree(), null);
-    this.fact = fact;
-    this.factDims = factDims;
-    log.info("factDims:{} for fact:{}", factDims, fact);
-  }
-
-  @Override
-  protected Set<Dimension> getQueriedDimSet() {
-    return factDims;
-  }
-
-  @Override
-  protected CandidateFact getQueriedFact() {
-    return fact;
-  }
-
-  protected String getFromTable() throws LensException {
-    return query.getQBFromString(fact, getDimsToQuery());
-  }
-
-  public CandidateFact getFactToQuery() {
-    return fact;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
index da74713..9674f73 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
@@ -78,7 +78,6 @@ class GroupbyResolver implements ContextRewriter {
               if (groupbyAST != null) {
                 // groupby ast exists, add the expression to AST
                 groupbyAST.addChild(exprAST);
-                exprAST.setParent(groupbyAST);
               } else {
                 // no group by ast exist, create one
                 ASTNode newAST = new ASTNode(new CommonToken(TOK_GROUPBY));
@@ -153,7 +152,6 @@ class GroupbyResolver implements ContextRewriter {
       parent.setChild(i + 1, ch);
     }
     parent.setChild(index, child);
-    child.setParent(parent);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
index 7cea7d5..6c3d4c3 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
@@ -293,7 +293,6 @@ public final class HQLParser {
     if (original.getChildren() != null) {
       for (Object o : original.getChildren()) {
         ASTNode childCopy = copyAST((ASTNode) o);
-        childCopy.setParent(copy);
         copy.addChild(childCopy);
       }
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
index 1385584..de3a16e 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
@@ -87,8 +87,8 @@ class JoinResolver implements ContextRewriter {
     boolean joinResolverDisabled = cubeql.getConf().getBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS,
         CubeQueryConfUtil.DEFAULT_DISABLE_AUTO_JOINS);
     if (joinResolverDisabled) {
-      if (cubeql.getJoinTree() != null) {
-        cubeQB.setQbJoinTree(genJoinTree(cubeQB, cubeql.getJoinTree(), cubeql));
+      if (cubeql.getJoinAST() != null) {
+        cubeQB.setQbJoinTree(genJoinTree(cubeql.getJoinAST(), cubeql));
       }
     } else {
       autoResolveJoins(cubeql);
@@ -336,7 +336,7 @@ class JoinResolver implements ContextRewriter {
   }
 
   // Recursively find out join conditions
-  private QBJoinTree genJoinTree(QB qb, ASTNode joinParseTree, CubeQueryContext cubeql) throws LensException {
+  private QBJoinTree genJoinTree(ASTNode joinParseTree, CubeQueryContext cubeql) throws LensException {
     QBJoinTree joinTree = new QBJoinTree();
     JoinCond[] condn = new JoinCond[1];
 
@@ -388,7 +388,7 @@ class JoinResolver implements ContextRewriter {
 
     } else if (isJoinToken(left)) {
       // Left subtree is join token itself, so recurse down
-      QBJoinTree leftTree = genJoinTree(qb, left, cubeql);
+      QBJoinTree leftTree = genJoinTree(left, cubeql);
 
       joinTree.setJoinSrc(leftTree);
 
@@ -436,12 +436,9 @@ class JoinResolver implements ContextRewriter {
     return joinTree;
   }
 
-  private boolean isJoinToken(ASTNode node) {
-    if ((node.getToken().getType() == TOK_JOIN) || (node.getToken().getType() == TOK_LEFTOUTERJOIN)
+  private static boolean isJoinToken(ASTNode node) {
+    return (node.getToken().getType() == TOK_JOIN) || (node.getToken().getType() == TOK_LEFTOUTERJOIN)
       || (node.getToken().getType() == TOK_RIGHTOUTERJOIN) || (node.getToken().getType() == TOK_FULLOUTERJOIN)
-      || (node.getToken().getType() == TOK_LEFTSEMIJOIN) || (node.getToken().getType() == TOK_UNIQUEJOIN)) {
-      return true;
-    }
-    return false;
+      || (node.getToken().getType() == TOK_LEFTSEMIJOIN) || (node.getToken().getType() == TOK_UNIQUEJOIN);
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
index 113d8de..1a729f8 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
@@ -34,18 +34,24 @@ import com.google.common.collect.Lists;
  */
 class MultiFactHQLContext extends SimpleHQLContext {
 
-  private Map<Dimension, CandidateDim> dimsToQuery;
   private Set<CandidateFact> facts;
   private CubeQueryContext query;
-  private Map<CandidateFact, Set<Dimension>> factDimMap;
+  private Map<CandidateFact, SimpleHQLContext> factHQLContextMap = new HashMap<>();
 
   MultiFactHQLContext(Set<CandidateFact> facts, Map<Dimension, CandidateDim> dimsToQuery,
     Map<CandidateFact, Set<Dimension>> factDimMap, CubeQueryContext query) throws LensException {
     super();
     this.query = query;
     this.facts = facts;
-    this.dimsToQuery = dimsToQuery;
-    this.factDimMap = factDimMap;
+    for (CandidateFact fact : facts) {
+      if (fact.getStorageTables().size() > 1) {
+        factHQLContextMap.put(fact, new SingleFactMultiStorageHQLContext(fact, dimsToQuery, query, fact));
+      } else {
+        factHQLContextMap.put(fact,
+          new SingleFactSingleStorageHQLContext(fact, dimsToQuery, factDimMap.get(fact), query,
+            DefaultQueryAST.fromCandidateFact(fact, fact.getStorageTables().iterator().next(), fact)));
+      }
+    }
   }
 
   protected void setMissingExpressions() throws LensException {
@@ -78,8 +84,7 @@ class MultiFactHQLContext extends SimpleHQLContext {
   }
 
   private String getSelectString() throws LensException {
-    Map<Integer, List<Integer>> selectToFactIndex =
-      new HashMap<Integer, List<Integer>>(query.getSelectAST().getChildCount());
+    Map<Integer, List<Integer>> selectToFactIndex = new HashMap<>(query.getSelectAST().getChildCount());
     int fi = 1;
     for (CandidateFact fact : facts) {
       for (int ind : fact.getSelectIndices()) {
@@ -116,33 +121,14 @@ class MultiFactHQLContext extends SimpleHQLContext {
     return select.toString();
   }
 
-  public Map<Dimension, CandidateDim> getDimsToQuery() {
-    return dimsToQuery;
-  }
-
-  public Set<CandidateFact> getFactsToQuery() {
-    return facts;
-  }
-
   private String getFromString() throws LensException {
     StringBuilder fromBuilder = new StringBuilder();
     int aliasCount = 1;
-    Iterator<CandidateFact> iter = facts.iterator();
-    while (iter.hasNext()) {
-      CandidateFact fact = iter.next();
-      if (fact.getStorageTables().size() > 1) {
-        // Not supported right now.
-        throw new LensException(LensCubeErrorCode.STORAGE_UNION_DISABLED.getLensErrorInfo());
-      }
-      FactHQLContext facthql = new FactHQLContext(fact, dimsToQuery, factDimMap.get(fact), query);
-      fromBuilder.append("(");
-      fromBuilder.append(facthql.toHQL());
-      fromBuilder.append(")");
-      fromBuilder.append(" mq" + aliasCount);
-      aliasCount++;
-      if (iter.hasNext()) {
-        fromBuilder.append(" full outer join ");
-      }
+    String sep = "";
+    for (CandidateFact fact : facts) {
+      SimpleHQLContext facthql = factHQLContextMap.get(fact);
+      fromBuilder.append(sep).append("(").append(facthql.toHQL()).append(")").append(" mq").append(aliasCount++);
+      sep = " full outer join ";
     }
     CandidateFact firstFact = facts.iterator().next();
     if (!firstFact.getDimFieldIndices().isEmpty()) {

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java
new file mode 100644
index 0000000..31680ca
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+
+
+interface QueryAST {
+
+  String getSelectTree();
+
+  String getWhereTree();
+
+  String getHavingTree();
+
+  String getOrderByTree();
+
+  String getGroupByTree();
+
+  Integer getLimitValue();
+
+  void setLimitValue(Integer integer);
+
+  /**
+   * @return the selectAST
+   */
+
+  ASTNode getSelectAST();
+
+  /**
+   * @param selectAST the selectAST to set
+   */
+
+  void setSelectAST(ASTNode selectAST);
+
+  /**
+   * @return the whereAST
+   */
+
+  ASTNode getWhereAST();
+
+  /**
+   * @param whereAST the whereAST to set
+   */
+
+  void setWhereAST(ASTNode whereAST);
+
+  /**
+   * @return the havingAST
+   */
+
+  ASTNode getHavingAST();
+
+  /**
+   * @param havingAST the havingAST to set
+   */
+
+  void setHavingAST(ASTNode havingAST);
+
+  ASTNode getGroupByAST();
+
+  void setGroupByAST(ASTNode havingAST);
+
+  ASTNode getJoinAST();
+
+  ASTNode getOrderByAST();
+
+  void setOrderByAST(ASTNode node);
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
deleted file mode 100644
index de52b0a..0000000
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import java.util.Map;
-
-import org.apache.lens.cube.metadata.Dimension;
-import org.apache.lens.cube.metadata.TimeRange;
-import org.apache.lens.server.api.error.LensException;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-
-/**
- * HQL context class which passes down all query strings to come from DimOnlyHQLContext and works with fact being
- * queried.
- * <p/>
- * Updates from string with join clause expanded
- */
-class SingleFactHQLContext extends DimOnlyHQLContext {
-
-  private final CandidateFact fact;
-  private String storageAlias;
-
-  SingleFactHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query)
-    throws LensException {
-    super(dimsToQuery, query);
-    this.fact = fact;
-  }
-
-  SingleFactHQLContext(CandidateFact fact, String storageAlias, Map<Dimension, CandidateDim> dimsToQuery,
-      CubeQueryContext query, String whereClause) throws LensException {
-    super(dimsToQuery, query, whereClause);
-    this.fact = fact;
-    this.storageAlias = storageAlias;
-  }
-
-
-  public CandidateFact getFactToQuery() {
-    return fact;
-  }
-
-  static void addRangeClauses(CubeQueryContext query, CandidateFact fact) throws LensException {
-    if (fact != null) {
-      // resolve timerange positions and replace it by corresponding where
-      // clause
-      for (TimeRange range : query.getTimeRanges()) {
-        for (Map.Entry<String, String> entry : fact.getRangeToStorageWhereMap().get(range).entrySet()) {
-          String table = entry.getValue();
-          String rangeWhere = entry.getKey();
-
-          if (!StringUtils.isBlank(rangeWhere)) {
-            ASTNode rangeAST = HQLParser.parseExpr(rangeWhere);
-            rangeAST.setParent(range.getParent());
-            range.getParent().setChild(range.getChildIndex(), rangeAST);
-          }
-          fact.getStorgeWhereClauseMap().put(table, query.getWhereTree());
-        }
-      }
-    }
-  }
-
-
-  @Override
-  protected String getFromTable() throws LensException {
-    if (getQuery().getAutoJoinCtx() != null && getQuery().getAutoJoinCtx().isJoinsResolved()) {
-      if (storageAlias != null) {
-        return storageAlias;
-      } else {
-        return fact.getStorageString(getQuery().getAliasForTableName(getQuery().getCube().getName()));
-      }
-    } else {
-      if (fact.getStorageTables().size() == 1) {
-        return getQuery().getQBFromString(fact, getDimsToQuery());
-      } else {
-        return storageAlias;
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
index 96b1d05..e531e6b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
@@ -19,6 +19,8 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.parse.CubeQueryConfUtil.DEFAULT_ENABLE_STORAGES_UNION;
+import static org.apache.lens.cube.parse.CubeQueryConfUtil.ENABLE_STORAGES_UNION;
 import static org.apache.lens.cube.parse.HQLParser.*;
 
 import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
@@ -27,6 +29,7 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.Dimension;
 import org.apache.lens.server.api.error.LensException;
 
@@ -42,6 +45,7 @@ import lombok.Data;
 
 public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
 
+  private final QueryAST ast;
   int aliasCounter = 0;
 
   @Data
@@ -81,29 +85,33 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
 
   private Map<HashableASTNode, ASTNode> innerToOuterASTs = new HashMap<>();
 
-  SingleFactMultiStorageHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query)
+  SingleFactMultiStorageHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery,
+    CubeQueryContext query, QueryAST ast)
     throws LensException {
     super(query, fact);
+    if (!query.getConf().getBoolean(ENABLE_STORAGES_UNION, DEFAULT_ENABLE_STORAGES_UNION)) {
+      throw new LensException(LensCubeErrorCode.STORAGE_UNION_DISABLED.getLensErrorInfo());
+    }
+    this.ast = ast;
     processSelectAST();
     processGroupByAST();
     processWhereAST();
     processHavingAST();
     processOrderByAST();
     processLimit();
-    setHqlContexts(getUnionContexts(fact, dimsToQuery, query));
+    setHqlContexts(getUnionContexts(fact, dimsToQuery, query, ast));
   }
 
   private void processSelectAST() {
-    query.getSelectFinalAliases().clear();
-    ASTNode originalSelectAST = copyAST(query.getSelectAST());
-    query.setSelectAST(new ASTNode(originalSelectAST.getToken()));
+    ASTNode originalSelectAST = copyAST(ast.getSelectAST());
+    ast.setSelectAST(new ASTNode(originalSelectAST.getToken()));
     ASTNode outerSelectAST = processExpression(originalSelectAST);
     setSelect(getString(outerSelectAST));
   }
 
   private void processGroupByAST() {
-    if (query.getGroupByAST() != null) {
-      setGroupby(getString(processExpression(query.getGroupByAST())));
+    if (ast.getGroupByAST() != null) {
+      setGroupby(getString(processExpression(ast.getGroupByAST())));
     }
   }
 
@@ -111,28 +119,29 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
     for (String storageTable : fact.getStorgeWhereClauseMap().keySet()) {
       ASTNode tree = parseExpr(fact.getStorgeWhereClauseMap().get(storageTable));
       ASTNode replaced = replaceAST(tree);
+      //TODO: optimize parse/unparse cycle
       fact.getStorgeWhereClauseMap().put(storageTable, getString(replaced));
     }
   }
 
   private void processHavingAST() throws LensException {
-    if (query.getHavingAST() != null) {
-      setHaving(getString(processExpression(query.getHavingAST())));
-      query.setHavingAST(null);
+    if (ast.getHavingAST() != null) {
+      setHaving(getString(processExpression(ast.getHavingAST())));
+      ast.setHavingAST(null);
     }
   }
 
 
   private void processOrderByAST() {
-    if (query.getOrderByAST() != null) {
-      setOrderby(getString(processExpression(query.getOrderByAST())));
-      query.setOrderByAST(null);
+    if (ast.getOrderByAST() != null) {
+      setOrderby(getString(processExpression(ast.getOrderByAST())));
+      ast.setOrderByAST(null);
     }
   }
 
   private void processLimit() {
-    setLimit(query.getLimitValue());
-    query.setLimitValue(null);
+    setLimit(ast.getLimitValue());
+    ast.setLimitValue(null);
   }
 
   /*
@@ -171,6 +180,7 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
       addToInnerSelectAST(innerSelectExprAST);
       ASTNode dotAST = getDotAST(query.getCube().getName(), alias);
       ASTNode outerAST = new ASTNode(new CommonToken(TOK_FUNCTION));
+      //TODO: take care or non-transitive aggregate functions
       outerAST.addChild(new ASTNode(new CommonToken(Identifier, astNode.getChild(0).getText())));
       outerAST.addChild(dotAST);
       innerToOuterASTs.put(new HashableASTNode(innerSelectASTWithoutAlias), outerAST);
@@ -225,10 +235,10 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
   }
 
   private void addToInnerSelectAST(ASTNode selectExprAST) {
-    if (query.getSelectAST() == null) {
-      query.setSelectAST(new ASTNode(new CommonToken(TOK_SELECT)));
+    if (ast.getSelectAST() == null) {
+      ast.setSelectAST(new ASTNode(new CommonToken(TOK_SELECT)));
     }
-    query.getSelectAST().addChild(selectExprAST);
+    ast.getSelectAST().addChild(selectExprAST);
   }
 
   private ASTNode getDotAST(String tableAlias, String fieldAlias) {
@@ -245,13 +255,13 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
   }
 
   private static ArrayList<HQLContextInterface> getUnionContexts(CandidateFact fact, Map<Dimension, CandidateDim>
-    dimsToQuery, CubeQueryContext query)
+    dimsToQuery, CubeQueryContext query, QueryAST ast)
     throws LensException {
     ArrayList<HQLContextInterface> contexts = new ArrayList<>();
     String alias = query.getAliasForTableName(query.getCube().getName());
     for (String storageTable : fact.getStorageTables()) {
-      SingleFactHQLContext ctx = new SingleFactHQLContext(fact, storageTable + " " + alias, dimsToQuery, query,
-        fact.getWhereClause(storageTable.substring(storageTable.indexOf(".") + 1)));
+      SingleFactSingleStorageHQLContext ctx = new SingleFactSingleStorageHQLContext(fact, storageTable + " " + alias,
+        dimsToQuery, query, DefaultQueryAST.fromCandidateFact(fact, storageTable, ast));
       contexts.add(ctx);
     }
     return contexts;

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactSingleStorageHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactSingleStorageHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactSingleStorageHQLContext.java
new file mode 100644
index 0000000..b1a3b3f
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactSingleStorageHQLContext.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.lens.cube.metadata.Dimension;
+import org.apache.lens.server.api.error.LensException;
+
+/**
+ * HQL context class which passes down all query strings to come from DimOnlyHQLContext and works with fact being
+ * queried.
+ * <p/>
+ * Updates from string with join clause expanded
+ */
+class SingleFactSingleStorageHQLContext extends DimOnlyHQLContext {
+
+  private final CandidateFact fact;
+  private final Set<Dimension> queriedDimSet;
+  private String storageAlias;
+
+  SingleFactSingleStorageHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery,
+    CubeQueryContext query, QueryAST ast)
+    throws LensException {
+    this(fact, dimsToQuery, dimsToQuery.keySet(), query, ast);
+  }
+
+  SingleFactSingleStorageHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery,
+    Set<Dimension> dimsQueried, CubeQueryContext query, QueryAST ast)
+    throws LensException {
+    super(dimsToQuery, dimsQueried, query, ast);
+    this.fact = fact;
+    this.queriedDimSet = dimsQueried;
+  }
+
+  SingleFactSingleStorageHQLContext(CandidateFact fact, String storageAlias, Map<Dimension, CandidateDim> dimsToQuery,
+    CubeQueryContext query, QueryAST ast) throws LensException {
+    this(fact, dimsToQuery, query, ast);
+    this.storageAlias = storageAlias;
+  }
+
+  @Override
+  protected String getFromTable() throws LensException {
+    if (getQuery().isAutoJoinResolved()) {
+      if (storageAlias != null) {
+        return storageAlias;
+      } else {
+        return fact.getStorageString(query.getAliasForTableName(query.getCube().getName()));
+      }
+    } else {
+      if (fact.getStorageTables().size() == 1) {
+        return getQuery().getQBFromString(fact, getDimsToQuery());
+      } else {
+        return storageAlias;
+      }
+    }
+  }
+
+  @Override
+  protected CandidateFact getQueriedFact() {
+    return fact;
+  }
+
+  @Override
+  public Set<Dimension> getQueriedDimSet() {
+    return queriedDimSet;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index 62cc071..14def15 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -19,14 +19,10 @@
 package org.apache.lens.cube.parse;
 
 import static org.apache.lens.cube.metadata.DateUtil.WSPACE;
-import static org.apache.lens.cube.metadata.MetastoreUtil.getFactOrDimtableStorageTableName;
-import static org.apache.lens.cube.metadata.MetastoreUtil.getStoragetableEndTimesKey;
-import static org.apache.lens.cube.metadata.MetastoreUtil.getStoragetableStartTimesKey;
+import static org.apache.lens.cube.metadata.MetastoreUtil.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.*;
-import static org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode.PART_COL_DOES_NOT_EXIST;
-import static org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode.RANGE_NOT_ANSWERABLE;
-import static org.apache.lens.cube.parse.StorageUtil.joinWithAnd;
+import static org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode.*;
 
 import java.text.DateFormat;
 import java.text.ParseException;
@@ -36,10 +32,7 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.lens.cube.metadata.*;
-import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
-import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCause;
-import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode;
-import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipUpdatePeriodCode;
+import org.apache.lens.cube.parse.CandidateTablePruneCause.*;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
@@ -49,7 +42,6 @@ import org.apache.hadoop.util.ReflectionUtils;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
-
 import lombok.extern.slf4j.Slf4j;
 
 /**
@@ -361,9 +353,9 @@ class StorageTableResolver implements ContextRewriter {
   private void resolveFactStoragePartitions(CubeQueryContext cubeql) throws LensException {
     // Find candidate tables wrt supported storages
     Iterator<CandidateFact> i = cubeql.getCandidateFacts().iterator();
-    Map<TimeRange, String> whereClauseForFallback = new LinkedHashMap<TimeRange, String>();
     while (i.hasNext()) {
       CandidateFact cfact = i.next();
+      Map<TimeRange, String> whereClauseForFallback = new LinkedHashMap<TimeRange, String>();
       List<FactPartition> answeringParts = new ArrayList<>();
       Map<String, SkipStorageCause> skipStorageCauses = skipStorageCausesPerFact.get(cfact.fact);
       if (skipStorageCauses == null) {
@@ -434,9 +426,6 @@ class StorageTableResolver implements ContextRewriter {
         cfact.incrementPartsQueried(rangeParts.size());
         answeringParts.addAll(rangeParts);
         cfact.getPartsQueried().addAll(rangeParts);
-        String rangeWhereClause = rangeWriter.getTimeRangeWhereClause(cubeql,
-          cubeql.getAliasForTableName(cubeql.getCube().getName()), rangeParts);
-        cfact.getRangeToWhereClause().put(range, joinWithAnd(rangeWhereClause, extraWhereClause.toString()));
       }
       if (!unsupportedTimeDims.isEmpty()) {
         log.info("Not considering fact table:{} as it doesn't support time dimensions: {}", cfact.fact,
@@ -483,24 +472,27 @@ class StorageTableResolver implements ContextRewriter {
       Set<String> storageTables = new LinkedHashSet<>();
       storageTables.addAll(minimalStorageTables.keySet());
       cfact.setStorageTables(storageTables);
-
       // Update range->storage->partitions with time range where clause
       for (TimeRange trange : cfact.getRangeToStoragePartMap().keySet()) {
-        Map<String, String> rangeToWhere = new HashMap<String, String>();
+        Map<String, String> rangeToWhere = new HashMap<>();
         for (Map.Entry<String, Set<FactPartition>> entry : minimalStorageTables.entrySet()) {
           String table = entry.getKey();
           Set<FactPartition> minimalParts = entry.getValue();
 
           LinkedHashSet<FactPartition> rangeParts = cfact.getRangeToStoragePartMap().get(trange).get(table);
-          LinkedHashSet<FactPartition> minimalPartsCopy = new LinkedHashSet<FactPartition>(minimalParts);
-          minimalPartsCopy.retainAll(rangeParts);
+          LinkedHashSet<FactPartition> minimalPartsCopy = Sets.newLinkedHashSet();
+
+          if (rangeParts != null) {
+            minimalPartsCopy.addAll(minimalParts);
+            minimalPartsCopy.retainAll(rangeParts);
+          }
           if (!StringUtils.isEmpty(whereClauseForFallback.get(trange))) {
-            rangeToWhere.put(
-              rangeWriter.getTimeRangeWhereClause(cubeql, cubeql.getAliasForTableName(cubeql.getCube().getName()),
-                minimalPartsCopy) + " and  " + whereClauseForFallback.get(trange), table);
+            rangeToWhere.put(table, "(("
+              + rangeWriter.getTimeRangeWhereClause(cubeql, cubeql.getAliasForTableName(cubeql.getCube().getName()),
+                minimalPartsCopy) + ") and  (" + whereClauseForFallback.get(trange) + "))");
           } else {
-            rangeToWhere.put(rangeWriter.getTimeRangeWhereClause(cubeql,
-              cubeql.getAliasForTableName(cubeql.getCube().getName()), minimalPartsCopy), table);
+            rangeToWhere.put(table, rangeWriter.getTimeRangeWhereClause(cubeql,
+              cubeql.getAliasForTableName(cubeql.getCube().getName()), minimalPartsCopy));
           }
         }
         cfact.getRangeToStorageWhereMap().put(trange, rangeToWhere);
@@ -592,8 +584,7 @@ class StorageTableResolver implements ContextRewriter {
     int lookAheadNumParts =
       conf.getInt(CubeQueryConfUtil.getLookAheadPTPartsKey(interval), CubeQueryConfUtil.DEFAULT_LOOK_AHEAD_PT_PARTS);
 
-    TimeRange.Iterable.Iterator iter = TimeRange.iterable(ceilFromDate, floorToDate, interval, 1)
-      .iterator();
+    TimeRange.Iterable.Iterator iter = TimeRange.iterable(ceilFromDate, floorToDate, interval, 1).iterator();
     // add partitions from ceilFrom to floorTo
     while (iter.hasNext()) {
       Date dt = iter.next();

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/test/java/org/apache/lens/cube/metadata/DateFactory.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/DateFactory.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/DateFactory.java
index 87e4ce3..e7b9403 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/DateFactory.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/DateFactory.java
@@ -26,6 +26,10 @@ import java.util.Calendar;
 import java.util.Date;
 import java.util.HashMap;
 
+import org.apache.hadoop.util.StringUtils;
+
+import com.google.common.collect.Lists;
+
 public class DateFactory {
   private DateFactory() {
 
@@ -143,6 +147,7 @@ public class DateFactory {
   // Time Ranges
   public static final String LAST_HOUR_TIME_RANGE;
   public static final String TWO_DAYS_RANGE;
+  public static final String TWO_DAYS_RANGE_SPLIT_OVER_UPDATE_PERIODS;
   public static final String TWO_DAYS_RANGE_TTD;
   public static final String TWO_DAYS_RANGE_TTD_BEFORE_4_DAYS;
   public static final String TWO_DAYS_RANGE_TTD2;
@@ -192,5 +197,11 @@ public class DateFactory {
 
     // calculate LAST_HOUR_TIME_RANGE
     LAST_HOUR_TIME_RANGE = getTimeRangeString(HOURLY, -1, 0);
+
+    TWO_DAYS_RANGE_SPLIT_OVER_UPDATE_PERIODS = StringUtils.join(" OR ", Lists.newArrayList(
+      getTimeRangeString(getDateStringWithOffset(HOURLY, -48), getDateStringWithOffset(DAILY, -1)),
+      getTimeRangeString(getDateStringWithOffset(DAILY, 0), getDateStringWithOffset(HOURLY, 0)),
+      getTimeRangeString(getDateStringWithOffset(DAILY, -1), getDateStringWithOffset(DAILY, 0))
+    ));
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 3f01dbe..ad20ae1 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -512,7 +512,7 @@ public class CubeTestSetup {
       "No aggregateMsr", null, null, null));
     cubeMeasures.add(new ColumnMeasure(new FieldSchema("newmeasure", "bigint", "measure available  from now"),
       "New measure", null, null, null, NOW, null, 100.0));
-    cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr15", "int", "first measure"), "Measure15", null, "SUM",
+    cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr15", "int", "fifteenth measure"), "Measure15", null, "SUM",
       "RS"));
 
     cubeDimensions = new HashSet<CubeDimAttribute>();

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index a5886dc..5b44f95 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -50,6 +50,7 @@ import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
 import com.google.common.base.Splitter;
+import com.google.common.collect.Sets;
 import lombok.Getter;
 
 public class TestBaseCubeQueries extends TestQueryRewrite {
@@ -546,17 +547,24 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     assertEquals(ctx.getCandidateFactSets().size(), 1);
     assertEquals(ctx.getCandidateFactSets().iterator().next().size(), 1);
     CandidateFact cfact = ctx.getCandidateFactSets().iterator().next().iterator().next();
-    assertEquals(cfact.getRangeToWhereClause().size(), 2);
-    for(Map.Entry<TimeRange, String> entry: cfact.getRangeToWhereClause().entrySet()) {
+
+    assertEquals(cfact.getRangeToStoragePartMap().size(), 2);
+    Set<String> storages = Sets.newHashSet();
+    for(Map<String, String> entry: cfact.getRangeToStorageWhereMap().values()) {
+      storages.addAll(entry.keySet());
+    }
+    assertEquals(storages.size(), 1);
+    String storage = storages.iterator().next();
+    for(Map.Entry<TimeRange, Map<String, String>> entry: cfact.getRangeToStorageWhereMap().entrySet()) {
       if (entry.getKey().getPartitionColumn().equals("dt")) {
-        ASTNode parsed = HQLParser.parseExpr(entry.getValue());
+        ASTNode parsed = HQLParser.parseExpr(entry.getValue().get(storage));
         assertEquals(parsed.getToken().getType(), KW_AND);
-        assertTrue(entry.getValue().substring(((CommonToken) parsed.getToken()).getStopIndex() + 1).toLowerCase()
-          .contains(dTimeWhereClause));
-        assertFalse(entry.getValue().substring(0, ((CommonToken) parsed.getToken()).getStartIndex()).toLowerCase()
-          .contains("and"));
+        assertTrue(entry.getValue().get(storage).substring(((CommonToken) parsed.getToken()).getStopIndex() + 1)
+          .toLowerCase().contains(dTimeWhereClause));
+        assertFalse(entry.getValue().get(storage).substring(0, ((CommonToken) parsed.getToken()).getStartIndex())
+          .toLowerCase().contains("and"));
       } else if (entry.getKey().getPartitionColumn().equals("ttd")) {
-        assertFalse(entry.getValue().toLowerCase().contains("and"));
+        assertFalse(entry.getValue().get(storage).toLowerCase().contains("and"));
       } else {
         throw new LensException("Unexpected");
       }

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 9a08735..f02cdb0 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -41,7 +41,6 @@ import org.apache.lens.server.api.LensServerAPITestUtil;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.time.DateUtils;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -58,7 +57,6 @@ import com.google.common.base.Splitter;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
-
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
@@ -410,7 +408,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
           return getWhereForMonthlyDailyAndHourly2monthsUnionQuery(storage);
         }
       };
-      try{
+      try {
         rewrite("select cityid as `City ID`, msr8, msr7 as `Third measure` "
           + "from testCube where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
         fail("Union feature is disabled, should have failed");
@@ -501,6 +499,35 @@ public class TestCubeRewriter extends TestQueryRewrite {
   }
 
   @Test
+  public void testMultiFactMultiStorage() throws ParseException, LensException {
+    Configuration conf = LensServerAPITestUtil.getConfiguration(
+      CubeQueryConfUtil.ENABLE_STORAGES_UNION, true,
+      CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1,C2",
+      getValidStorageTablesKey("testfact"), "C1_testFact,C2_testFact",
+      getValidUpdatePeriodsKey("testfact", "C1"), "HOURLY",
+      getValidUpdatePeriodsKey("testfact", "C2"), "DAILY",
+      getValidUpdatePeriodsKey("testfact2_raw", "C1"), "YEARLY",
+      getValidUpdatePeriodsKey("testfact2_raw", "C2"), "YEARLY");
+    CubeTestSetup.getStorageToUpdatePeriodMap().put("c1_testfact", Lists.newArrayList(HOURLY));
+    CubeTestSetup.getStorageToUpdatePeriodMap().put("c2_testfact", Lists.newArrayList(DAILY));
+    String whereCond = "zipcode = 'a' and cityid = 'b' and (" + TWO_DAYS_RANGE_SPLIT_OVER_UPDATE_PERIODS + ")";
+    String hqlQuery = rewrite("cube select zipcode, count(msr4), sum(msr15) from testCube where " + whereCond, conf);
+    System.out.println(hqlQuery);
+    String possibleStart1 = "SELECT COALESCE(mq1.zipcode, mq2.zipcode) zipcode, mq1.msr4 msr4, mq2.msr15 msr15 FROM ";
+    String possibleStart2 = "SELECT COALESCE(mq1.zipcode, mq2.zipcode) zipcode, mq2.msr4 msr4, mq1.msr15 msr15 FROM ";
+
+    assertTrue(hqlQuery.startsWith(possibleStart1) || hqlQuery.startsWith(possibleStart2));
+    compareContains(rewrite("cube select zipcode as `zipcode`, sum(msr15) as `msr15` from testcube where " + whereCond,
+      conf), hqlQuery);
+    compareContains(rewrite("cube select zipcode as `zipcode`, count(msr4) as `msr4` from testcube where " + whereCond,
+      conf), hqlQuery);
+    assertTrue(hqlQuery.endsWith("on mq1.zipcode <=> mq2.zipcode"));
+    // No time_range_in should be remaining
+    assertFalse(hqlQuery.contains("time_range_in"));
+    //TODO: handle having after LENS-813, also handle for order by and limit
+  }
+
+  @Test
   public void testCubeWhereQueryWithMultipleTables() throws Exception {
     Configuration conf = getConf();
     conf.setBoolean(CubeQueryConfUtil.ENABLE_STORAGES_UNION, true);
@@ -1120,18 +1147,18 @@ public class TestCubeRewriter extends TestQueryRewrite {
       new HashMap<String, List<CandidateTablePruneCause>>() {
         {
           put("statetable", Arrays.asList(CandidateTablePruneCause.noCandidateStorages(
-              new HashMap<String, SkipStorageCause>() {
-                {
-                  put("c1_statetable", new SkipStorageCause(SkipStorageCode.NO_PARTITIONS));
-                }
-              }))
+            new HashMap<String, SkipStorageCause>() {
+              {
+                put("c1_statetable", new SkipStorageCause(SkipStorageCode.NO_PARTITIONS));
+              }
+            }))
           );
           put("statetable_partitioned", Arrays.asList(CandidateTablePruneCause.noCandidateStorages(
-              new HashMap<String, SkipStorageCause>() {
-                {
-                  put("C3_statetable_partitioned", new SkipStorageCause(SkipStorageCode.UNSUPPORTED));
-                }
-              }))
+            new HashMap<String, SkipStorageCause>() {
+              {
+                put("C3_statetable_partitioned", new SkipStorageCause(SkipStorageCode.UNSUPPORTED));
+              }
+            }))
           );
         }
       }


[37/51] [abbrv] lens git commit: LENS-923: CLI should allow query execution without the prefix 'query execute'

Posted by de...@apache.org.
LENS-923: CLI should allow query execution without the prefix 'query execute'


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/919936be
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/919936be
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/919936be

Branch: refs/heads/current-release-line
Commit: 919936bec58529ad555a2f858bb8eed56b34ac4e
Parents: b1f38d5
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Mon Jan 25 19:36:26 2016 +0800
Committer: Raju Bairishetti <ra...@apache.org>
Committed: Mon Jan 25 19:36:26 2016 +0800

----------------------------------------------------------------------
 .../lens/cli/commands/BaseLensCommand.java      |  9 ++++
 .../cli/commands/LensLogResourceCommands.java   |  4 +-
 .../lens/cli/commands/LensQueryCommands.java    | 56 ++++++++++++--------
 src/site/apt/user/cli.apt                       |  8 ++-
 4 files changed, 51 insertions(+), 26 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/919936be/lens-cli/src/main/java/org/apache/lens/cli/commands/BaseLensCommand.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/main/java/org/apache/lens/cli/commands/BaseLensCommand.java b/lens-cli/src/main/java/org/apache/lens/cli/commands/BaseLensCommand.java
index 6437725..66a6c4d 100644
--- a/lens-cli/src/main/java/org/apache/lens/cli/commands/BaseLensCommand.java
+++ b/lens-cli/src/main/java/org/apache/lens/cli/commands/BaseLensCommand.java
@@ -194,4 +194,13 @@ public class BaseLensCommand implements ExecutionProcessor {
     return pathValidator.removePrefixBeforeURI(path);
   }
 
+  public String getOrDefaultQueryHandleString(String queryHandleString) {
+    if (queryHandleString != null) {
+      return queryHandleString;
+    }
+    if (getClient().getStatement().getQuery() != null) {
+      return getClient().getStatement().getQueryHandleString();
+    }
+    throw new IllegalArgumentException("Query handle not provided and no queries interacted with in the session.");
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/919936be/lens-cli/src/main/java/org/apache/lens/cli/commands/LensLogResourceCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensLogResourceCommands.java b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensLogResourceCommands.java
index 59b7355..1a3394a 100644
--- a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensLogResourceCommands.java
+++ b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensLogResourceCommands.java
@@ -46,11 +46,13 @@ public class LensLogResourceCommands extends BaseLensCommand {
 
   @CliCommand(value = "show logs",
     help = "show logs for the given handle <log_handle>. Handle can either be a query handle or request id. "
+      + LensQueryCommands.DEFAULT_QUERY_HANDLE_DESCRIPTION + " "
       + "You can optionally provide a location to save the logs as <save_location>")
   public String getLogs(
-    @CliOption(key = {"", "log_handle"}, mandatory = true, help = "<log_handle>")
+    @CliOption(key = {"", "log_handle"}, mandatory = false, help = "<log_handle>")
     String logFile, @CliOption(key = {"save_location"}, mandatory = false, help = "<save_location>") String location) {
     try {
+      logFile = getOrDefaultQueryHandleString(logFile);
       Response response = getClient().getLogs(logFile);
       if (response.getStatus() == Response.Status.OK.getStatusCode()) {
         if (StringUtils.isBlank(location)) {

http://git-wip-us.apache.org/repos/asf/lens/blob/919936be/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
index a29600d..e2ac3af 100644
--- a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
+++ b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
@@ -61,22 +61,42 @@ import com.google.common.base.Joiner;
     + "  <<<query execute cube select id,name from dim_table where name != \"\"first\"\">>>,\n"
     + "  will be parsed as <<<cube select id,name from dim_table where name != \"first\">>>")
 public class LensQueryCommands extends BaseLensCommand {
-  private static final String DEFAULT_QUERY_HANDLE_DESCRIPTION =
+  public static final String DEFAULT_QUERY_HANDLE_DESCRIPTION =
     "If not provided, takes last query handle interacted with.";
+  private static final String ASYNC_DOC =
+    "If <async> is true, The query is launched in async manner and query handle is returned. It's by default false.";
+  private static final String QUERY_NAME_DOC = "<query name> can also be provided, though not required.";
 
-  /**
-   * Execute query.
-   *
-   * @param sql       the sql
-   * @param async    the asynch
-   * @param queryName the query name
-   * @return the string
-   */
+  @CliCommand(value = "select",
+    help = "Execute query <select query-string-without-select>. " + ASYNC_DOC + " " + QUERY_NAME_DOC)
+  public String executeSelectQuery(
+    @CliOption(key = {""}, mandatory = true, help = "<query-string-without-select>") String sql,
+    @CliOption(key = {"async"}, mandatory = false, unspecifiedDefaultValue = "false",
+      specifiedDefaultValue = "true", help = "<async>") boolean async,
+    @CliOption(key = {"name"}, mandatory = false, help = "<query-name>") String queryName) {
+    return executeQuery("select " + sql, async, queryName);
+  }
+
+  @CliCommand(value = "cube select",
+    help = "Execute cube query <cube select query-string-without-cube-select>. " + ASYNC_DOC + " " + QUERY_NAME_DOC)
+  public String executeCubeSelectQuery(
+    @CliOption(key = {""}, mandatory = true, help = "<query-string-without-cube-select>") String sql,
+    @CliOption(key = {"async"}, mandatory = false, unspecifiedDefaultValue = "false",
+      specifiedDefaultValue = "true", help = "<async>") boolean async,
+    @CliOption(key = {"name"}, mandatory = false, help = "<query-name>") String queryName) {
+    return executeQuery("cube select " + sql, async, queryName);
+  }
+
+    /**
+     * Execute query.
+     *
+     * @param sql       the sql
+     * @param async    the asynch
+     * @param queryName the query name
+     * @return the string
+     */
   @CliCommand(value = "query execute",
-    help = "Execute query <query-string>."
-      +
-      " If <async> is true, The query is launched in async manner and query handle is returned. It's by default false."
-      + " <query name> can also be provided, though not required")
+    help = "Execute query <query-string>. " + ASYNC_DOC + " " + QUERY_NAME_DOC)
   public String executeQuery(
     @CliOption(key = {"", "query"}, mandatory = true, help = "<query-string>") String sql,
     @CliOption(key = {"async"}, mandatory = false, unspecifiedDefaultValue = "false",
@@ -140,16 +160,6 @@ public class LensQueryCommands extends BaseLensCommand {
     return b.toString();
   }
 
-  public String getOrDefaultQueryHandleString(String queryHandleString) {
-    if (queryHandleString != null) {
-      return queryHandleString;
-    }
-    if (getClient().getStatement().getQuery() != null) {
-      return getClient().getStatement().getQueryHandleString();
-    }
-    throw new IllegalArgumentException("Query handle not provided and no queries interacted with in the session.");
-  }
-
   /**
    * Gets the status.
    *

http://git-wip-us.apache.org/repos/asf/lens/blob/919936be/src/site/apt/user/cli.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/user/cli.apt b/src/site/apt/user/cli.apt
index 65380e6..2522e03 100644
--- a/src/site/apt/user/cli.apt
+++ b/src/site/apt/user/cli.apt
@@ -335,6 +335,8 @@ User CLI Commands
 *--+--+
 |<<Command>>|<<Description>>|
 *--+--+
+|cube select \<query-string-without-cube-select\> [--async \<async\>] [--name \<query-name\>]|Execute cube query <<<cube select query-string-without-cube-select>>>. If <<<async>>> is true, The query is launched in async manner and query handle is returned. It's by default false. <<<query name>>> can also be provided, though not required.|
+*--+--+
 |prepQuery destroy [--prepare_handle] \<prepare_handle\>|Destroy prepared query with handle <<<prepare_handle>>>|
 *--+--+
 |prepQuery details [--prepare_handle] \<prepare_handle\>|Get prepared query with handle <<<prepare_handle>>>|
@@ -349,7 +351,7 @@ User CLI Commands
 *--+--+
 |query details [[--query_handle] \<query_handle\>]|Get query details of query with handle <<<query_handle>>>.If not provided, takes last query handle interacted with.|
 *--+--+
-|query execute [--query] \<query-string\> [--async \<async\>] [--name \<query-name\>]|Execute query <<<query-string>>>. If <<<async>>> is true, The query is launched in async manner and query handle is returned. It's by default false. <<<query name>>> can also be provided, though not required|
+|query execute [--query] \<query-string\> [--async \<async\>] [--name \<query-name\>]|Execute query <<<query-string>>>. If <<<async>>> is true, The query is launched in async manner and query handle is returned. It's by default false. <<<query name>>> can also be provided, though not required.|
 *--+--+
 |query explain [--query] \<query-string\> [--save_location \<save_location\>]|Explain execution plan of query <<<query-string>>>. Can optionally save the plan to a file by providing <<<save_location>>>|
 *--+--+
@@ -361,6 +363,8 @@ User CLI Commands
 *--+--+
 |query status [[--query_handle] \<query_handle\>]|Fetch status of executed query having query handle <<<query_handle>>>. If not provided, takes last query handle interacted with.|
 *--+--+
+|select \<query-string-without-select\> [--async \<async\>] [--name \<query-name\>]|Execute query <<<select query-string-without-select>>>. If <<<async>>> is true, The query is launched in async manner and query handle is returned. It's by default false. <<<query name>>> can also be provided, though not required.|
+*--+--+
   <<Lens Query Commands>>
 
 ===
@@ -372,7 +376,7 @@ User CLI Commands
 *--+--+
 |<<Command>>|<<Description>>|
 *--+--+
-|show logs [--log_handle] \<log_handle\> [--save_location \<save_location\>]|show logs for the given handle <<<log_handle>>>. Handle can either be a query handle or request id. You can optionally provide a location to save the logs as <<<save_location>>>|
+|show logs [[--log_handle] \<log_handle\>] [--save_location \<save_location\>]|show logs for the given handle <<<log_handle>>>. Handle can either be a query handle or request id. If not provided, takes last query handle interacted with. You can optionally provide a location to save the logs as <<<save_location>>>|
 *--+--+
   <<Lens Log Resource Commands>>
 


[51/51] [abbrv] lens git commit: Fixed merge conflicts

Posted by de...@apache.org.
Fixed merge conflicts


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/15396047
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/15396047
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/15396047

Branch: refs/heads/current-release-line
Commit: 15396047b2e85d57eb6ffa73dbc4296ead6c9f49
Parents: d559ef2 79261f9
Author: Deepak Barr <de...@apache.org>
Authored: Wed Feb 3 16:46:26 2016 +0530
Committer: Deepak Kumar Barr <de...@gmail.com>
Committed: Wed Feb 3 16:46:26 2016 +0530

----------------------------------------------------------------------

----------------------------------------------------------------------



[49/51] [abbrv] lens git commit: LENS-920 : Fix issues in producing and consuming json for all api

Posted by de...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/customer_table.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/customer_table.xml b/lens-examples/src/main/resources/customer_table.xml
index af308c2..28067bb 100644
--- a/lens-examples/src/main/resources/customer_table.xml
+++ b/lens-examples/src/main/resources/customer_table.xml
@@ -22,12 +22,12 @@
 <x_dimension_table dimension_name="customer" table_name="customer_table" weight="100.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="ID" name="id" type="INT"/>
-    <column comment="name" name="name" type="STRING"/>
-    <column comment="" name="gender" type="STRING"/>
-    <column comment="" name="age" type="INT"/>
-    <column comment="" name="city_id" type="INT"/>
-    <column comment="" name="customer_credit_status" type="STRING"/>
+    <column comment="ID" name="id" _type="INT"/>
+    <column comment="name" name="name" _type="STRING"/>
+    <column comment="" name="gender" _type="STRING"/>
+    <column comment="" name="age" _type="INT"/>
+    <column comment="" name="city_id" _type="INT"/>
+    <column comment="" name="customer_credit_status" _type="STRING"/>
   </columns>
   <properties>
     <property name="dim4.prop" value="d1"/>
@@ -40,7 +40,7 @@
       <storage_name>local</storage_name>
       <table_desc external="true" field_delimiter="," table_location="/tmp/examples/customer">
         <part_cols>
-          <column comment="Time column" name="dt" type="STRING"/>
+          <column comment="Time column" name="dt" _type="STRING"/>
         </part_cols>
         <time_part_cols>dt</time_part_cols>
       </table_desc>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/dim_table.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/dim_table.xml b/lens-examples/src/main/resources/dim_table.xml
index 10c9ac0..7659555 100644
--- a/lens-examples/src/main/resources/dim_table.xml
+++ b/lens-examples/src/main/resources/dim_table.xml
@@ -22,10 +22,10 @@
 <x_dimension_table dimension_name="sample_dim" table_name="dim_table" weight="100.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="ID" name="id" type="INT"/>
-    <column comment="name" name="name" type="STRING"/>
-    <column comment="more details" name="detail" type="STRING"/>
-    <column comment="d2 ID" name="d2id" type="INT"/>
+    <column comment="ID" name="id" _type="INT"/>
+    <column comment="name" name="name" _type="STRING"/>
+    <column comment="more details" name="detail" _type="STRING"/>
+    <column comment="d2 ID" name="d2id" _type="INT"/>
   </columns>
   <properties>
     <property name="dim1.prop" value="d1"/>
@@ -38,7 +38,7 @@
       <storage_name>local</storage_name>
       <table_desc external="true" field_delimiter="," table_location="/tmp/examples/dim1">
         <part_cols>
-          <column comment="Time column" name="dt" type="STRING"/>
+          <column comment="Time column" name="dt" _type="STRING"/>
         </part_cols>
         <time_part_cols>dt</time_part_cols>
       </table_desc>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/dim_table2.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/dim_table2.xml b/lens-examples/src/main/resources/dim_table2.xml
index e72f9c5..3a449c4 100644
--- a/lens-examples/src/main/resources/dim_table2.xml
+++ b/lens-examples/src/main/resources/dim_table2.xml
@@ -22,9 +22,9 @@
 <x_dimension_table dimension_name="sample_dim2" table_name="dim_table2" weight="100.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="ID" name="id" type="INT"/>
-    <column comment="name" name="name" type="STRING"/>
-    <column comment="more details for dim2" name="detail2" type="STRING"/>
+    <column comment="ID" name="id" _type="INT"/>
+    <column comment="name" name="name" _type="STRING"/>
+    <column comment="more details for dim2" name="detail2" _type="STRING"/>
   </columns>
   <properties>
     <property name="dim2.prop" value="d2"/>
@@ -37,7 +37,7 @@
       <storage_name>local</storage_name>
       <table_desc external="true" field_delimiter="," table_location="/tmp/examples/dim2">
         <part_cols>
-          <column comment="Time column" name="dt" type="STRING"/>
+          <column comment="Time column" name="dt" _type="STRING"/>
         </part_cols>
         <time_part_cols>dt</time_part_cols>
       </table_desc>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/dim_table3.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/dim_table3.xml b/lens-examples/src/main/resources/dim_table3.xml
index 7955426..4f67af8 100644
--- a/lens-examples/src/main/resources/dim_table3.xml
+++ b/lens-examples/src/main/resources/dim_table3.xml
@@ -22,10 +22,10 @@
 <x_dimension_table dimension_name="sample_db_dim" table_name="dim_table3" weight="100.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="ID" name="id" type="INT"/>
-    <column comment="name" name="name" type="STRING"/>
-    <column comment="more details" name="detail" type="STRING"/>
-    <column comment="d2 ID" name="d2id" type="INT"/>
+    <column comment="ID" name="id" _type="INT"/>
+    <column comment="name" name="name" _type="STRING"/>
+    <column comment="more details" name="detail" _type="STRING"/>
+    <column comment="d2 ID" name="d2id" _type="INT"/>
   </columns>
   <properties>
     <property name="dim3.prop" value="d1"/>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/dim_table4.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/dim_table4.xml b/lens-examples/src/main/resources/dim_table4.xml
index 9de1cd7..56c22b3 100644
--- a/lens-examples/src/main/resources/dim_table4.xml
+++ b/lens-examples/src/main/resources/dim_table4.xml
@@ -22,10 +22,10 @@
 <x_dimension_table dimension_name="sample_dim" table_name="dim_table4" weight="100.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="ID" name="id" type="INT"/>
-    <column comment="name" name="name" type="STRING"/>
-    <column comment="more details" name="detail" type="STRING"/>
-    <column comment="d2 ID" name="d2id" type="INT"/>
+    <column comment="ID" name="id" _type="INT"/>
+    <column comment="name" name="name" _type="STRING"/>
+    <column comment="more details" name="detail" _type="STRING"/>
+    <column comment="d2 ID" name="d2id" _type="INT"/>
   </columns>
   <properties>
     <property name="dim4.prop" value="d1"/>
@@ -38,7 +38,7 @@
       <storage_name>local</storage_name>
       <table_desc external="true" field_delimiter="," table_location="/tmp/examples/dim4">
         <part_cols>
-          <column comment="Time column" name="dt" type="STRING"/>
+          <column comment="Time column" name="dt" _type="STRING"/>
         </part_cols>
         <time_part_cols>dt</time_part_cols>
       </table_desc>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/fact1.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/fact1.xml b/lens-examples/src/main/resources/fact1.xml
index c934a64..effdfac 100644
--- a/lens-examples/src/main/resources/fact1.xml
+++ b/lens-examples/src/main/resources/fact1.xml
@@ -22,10 +22,10 @@
 <x_fact_table cube_name="sample_cube" name="fact1" weight="100.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="" name="dim1" type="INT"/>
-    <column comment="" name="measure2" type="BIGINT"/>
-    <column comment="" name="measure3" type="INT"/>
-    <column comment="" name="measure4" type="FLOAT"/>
+    <column comment="" name="dim1" _type="INT"/>
+    <column comment="" name="measure2" _type="BIGINT"/>
+    <column comment="" name="measure3" _type="INT"/>
+    <column comment="" name="measure4" _type="FLOAT"/>
   </columns>
   <properties>
     <property name="cube.fact.is.aggregated" value="true"/>
@@ -39,7 +39,7 @@
       <storage_name>local</storage_name>
       <table_desc external="true" field_delimiter="," table_location="/tmp/examples/fact1_local">
         <part_cols>
-          <column comment="Time column" name="dt" type="STRING"/>
+          <column comment="Time column" name="dt" _type="STRING"/>
         </part_cols>
         <time_part_cols>dt</time_part_cols>
       </table_desc>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/fact2.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/fact2.xml b/lens-examples/src/main/resources/fact2.xml
index 3298fc1..f1633f3 100644
--- a/lens-examples/src/main/resources/fact2.xml
+++ b/lens-examples/src/main/resources/fact2.xml
@@ -22,11 +22,11 @@
 <x_fact_table cube_name="sample_cube" name="fact2" weight="200.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="" name="dim1" type="INT"/>
-    <column comment="" name="dim3" type="INT"/>
-    <column comment="" name="measure2" type="INT"/>
-    <column comment="" name="measure3" type="FLOAT"/>
-    <column comment="" name="measure4" type="FLOAT"/>
+    <column comment="" name="dim1" _type="INT"/>
+    <column comment="" name="dim3" _type="INT"/>
+    <column comment="" name="measure2" _type="INT"/>
+    <column comment="" name="measure3" _type="FLOAT"/>
+    <column comment="" name="measure4" _type="FLOAT"/>
   </columns>
   <properties>
     <property name="cube.fact.is.aggregated" value="true"/>
@@ -40,7 +40,7 @@
       <storage_name>local</storage_name>
       <table_desc external="true" field_delimiter="," table_location="/tmp/examples/fact2_local">
         <part_cols>
-          <column comment="Time column" name="dt" type="STRING"/>
+          <column comment="Time column" name="dt" _type="STRING"/>
         </part_cols>
         <time_part_cols>dt</time_part_cols>
       </table_desc>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/product.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/product.xml b/lens-examples/src/main/resources/product.xml
index bb91997..2ab07f1 100644
--- a/lens-examples/src/main/resources/product.xml
+++ b/lens-examples/src/main/resources/product.xml
@@ -22,16 +22,16 @@
 <x_dimension name="product" xmlns="uri:lens:cube:0.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
   xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <attributes>
-    <dim_attribute name="id" type="INT"/>
-    <dim_attribute name="SKU_number" type="INT" />
-    <dim_attribute name="description" type="STRING" />
-    <dim_attribute name="color" type="STRING" />
-    <dim_attribute name="weight" type="FLOAT" />
-    <dim_attribute name="category" type="STRING" />
-    <dim_attribute name="manufacturer" type="STRING" />
+    <dim_attribute name="id" _type="INT"/>
+    <dim_attribute name="SKU_number" _type="INT" />
+    <dim_attribute name="description" _type="STRING" />
+    <dim_attribute name="color" _type="STRING" />
+    <dim_attribute name="weight" _type="FLOAT" />
+    <dim_attribute name="category" _type="STRING" />
+    <dim_attribute name="manufacturer" _type="STRING" />
   </attributes>
   <expressions>
-    <expression name="isHeavy" type="boolean">
+    <expression name="isHeavy" _type="boolean">
       <expr_spec expr = "weight > 10" end_time="2015-04-12T00:00:00"/>
       <expr_spec expr = "weight > 50" start_time="2015-04-12T00:00:00"/>
     </expression>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/product_db_table.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/product_db_table.xml b/lens-examples/src/main/resources/product_db_table.xml
index 867d37b..dabb008 100644
--- a/lens-examples/src/main/resources/product_db_table.xml
+++ b/lens-examples/src/main/resources/product_db_table.xml
@@ -22,13 +22,13 @@
 <x_dimension_table dimension_name="product" table_name="product_db_table" weight="100.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="ID" name="id" type="INT"/>
-    <column comment="SKU_number" name="SKU_number" type="INT"/>
-    <column comment="" name="description" type="STRING"/>
-    <column comment="" name="color" type="STRING"/>
-    <column comment="Category" name="category" type="STRING"/>
-    <column comment="" name="weight" type="FLOAT"/>
-    <column comment="" name="manufacturer" type="STRING"/>
+    <column comment="ID" name="id" _type="INT"/>
+    <column comment="SKU_number" name="SKU_number" _type="INT"/>
+    <column comment="" name="description" _type="STRING"/>
+    <column comment="" name="color" _type="STRING"/>
+    <column comment="Category" name="category" _type="STRING"/>
+    <column comment="" name="weight" _type="FLOAT"/>
+    <column comment="" name="manufacturer" _type="STRING"/>
   </columns>
   <properties>
     <property name="dim4.prop" value="d1"/>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/product_table.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/product_table.xml b/lens-examples/src/main/resources/product_table.xml
index 303f3ad..e039c0d 100644
--- a/lens-examples/src/main/resources/product_table.xml
+++ b/lens-examples/src/main/resources/product_table.xml
@@ -22,12 +22,12 @@
 <x_dimension_table dimension_name="product" table_name="product_table" weight="100.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="ID" name="id" type="INT"/>
-    <column comment="SKU_number" name="SKU_number" type="INT"/>
-    <column comment="" name="description" type="STRING"/>
-    <column comment="" name="color" type="STRING"/>
-    <column comment="" name="weight" type="FLOAT"/>
-    <column comment="" name="manufacturer" type="STRING"/>
+    <column comment="ID" name="id" _type="INT"/>
+    <column comment="SKU_number" name="SKU_number" _type="INT"/>
+    <column comment="" name="description" _type="STRING"/>
+    <column comment="" name="color" _type="STRING"/>
+    <column comment="" name="weight" _type="FLOAT"/>
+    <column comment="" name="manufacturer" _type="STRING"/>
   </columns>
   <properties>
     <property name="dim4.prop" value="d1"/>
@@ -41,8 +41,8 @@
       <storage_name>local</storage_name>
       <table_desc external="true" field_delimiter="," table_location="/tmp/examples/product">
         <part_cols>
-          <column comment="Time column" name="dt" type="STRING"/>
-          <column comment="Category" name="category" type="STRING"/>
+          <column comment="Time column" name="dt" _type="STRING"/>
+          <column comment="Category" name="category" _type="STRING"/>
         </part_cols>
         <time_part_cols>dt</time_part_cols>
       </table_desc>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/rawfact.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/rawfact.xml b/lens-examples/src/main/resources/rawfact.xml
index 8191eb2..65e27b6 100644
--- a/lens-examples/src/main/resources/rawfact.xml
+++ b/lens-examples/src/main/resources/rawfact.xml
@@ -22,13 +22,13 @@
 <x_fact_table cube_name="sample_cube" name="rawfact" weight="500.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="" name="dim1" type="INT"/>
-    <column comment="" name="dim2" type="INT"/>
-    <column comment="" name="dim3" type="INT"/>
-    <column comment="" name="measure1" type="BIGINT"/>
-    <column comment="" name="measure2" type="INT"/>
-    <column comment="" name="measure3" type="FLOAT"/>
-    <column comment="" name="measure4" type="FLOAT"/>
+    <column comment="" name="dim1" _type="INT"/>
+    <column comment="" name="dim2" _type="INT"/>
+    <column comment="" name="dim3" _type="INT"/>
+    <column comment="" name="measure1" _type="BIGINT"/>
+    <column comment="" name="measure2" _type="INT"/>
+    <column comment="" name="measure3" _type="FLOAT"/>
+    <column comment="" name="measure4" _type="FLOAT"/>
   </columns>
   <properties>
     <property name="cube.fact.is.aggregated" value="false"/>
@@ -42,7 +42,7 @@
       <storage_name>local</storage_name>
       <table_desc external="true" field_delimiter="," table_location="/tmp/examples/rawfact">
         <part_cols>
-          <column comment="Time column" name="dt" type="STRING"/>
+          <column comment="Time column" name="dt" _type="STRING"/>
         </part_cols>
         <time_part_cols>dt</time_part_cols>
       </table_desc>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/sales-aggr-continuous-fact.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-aggr-continuous-fact.xml b/lens-examples/src/main/resources/sales-aggr-continuous-fact.xml
index 781a615..74f1686 100644
--- a/lens-examples/src/main/resources/sales-aggr-continuous-fact.xml
+++ b/lens-examples/src/main/resources/sales-aggr-continuous-fact.xml
@@ -22,19 +22,19 @@
 <x_fact_table cube_name="sales" name="sales_aggr_continuous_fact" weight="50.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="" name="order_time" type="TIMESTAMP"/>
-    <column comment="" name="delivery_time" type="TIMESTAMP"/>
-    <column comment="" name="customer_id" type="INT"/>
-    <column comment="" name="product_id" type="INT"/>
-    <column comment="" name="promotion_id" type="INT"/>
-    <column comment="" name="customer_city_id" type="INT"/>
-    <column comment="" name="production_city_id" type="INT"/>
-    <column comment="" name="delivery_city_id" type="INT"/>
-    <column comment="" name="unit_sales" type="BIGINT"/>
-    <column comment="" name="store_sales" type="DOUBLE"/>
-    <column comment="" name="store_cost" type="DOUBLE"/>
-    <column comment="" name="max_line_item_price" type="FLOAT"/>
-    <column comment="" name="max_line_item_discount" type="FLOAT"/>
+    <column comment="" name="order_time" _type="TIMESTAMP"/>
+    <column comment="" name="delivery_time" _type="TIMESTAMP"/>
+    <column comment="" name="customer_id" _type="INT"/>
+    <column comment="" name="product_id" _type="INT"/>
+    <column comment="" name="promotion_id" _type="INT"/>
+    <column comment="" name="customer_city_id" _type="INT"/>
+    <column comment="" name="production_city_id" _type="INT"/>
+    <column comment="" name="delivery_city_id" _type="INT"/>
+    <column comment="" name="unit_sales" _type="BIGINT"/>
+    <column comment="" name="store_sales" _type="DOUBLE"/>
+    <column comment="" name="store_cost" _type="DOUBLE"/>
+    <column comment="" name="max_line_item_price" _type="FLOAT"/>
+    <column comment="" name="max_line_item_discount" _type="FLOAT"/>
   </columns>
   <properties>
     <property name="cube.fact.is.aggregated" value="true"/>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/sales-aggr-fact1.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-aggr-fact1.xml b/lens-examples/src/main/resources/sales-aggr-fact1.xml
index 728c775..9ced07b 100644
--- a/lens-examples/src/main/resources/sales-aggr-fact1.xml
+++ b/lens-examples/src/main/resources/sales-aggr-fact1.xml
@@ -22,19 +22,19 @@
 <x_fact_table cube_name="sales" name="sales_aggr_fact1" weight="300.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="" name="order_time" type="TIMESTAMP"/>
-    <column comment="" name="delivery_time" type="TIMESTAMP"/>
-    <column comment="" name="customer_id" type="INT"/>
-    <column comment="" name="product_id" type="INT"/>
-    <column comment="" name="promotion_id" type="INT"/>
-    <column comment="" name="customer_city_id" type="INT"/>
-    <column comment="" name="production_city_id" type="INT"/>
-    <column comment="" name="delivery_city_id" type="INT"/>
-    <column comment="" name="unit_sales" type="BIGINT"/>
-    <column comment="" name="store_sales" type="DOUBLE"/>
-    <column comment="" name="store_cost" type="DOUBLE"/>
-    <column comment="" name="max_line_item_price" type="FLOAT"/>
-    <column comment="" name="max_line_item_discount" type="FLOAT"/>
+    <column comment="" name="order_time" _type="TIMESTAMP"/>
+    <column comment="" name="delivery_time" _type="TIMESTAMP"/>
+    <column comment="" name="customer_id" _type="INT"/>
+    <column comment="" name="product_id" _type="INT"/>
+    <column comment="" name="promotion_id" _type="INT"/>
+    <column comment="" name="customer_city_id" _type="INT"/>
+    <column comment="" name="production_city_id" _type="INT"/>
+    <column comment="" name="delivery_city_id" _type="INT"/>
+    <column comment="" name="unit_sales" _type="BIGINT"/>
+    <column comment="" name="store_sales" _type="DOUBLE"/>
+    <column comment="" name="store_cost" _type="DOUBLE"/>
+    <column comment="" name="max_line_item_price" _type="FLOAT"/>
+    <column comment="" name="max_line_item_discount" _type="FLOAT"/>
   </columns>
   <properties>
     <property name="cube.fact.is.aggregated" value="true"/>
@@ -48,9 +48,9 @@
       <storage_name>local</storage_name>
       <table_desc external="true" field_delimiter="," table_location="/tmp/examples/aggrfact1">
         <part_cols>
-          <column comment="Process time partition" name="pt" type="STRING"/>
-          <column comment="Order time partition" name="ot" type="STRING"/>
-          <column comment="Delivery time partition" name="dt" type="STRING"/>
+          <column comment="Process time partition" name="pt" _type="STRING"/>
+          <column comment="Order time partition" name="ot" _type="STRING"/>
+          <column comment="Delivery time partition" name="dt" _type="STRING"/>
         </part_cols>
         <time_part_cols>pt</time_part_cols>
         <time_part_cols>ot</time_part_cols>
@@ -65,9 +65,9 @@
       <table_desc external="true" field_delimiter="," table_location="/tmp/db-storage.db"
         storage_handler_name="org.apache.lens.storage.db.DBStorageHandler">
         <part_cols>
-          <column comment="Process time partition" name="pt" type="STRING"/>
-          <column comment="Order time partition" name="ot" type="STRING"/>
-          <column comment="Delivery time partition" name="dt" type="STRING"/>
+          <column comment="Process time partition" name="pt" _type="STRING"/>
+          <column comment="Order time partition" name="ot" _type="STRING"/>
+          <column comment="Delivery time partition" name="dt" _type="STRING"/>
         </part_cols>
         <table_parameters>
           <property name="lens.metastore.native.db.name" value="default"/>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/sales-aggr-fact2.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-aggr-fact2.xml b/lens-examples/src/main/resources/sales-aggr-fact2.xml
index 61edf64..b0038f8 100644
--- a/lens-examples/src/main/resources/sales-aggr-fact2.xml
+++ b/lens-examples/src/main/resources/sales-aggr-fact2.xml
@@ -22,16 +22,16 @@
 <x_fact_table cube_name="sales" name="sales_aggr_fact2" weight="100.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="" name="order_time" type="TIMESTAMP"/>
-    <column comment="" name="delivery_time" type="TIMESTAMP"/>
-    <column comment="" name="product_id" type="INT"/>
-    <column comment="" name="promotion_id" type="INT"/>
-    <column comment="" name="customer_city_id" type="INT"/>
-    <column comment="" name="production_city_id" type="INT"/>
-    <column comment="" name="delivery_city_id" type="INT"/>
-    <column comment="" name="unit_sales" type="BIGINT"/>
-    <column comment="" name="store_sales" type="DOUBLE"/>
-    <column comment="" name="store_cost" type="DOUBLE"/>
+    <column comment="" name="order_time" _type="TIMESTAMP"/>
+    <column comment="" name="delivery_time" _type="TIMESTAMP"/>
+    <column comment="" name="product_id" _type="INT"/>
+    <column comment="" name="promotion_id" _type="INT"/>
+    <column comment="" name="customer_city_id" _type="INT"/>
+    <column comment="" name="production_city_id" _type="INT"/>
+    <column comment="" name="delivery_city_id" _type="INT"/>
+    <column comment="" name="unit_sales" _type="BIGINT"/>
+    <column comment="" name="store_sales" _type="DOUBLE"/>
+    <column comment="" name="store_cost" _type="DOUBLE"/>
   </columns>
   <properties>
     <property name="cube.fact.is.aggregated" value="true"/>
@@ -46,7 +46,7 @@
       <storage_name>local</storage_name>
       <table_desc external="true" field_delimiter="," table_location="/tmp/examples/aggrfact2">
         <part_cols>
-          <column comment="Delivery time partition" name="dt" type="STRING"/>
+          <column comment="Delivery time partition" name="dt" _type="STRING"/>
         </part_cols>
         <time_part_cols>dt</time_part_cols>
       </table_desc>
@@ -59,9 +59,9 @@
       <table_desc external="true" field_delimiter="," table_location="/tmp/db-storage.db"
         storage_handler_name="org.apache.lens.storage.db.DBStorageHandler">
         <part_cols>
-          <column comment="Process time partition" name="pt" type="STRING"/>
-          <column comment="Order time partition" name="ot" type="STRING"/>
-          <column comment="Delivery time partition" name="dt" type="STRING"/>
+          <column comment="Process time partition" name="pt" _type="STRING"/>
+          <column comment="Order time partition" name="ot" _type="STRING"/>
+          <column comment="Delivery time partition" name="dt" _type="STRING"/>
         </part_cols>
         <table_parameters>
           <property name="lens.metastore.native.db.name" value="default"/>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/sales-cube.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-cube.xml b/lens-examples/src/main/resources/sales-cube.xml
index e9e48ce..e944821 100644
--- a/lens-examples/src/main/resources/sales-cube.xml
+++ b/lens-examples/src/main/resources/sales-cube.xml
@@ -29,57 +29,57 @@
     <!-- means dt-20days <= ot <= dt-1hour -->
   </properties>
   <measures>
-    <measure name="unit_sales" type="BIGINT" default_aggr="SUM" display_string="Unit Sales" format_string="#,###"/>
-    <measure name="store_sales" type="DOUBLE" default_aggr="SUM" display_string="Store Sales" format_string="#,###.##"/>
-    <measure name="store_cost" type="DOUBLE" default_aggr="SUM" display_string="Store Cost" format_string="#,###.00"
+    <measure name="unit_sales" _type="BIGINT" default_aggr="SUM" display_string="Unit Sales" format_string="#,###"/>
+    <measure name="store_sales" _type="DOUBLE" default_aggr="SUM" display_string="Store Sales" format_string="#,###.##"/>
+    <measure name="store_cost" _type="DOUBLE" default_aggr="SUM" display_string="Store Cost" format_string="#,###.00"
      start_time='2015-03-01T00:00:00' />
-    <measure name="line_item_quantity" type="INT" default_aggr="SUM" display_string="Line item quantity"/>
-    <measure name="line_item_product_price" type="FLOAT" default_aggr="SUM" display_string="Line item product price"/>
-    <measure name="line_item_discount_amount" type="FLOAT" default_aggr="SUM" display_string="Line item discount"/>
-    <measure name="line_item_tax" type="FLOAT" default_aggr="SUM" display_string="Line item tax"/>
-    <measure name="max_line_item_price" type="FLOAT" default_aggr="MAX" display_string="Maximum Line item price"/>
-    <measure name="max_line_item_discount" type="FLOAT" default_aggr="MAX"
+    <measure name="line_item_quantity" _type="INT" default_aggr="SUM" display_string="Line item quantity"/>
+    <measure name="line_item_product_price" _type="FLOAT" default_aggr="SUM" display_string="Line item product price"/>
+    <measure name="line_item_discount_amount" _type="FLOAT" default_aggr="SUM" display_string="Line item discount"/>
+    <measure name="line_item_tax" _type="FLOAT" default_aggr="SUM" display_string="Line item tax"/>
+    <measure name="max_line_item_price" _type="FLOAT" default_aggr="MAX" display_string="Maximum Line item price"/>
+    <measure name="max_line_item_discount" _type="FLOAT" default_aggr="MAX"
      display_string="Maximum Line item discount"/>
-    <measure name="rating" type="FLOAT" default_aggr="AVG"
+    <measure name="rating" _type="FLOAT" default_aggr="AVG"
       display_string="Rating"/>
   </measures>
   <dim_attributes>
-    <dim_attribute name="customer_id" type="INT" />
-    <dim_attribute name="product_id" type="INT" />
-    <dim_attribute name="promotion_id" type="INT" />
-    <dim_attribute name="order_id" type="INT" />
-    <dim_attribute name="order_line_number" type="INT" />
-    <dim_attribute name="order_time" type="TIMESTAMP" />
-    <dim_attribute name="delivery_time" type="TIMESTAMP" />
-    <dim_attribute name="customer_city_id" type="INT" start_time='2015-03-01T00:00:00' />
-    <dim_attribute name="production_city_id" type="INT" />
-    <dim_attribute name="delivery_city_id" type="INT" />
-    <dim_attribute name="customer_city_name" type="string" description="City name to which the customer belongs"
+    <dim_attribute name="customer_id" _type="INT" />
+    <dim_attribute name="product_id" _type="INT" />
+    <dim_attribute name="promotion_id" _type="INT" />
+    <dim_attribute name="order_id" _type="INT" />
+    <dim_attribute name="order_line_number" _type="INT" />
+    <dim_attribute name="order_time" _type="TIMESTAMP" />
+    <dim_attribute name="delivery_time" _type="TIMESTAMP" />
+    <dim_attribute name="customer_city_id" _type="INT" start_time='2015-03-01T00:00:00' />
+    <dim_attribute name="production_city_id" _type="INT" />
+    <dim_attribute name="delivery_city_id" _type="INT" />
+    <dim_attribute name="customer_city_name" _type="string" description="City name to which the customer belongs"
                    display_string="Customer City">
       <chain_ref_column chain_name="customer_city" ref_col="name" />
     </dim_attribute>
     <dim_attribute name="production_location">
       <hierarchy>
-      <dim_attribute name="production_city_name" type="STRING" description="City name in which the product was produced"
+      <dim_attribute name="production_city_name" _type="STRING" description="City name in which the product was produced"
                      display_string="Production City">
         <chain_ref_column chain_name="production_city" ref_col="name" />
       </dim_attribute>
-      <dim_attribute name="production_state" type="STRING" description="State name in which the product was produced"
+      <dim_attribute name="production_state" _type="STRING" description="State name in which the product was produced"
                      display_string="Production State"/>
-      <dim_attribute name="production_country" type="STRING" description="Country name in which the product was produced"
+      <dim_attribute name="production_country" _type="STRING" description="Country name in which the product was produced"
                      display_string="Production Country"/>
       </hierarchy>
     </dim_attribute>
-    <dim_attribute name="delivery_city_name" type="STRING" description="City name to which the product was delivered"
+    <dim_attribute name="delivery_city_name" _type="STRING" description="City name to which the product was delivered"
                    display_string="Delivery City">
       <chain_ref_column chain_name="delivery_city" ref_col="name" />
     </dim_attribute>
   </dim_attributes>
   <expressions>
-    <expression name="profit" type="DOUBLE" display_string="Profit">
+    <expression name="profit" _type="DOUBLE" display_string="Profit">
       <expr_spec expr="store_sales - store_cost"/>
     </expression>
-    <expression name="promotion_sales" type="DOUBLE" display_string="Promotion sales">
+    <expression name="promotion_sales" _type="DOUBLE" display_string="Promotion sales">
       <expr_spec expr="sum(case when promotion_id = 0 then 0 else store_sales end)" start_time='2015-04-12T00:00:00'/>
       <expr_spec expr="sum(case when promotion_id = -1 then 0 when promotion_id = -1 then 0 else store_sales end)"
        end_time="2015-04-12T00:00:00"/>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/sales-raw-fact.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-raw-fact.xml b/lens-examples/src/main/resources/sales-raw-fact.xml
index 4c2d571..05d925b 100644
--- a/lens-examples/src/main/resources/sales-raw-fact.xml
+++ b/lens-examples/src/main/resources/sales-raw-fact.xml
@@ -22,23 +22,23 @@
 <x_fact_table cube_name="sales" name="sales_raw_fact" weight="500.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="" name="order_time" type="TIMESTAMP"/>
-    <column comment="" name="delivery_time" type="TIMESTAMP"/>
-    <column comment="" name="customer_id" type="INT"/>
-    <column comment="" name="product_id" type="INT"/>
-    <column comment="" name="order_id" type="INT"/>
-    <column comment="" name="promotion_id" type="INT"/>
-    <column comment="" name="order_line_number" type="INT"/>
-    <column comment="" name="customer_city_id" type="INT"/>
-    <column comment="" name="production_city_id" type="INT"/>
-    <column comment="" name="delivery_city_id" type="INT"/>
-    <column comment="" name="unit_sales" type="BIGINT"/>
-    <column comment="" name="store_sales" type="DOUBLE"/>
-    <column comment="" name="store_cost" type="DOUBLE"/>
-    <column comment="" name="line_item_quantity" type="INT"/>
-    <column comment="" name="line_item_product_price" type="INT"/>
-    <column comment="" name="line_item_discount_amount" type="INT"/>
-    <column comment="" name="line_item_tax" type="INT"/>
+    <column comment="" name="order_time" _type="TIMESTAMP"/>
+    <column comment="" name="delivery_time" _type="TIMESTAMP"/>
+    <column comment="" name="customer_id" _type="INT"/>
+    <column comment="" name="product_id" _type="INT"/>
+    <column comment="" name="order_id" _type="INT"/>
+    <column comment="" name="promotion_id" _type="INT"/>
+    <column comment="" name="order_line_number" _type="INT"/>
+    <column comment="" name="customer_city_id" _type="INT"/>
+    <column comment="" name="production_city_id" _type="INT"/>
+    <column comment="" name="delivery_city_id" _type="INT"/>
+    <column comment="" name="unit_sales" _type="BIGINT"/>
+    <column comment="" name="store_sales" _type="DOUBLE"/>
+    <column comment="" name="store_cost" _type="DOUBLE"/>
+    <column comment="" name="line_item_quantity" _type="INT"/>
+    <column comment="" name="line_item_product_price" _type="INT"/>
+    <column comment="" name="line_item_discount_amount" _type="INT"/>
+    <column comment="" name="line_item_tax" _type="INT"/>
   </columns>
   <properties>
     <property name="cube.fact.is.aggregated" value="false"/>
@@ -51,9 +51,9 @@
       <storage_name>local</storage_name>
       <table_desc external="true" field_delimiter="," table_location="/tmp/examples/rawfact">
         <part_cols>
-          <column comment="Process time partition" name="pt" type="STRING"/>
-          <column comment="Order time partition" name="ot" type="STRING"/>
-          <column comment="Delivery time partition" name="dt" type="STRING"/>
+          <column comment="Process time partition" name="pt" _type="STRING"/>
+          <column comment="Order time partition" name="ot" _type="STRING"/>
+          <column comment="Delivery time partition" name="dt" _type="STRING"/>
         </part_cols>
         <time_part_cols>pt</time_part_cols>
         <time_part_cols>ot</time_part_cols>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/sample-cube.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sample-cube.xml b/lens-examples/src/main/resources/sample-cube.xml
index 3b5abea..4046e82 100644
--- a/lens-examples/src/main/resources/sample-cube.xml
+++ b/lens-examples/src/main/resources/sample-cube.xml
@@ -25,19 +25,19 @@
     <property name="cube.sample_cube.timed.dimensions.list" value="dt"/>
   </properties>
   <measures>
-    <measure name="measure1" type="BIGINT"/>
-    <measure name="measure2" type="INT" default_aggr="SUM"/>
-    <measure name="measure3" type="FLOAT" default_aggr="MAX" start_time='2013-12-12T00:00:00'/>
-    <measure name="measure4" type="DOUBLE" default_aggr="MIN"/>
+    <measure name="measure1" _type="BIGINT"/>
+    <measure name="measure2" _type="INT" default_aggr="SUM"/>
+    <measure name="measure3" _type="FLOAT" default_aggr="MAX" start_time='2013-12-12T00:00:00'/>
+    <measure name="measure4" _type="DOUBLE" default_aggr="MIN"/>
   </measures>
   <dim_attributes>
-    <dim_attribute name="dim1" type="INT"/>
-    <dim_attribute name="dim2" type="INT" start_time='2013-12-01T00:00:00'/>
-    <dim_attribute name="dim3" type="INT">
+    <dim_attribute name="dim1" _type="INT"/>
+    <dim_attribute name="dim2" _type="INT" start_time='2013-12-01T00:00:00'/>
+    <dim_attribute name="dim3" _type="INT">
     </dim_attribute>
   </dim_attributes>
   <expressions>
-    <expression name="expr_msr5" type="DOUBLE">
+    <expression name="expr_msr5" _type="DOUBLE">
       <expr_spec expr = "measure3 + measure4" start_time='2013-12-12T00:00:00' />
       <expr_spec expr = "measure3 + measure4 + 0.01" end_time='2013-12-12T00:00:00'/>
     </expression>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/sample-db-only-dimension.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sample-db-only-dimension.xml b/lens-examples/src/main/resources/sample-db-only-dimension.xml
index 2e8aa64..809e66e 100644
--- a/lens-examples/src/main/resources/sample-db-only-dimension.xml
+++ b/lens-examples/src/main/resources/sample-db-only-dimension.xml
@@ -22,10 +22,10 @@
 <x_dimension name="sample_db_dim" xmlns="uri:lens:cube:0.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
   xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <attributes>
-    <dim_attribute name="id" type="INT"/>
-    <dim_attribute name="name" type="STRING"/>
-    <dim_attribute name="detail" type="STRING" start_time='2013-12-01T00:00:00'/>
-    <dim_attribute name="d2id" type="INT" start_time='2013-12-01T00:00:00'/>
+    <dim_attribute name="id" _type="INT"/>
+    <dim_attribute name="name" _type="STRING"/>
+    <dim_attribute name="detail" _type="STRING" start_time='2013-12-01T00:00:00'/>
+    <dim_attribute name="d2id" _type="INT" start_time='2013-12-01T00:00:00'/>
   </attributes>
 
   <join_chains>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/sample-dimension.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sample-dimension.xml b/lens-examples/src/main/resources/sample-dimension.xml
index 3c2589c..778a4c4 100644
--- a/lens-examples/src/main/resources/sample-dimension.xml
+++ b/lens-examples/src/main/resources/sample-dimension.xml
@@ -22,10 +22,10 @@
 <x_dimension name="sample_dim" xmlns="uri:lens:cube:0.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
   xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <attributes>
-    <dim_attribute name="id" type="INT"/>
-    <dim_attribute name="name" type="STRING"/>
-    <dim_attribute name="detail" type="STRING" start_time='2013-12-01T00:00:00'/>
-    <dim_attribute name="d2id" type="INT" start_time='2013-12-01T00:00:00'/>
+    <dim_attribute name="id" _type="INT"/>
+    <dim_attribute name="name" _type="STRING"/>
+    <dim_attribute name="detail" _type="STRING" start_time='2013-12-01T00:00:00'/>
+    <dim_attribute name="d2id" _type="INT" start_time='2013-12-01T00:00:00'/>
   </attributes>
 
   <join_chains>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/sample-dimension2.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sample-dimension2.xml b/lens-examples/src/main/resources/sample-dimension2.xml
index 9740507..0f4fad8 100644
--- a/lens-examples/src/main/resources/sample-dimension2.xml
+++ b/lens-examples/src/main/resources/sample-dimension2.xml
@@ -22,9 +22,9 @@
 <x_dimension name="sample_dim2" xmlns="uri:lens:cube:0.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
   xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <attributes>
-    <dim_attribute name="id" type="INT"/>
-    <dim_attribute name="name" type="STRING"/>
-    <dim_attribute name="detail2" type="STRING" start_time='2013-12-01T00:00:00'/>
+    <dim_attribute name="id" _type="INT"/>
+    <dim_attribute name="name" _type="STRING"/>
+    <dim_attribute name="detail2" _type="STRING" start_time='2013-12-01T00:00:00'/>
   </attributes>
   <properties>
     <property name="dimension.sample_dim2.timed.dimension" value="dt"/>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLApp.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLApp.java b/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLApp.java
index e6e3c02..002d420 100644
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLApp.java
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLApp.java
@@ -24,8 +24,11 @@ import java.util.Set;
 import javax.ws.rs.ApplicationPath;
 import javax.ws.rs.core.Application;
 
+import org.apache.lens.api.util.MoxyJsonConfigurationContextResolver;
+
 import org.glassfish.jersey.filter.LoggingFilter;
 import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.glassfish.jersey.moxy.json.MoxyJsonFeature;
 
 @ApplicationPath("/ml")
 public class MLApp extends Application {
@@ -44,6 +47,8 @@ public class MLApp extends Application {
     classes.add(MLServiceResource.class);
     classes.add(MultiPartFeature.class);
     classes.add(LoggingFilter.class);
+    classes.add(MoxyJsonConfigurationContextResolver.class);
+    classes.add(MoxyJsonFeature.class);
     for (Class<?> cls : additionalClasses) {
       classes.add(cls);
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java b/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java
index ef3d53e..3493709 100644
--- a/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java
+++ b/lens-ml-lib/src/test/java/org/apache/lens/ml/TestMLRunner.java
@@ -39,8 +39,6 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 
-import org.glassfish.jersey.client.ClientConfig;
-import org.glassfish.jersey.media.multipart.MultiPartFeature;
 import org.testng.Assert;
 import org.testng.annotations.AfterTest;
 import org.testng.annotations.BeforeTest;
@@ -48,7 +46,6 @@ import org.testng.annotations.Test;
 
 import lombok.extern.slf4j.Slf4j;
 
-
 @Test
 @Slf4j
 public class TestMLRunner extends LensJerseyTest {
@@ -71,11 +68,6 @@ public class TestMLRunner extends LensJerseyTest {
     return UriBuilder.fromUri("http://localhost/").port(getTestPort()).path("/lensapi").build();
   }
 
-  @Override
-  protected void configureClient(ClientConfig config) {
-    config.register(MultiPartFeature.class);
-  }
-
   @BeforeTest
   public void setUp() throws Exception {
     super.setUp();

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server-api/src/main/java/org/apache/lens/server/api/driver/InMemoryResultSet.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/InMemoryResultSet.java b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/InMemoryResultSet.java
index f6434da..0d64471 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/InMemoryResultSet.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/InMemoryResultSet.java
@@ -80,7 +80,7 @@ public abstract class InMemoryResultSet extends LensResultSet {
    * @see org.apache.lens.server.api.driver.LensResultSet#toQueryResult()
    */
   public InMemoryQueryResult toQueryResult() throws LensException {
-    List<ResultRow> rows = new ArrayList<ResultRow>();
+    List<ResultRow> rows = new ArrayList<>();
     while (hasNext()) {
       rows.add(next());
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/pom.xml
----------------------------------------------------------------------
diff --git a/lens-server/pom.xml b/lens-server/pom.xml
index ba91e76..064197d 100644
--- a/lens-server/pom.xml
+++ b/lens-server/pom.xml
@@ -126,19 +126,10 @@
       <artifactId>jersey-media-multipart</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.glassfish.jersey.media</groupId>
-      <artifactId>jersey-media-json-jackson</artifactId>
-    </dependency>
-    <dependency>
       <groupId>javax.xml.bind</groupId>
       <artifactId>jaxb-api</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.glassfish.jersey.media</groupId>
-      <artifactId>jersey-media-moxy</artifactId>
-    </dependency>
-
-    <dependency>
       <groupId>org.glassfish.grizzly</groupId>
       <artifactId>grizzly-framework</artifactId>
     </dependency>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/main/java/org/apache/lens/server/BaseApp.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/BaseApp.java b/lens-server/src/main/java/org/apache/lens/server/BaseApp.java
new file mode 100644
index 0000000..07f6837
--- /dev/null
+++ b/lens-server/src/main/java/org/apache/lens/server/BaseApp.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.server;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.ws.rs.core.Application;
+
+import org.apache.lens.api.jaxb.LensJAXBContextResolver;
+import org.apache.lens.api.util.MoxyJsonConfigurationContextResolver;
+import org.apache.lens.server.error.LensJAXBValidationExceptionMapper;
+
+import org.glassfish.jersey.filter.LoggingFilter;
+import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.glassfish.jersey.moxy.json.MoxyJsonFeature;
+
+public abstract class BaseApp extends Application {
+
+  @Override
+  public Set<Class<?>> getClasses() {
+    final Set<Class<?>> classes = new HashSet<>();
+    // register root resource
+    classes.add(getResource());
+    classes.add(MultiPartFeature.class);
+    classes.add(LensJAXBContextResolver.class);
+    classes.add(LensJAXBValidationExceptionMapper.class);
+    classes.add(LensRequestContextInitFilter.class);
+    classes.add(LoggingFilter.class);
+    classes.add(LensApplicationListener.class);
+    classes.add(MoxyJsonConfigurationContextResolver.class);
+    classes.add(MoxyJsonFeature.class);
+    return classes;
+  }
+
+  protected abstract Class getResource();
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/main/java/org/apache/lens/server/LensServer.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/LensServer.java b/lens-server/src/main/java/org/apache/lens/server/LensServer.java
index 61eb37f..092ecda 100644
--- a/lens-server/src/main/java/org/apache/lens/server/LensServer.java
+++ b/lens-server/src/main/java/org/apache/lens/server/LensServer.java
@@ -121,7 +121,6 @@ public class LensServer {
 
   private ResourceConfig getUIApp() {
     ResourceConfig uiApp = ResourceConfig.forApplicationClass(UIApp.class);
-    uiApp.register(new LoggingFilter(Logger.getLogger(LensServer.class.getName() + ".ui_request"), true));
     uiApp.setApplicationName("Lens UI");
     return uiApp;
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/main/java/org/apache/lens/server/ServerModeFilter.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/ServerModeFilter.java b/lens-server/src/main/java/org/apache/lens/server/ServerModeFilter.java
index 630b95e..e87fe46 100644
--- a/lens-server/src/main/java/org/apache/lens/server/ServerModeFilter.java
+++ b/lens-server/src/main/java/org/apache/lens/server/ServerModeFilter.java
@@ -39,25 +39,28 @@ public class ServerModeFilter implements ContainerRequestFilter {
     switch (LensServices.get().getServiceMode()) {
     case READ_ONLY:
       // Allows all requests on session and only GET everywhere
-      if (!requestContext.getUriInfo().getPath().startsWith("/session")) {
+      if (!requestContext.getUriInfo().getPath().startsWith("session")) {
         if (!requestContext.getMethod().equals("GET")) {
-          throw new NotAllowedException("Server is in readonly mode", "GET", (String[]) null);
+          throw new NotAllowedException("Server is in readonly mode. Request on path:"
+            + requestContext.getUriInfo().getPath(), "GET", (String[]) null);
         }
       }
       break;
     case METASTORE_READONLY:
       // Allows GET on metastore and all other requests
-      if (requestContext.getUriInfo().getPath().startsWith("/metastore")) {
+      if (requestContext.getUriInfo().getPath().startsWith("metastore")) {
         if (!requestContext.getMethod().equals("GET")) {
-          throw new NotAllowedException("Metastore is in readonly mode", "GET", (String[]) null);
+          throw new NotAllowedException("Metastore is in readonly mode. Request on path:"
+            + requestContext.getUriInfo().getPath(), "GET", (String[]) null);
         }
       }
       break;
     case METASTORE_NODROP:
      // Does not allow DROP on metastore; all other requests are allowed
-      if (requestContext.getUriInfo().getPath().startsWith("/metastore")) {
+      if (requestContext.getUriInfo().getPath().startsWith("metastore")) {
         if (requestContext.getMethod().equals("DELETE")) {
-          throw new NotAllowedException("Metastore is in nodrop mode", "GET", new String[]{"PUT", "POST"});
+          throw new NotAllowedException("Metastore is in nodrop mode. Request on path:"
+            + requestContext.getUriInfo().getPath(), "GET", new String[]{"PUT", "POST"});
         }
       }
       break;

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
index fc67df1..a1acd1a 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
@@ -701,7 +701,7 @@ public class CubeMetastoreServiceImpl extends BaseLensService implements CubeMet
         storageName);
       List<Partition> parts = client.getPartitionsByFilter(storageTableName, filter);
       List<String> timePartCols = client.getTimePartColNamesOfTable(storageTableName);
-      return xpartitionListFromPartitionList(parts, timePartCols);
+      return xpartitionListFromPartitionList(fact, parts, timePartCols);
     } catch (HiveException exc) {
       throw new LensException(exc);
     } finally {
@@ -737,31 +737,31 @@ public class CubeMetastoreServiceImpl extends BaseLensService implements CubeMet
     }
   }
 
-  private CubeDimensionTable checkDimensionStorage(LensSessionHandle sessionid, String dimension, String storage)
+  private CubeDimensionTable checkDimTableStorage(LensSessionHandle sessionid, String dimTable, String storage)
     throws HiveException, LensException {
     CubeMetastoreClient client = getClient(sessionid);
-    if (!client.isDimensionTable(dimension)) {
-      throw new NotFoundException("Dimension table not found: " + dimension);
+    if (!client.isDimensionTable(dimTable)) {
+      throw new NotFoundException("Dimension table not found: " + dimTable);
     }
-    CubeDimensionTable cdt = client.getDimensionTable(dimension);
+    CubeDimensionTable cdt = client.getDimensionTable(dimTable);
     if (!cdt.getStorages().contains(storage)) {
-      throw new NotFoundException("Storage " + storage + " not found for dimension " + dimension);
+      throw new NotFoundException("Storage " + storage + " not found for dimension table " + dimTable);
     }
     return cdt;
   }
 
   @Override
   public XPartitionList getAllPartitionsOfDimTableStorage(
-    LensSessionHandle sessionid, String dimension, String storageName, String filter)
+    LensSessionHandle sessionid, String dimTable, String storageName, String filter)
     throws LensException {
     try {
       acquire(sessionid);
-      checkDimensionStorage(sessionid, dimension, storageName);
+      checkDimTableStorage(sessionid, dimTable, storageName);
       CubeMetastoreClient client = getClient(sessionid);
-      String storageTableName = MetastoreUtil.getFactOrDimtableStorageTableName(dimension, storageName);
+      String storageTableName = MetastoreUtil.getFactOrDimtableStorageTableName(dimTable, storageName);
       List<Partition> partitions = client.getPartitionsByFilter(storageTableName, filter);
       List<String> timePartCols = client.getTimePartColNamesOfTable(storageTableName);
-      return xpartitionListFromPartitionList(partitions, timePartCols);
+      return xpartitionListFromPartitionList(dimTable, partitions, timePartCols);
     } catch (HiveException exc) {
       throw new LensException(exc);
     } finally {
@@ -774,7 +774,7 @@ public class CubeMetastoreServiceImpl extends BaseLensService implements CubeMet
     String dimTblName, String storageName, XPartition partition) throws LensException {
     try {
       acquire(sessionid);
-      checkDimensionStorage(sessionid, dimTblName, storageName);
+      checkDimTableStorage(sessionid, dimTblName, storageName);
       return getClient(sessionid).addPartition(storagePartSpecFromXPartition(partition), storageName).size();
     } catch (HiveException exc) {
       throw new LensException(exc);
@@ -828,7 +828,7 @@ public class CubeMetastoreServiceImpl extends BaseLensService implements CubeMet
     String dimTblName, String storageName, XPartitionList partitions) throws LensException {
     try {
       acquire(sessionid);
-      checkDimensionStorage(sessionid, dimTblName, storageName);
+      checkDimTableStorage(sessionid, dimTblName, storageName);
       return getClient(sessionid).addPartitions(storagePartSpecListFromXPartitionList(partitions), storageName).size();
     } catch (HiveException exc) {
       throw new LensException(exc);

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
index 70323d3..6fd19a0 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
@@ -792,20 +792,22 @@ public final class JAXBUtils {
     return nonTimePartSpec;
   }
 
-  public static XPartitionList xpartitionListFromPartitionList(List<Partition> partitions, List<String> timePartCols)
-    throws HiveException {
+  public static XPartitionList xpartitionListFromPartitionList(String cubeTableName, List<Partition> partitions,
+    List<String> timePartCols) throws HiveException {
     XPartitionList xPartitionList = new XPartitionList();
     xPartitionList.getPartition();
     if (partitions != null) {
       for (Partition partition : partitions) {
-        xPartitionList.getPartition().add(xpartitionFromPartition(partition, timePartCols));
+        xPartitionList.getPartition().add(xpartitionFromPartition(cubeTableName, partition, timePartCols));
       }
     }
     return xPartitionList;
   }
 
-  public static XPartition xpartitionFromPartition(Partition p, List<String> timePartCols) throws HiveException {
+  public static XPartition xpartitionFromPartition(String cubeTableName, Partition p, List<String> timePartCols)
+    throws HiveException {
     XPartition xp = new XPartition();
+    xp.setFactOrDimensionTableName(cubeTableName);
     xp.setPartitionParameters(new XProperties());
     xp.setSerdeParameters(new XProperties());
     xp.setName(p.getCompleteName());

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreApp.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreApp.java b/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreApp.java
index 520c698..3ea11c1 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreApp.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreApp.java
@@ -18,34 +18,15 @@
  */
 package org.apache.lens.server.metastore;
 
-
-import java.util.HashSet;
-import java.util.Set;
-
 import javax.ws.rs.ApplicationPath;
-import javax.ws.rs.core.Application;
 
-import org.apache.lens.api.jaxb.LensJAXBContextResolver;
-import org.apache.lens.server.LensApplicationListener;
-import org.apache.lens.server.LensRequestContextInitFilter;
-import org.apache.lens.server.error.LensJAXBValidationExceptionMapper;
-
-import org.glassfish.jersey.filter.LoggingFilter;
-import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.apache.lens.server.BaseApp;
 
 @ApplicationPath("/")
-public class MetastoreApp extends Application {
+public class MetastoreApp extends BaseApp {
+
   @Override
-  public Set<Class<?>> getClasses() {
-    final Set<Class<?>> classes = new HashSet<Class<?>>();
-    // register root resource
-    classes.add(MetastoreResource.class);
-    classes.add(LoggingFilter.class);
-    classes.add(MultiPartFeature.class);
-    classes.add(LensApplicationListener.class);
-    classes.add(LensJAXBContextResolver.class);
-    classes.add(LensRequestContextInitFilter.class);
-    classes.add(LensJAXBValidationExceptionMapper.class);
-    return classes;
+  protected Class getResource() {
+    return MetastoreResource.class;
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java b/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java
index fb937c6..e7d545c 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java
@@ -40,8 +40,6 @@ import org.apache.commons.lang.NotImplementedException;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 
-import org.glassfish.jersey.media.multipart.FormDataParam;
-
 import com.google.common.collect.Lists;
 import lombok.extern.slf4j.Slf4j;
 
@@ -850,11 +848,9 @@ public class MetastoreResource {
    * @return {@link APIResult} with state {@link Status#SUCCEEDED}, if create was successful. {@link APIResult} with
    * state {@link Status#FAILED}, if create has failed
    */
-  @Consumes({MediaType.MULTIPART_FORM_DATA})
   @POST
   @Path("/facts")
-  public APIResult createFactTable(@FormDataParam("sessionid") LensSessionHandle sessionid,
-    @FormDataParam("fact") XFactTable fact)
+  public APIResult createFactTable(@QueryParam("sessionid") LensSessionHandle sessionid, XFactTable fact)
     throws LensException {
     checkSessionId(sessionid);
     try {
@@ -1093,6 +1089,8 @@ public class MetastoreResource {
     XPartition partition) {
     checkSessionId(sessionid);
     checkNonNullArgs("Partition is null", partition);
+    checkNonNullArgs("Partition elements are null", partition.getFactOrDimensionTableName(),
+      partition.getUpdatePeriod());
     try {
       return successOrPartialOrFailure(getSvc().addPartitionToFactStorage(sessionid, factName, storage, partition), 1);
     } catch (LensException exc) {
@@ -1121,6 +1119,8 @@ public class MetastoreResource {
     XPartition partition) {
     checkSessionId(sessionid);
     checkNonNullArgs("Partition is null", partition);
+    checkNonNullArgs("Partition elements are null", partition.getFactOrDimensionTableName(),
+      partition.getUpdatePeriod());
     try {
       getSvc().updatePartition(sessionid, factName, storage, partition);
     } catch (LensException exc) {
@@ -1239,8 +1239,8 @@ public class MetastoreResource {
    */
   @POST
   @Path("/dimtables")
-  public APIResult createDimensionTable(@FormDataParam("sessionid") LensSessionHandle sessionid,
-    @FormDataParam("dimensionTable") XDimensionTable dimensionTable) {
+  public APIResult createDimensionTable(@QueryParam("sessionid") LensSessionHandle sessionid,
+                                        XDimensionTable dimensionTable) {
     checkSessionId(sessionid);
     try {
       getSvc().createDimensionTable(sessionid, dimensionTable);
@@ -1514,6 +1514,8 @@ public class MetastoreResource {
     XPartition partition) {
     checkSessionId(sessionid);
     checkNonNullArgs("Partition is null", partition);
+    checkNonNullArgs("Partition elements are null", partition.getFactOrDimensionTableName(),
+      partition.getUpdatePeriod());
     try {
       return successOrPartialOrFailure(getSvc().addPartitionToDimStorage(sessionid, dimTableName, storage, partition),
         1);
@@ -1538,10 +1540,12 @@ public class MetastoreResource {
   @Path("/dimtables/{dimTableName}/storages/{storage}/partition")
   public APIResult updatePartitionOfDimStorage(@QueryParam("sessionid") LensSessionHandle sessionid,
     @PathParam("dimTableName") String dimTableName,
-    @PathParam("storage") String storage,
+                                               @PathParam("storage") String storage,
     XPartition partition) {
     checkSessionId(sessionid);
     checkNonNullArgs("Partition is null", partition);
+    checkNonNullArgs("Partition elements are null", partition.getFactOrDimensionTableName(),
+      partition.getUpdatePeriod());
     try {
       getSvc().updatePartition(sessionid, dimTableName, storage, partition);
     } catch (LensException exc) {

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/main/java/org/apache/lens/server/query/QueryApp.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryApp.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryApp.java
index 9db034a..9126268 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryApp.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryApp.java
@@ -18,32 +18,18 @@
  */
 package org.apache.lens.server.query;
 
-import java.util.HashSet;
-import java.util.Set;
-
 import javax.ws.rs.ApplicationPath;
-import javax.ws.rs.core.Application;
-
-import org.apache.lens.server.LensApplicationListener;
-import org.apache.lens.server.LensRequestContextInitFilter;
 
-import org.glassfish.jersey.filter.LoggingFilter;
-import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.apache.lens.server.BaseApp;
 
 /**
  * The Class QueryApp.
  */
 @ApplicationPath("/queryapi")
-public class QueryApp extends Application {
+public class QueryApp extends BaseApp {
+
   @Override
-  public Set<Class<?>> getClasses() {
-    final Set<Class<?>> classes = new HashSet<Class<?>>();
-    // register root resource
-    classes.add(QueryServiceResource.class);
-    classes.add(MultiPartFeature.class);
-    classes.add(LensRequestContextInitFilter.class);
-    classes.add(LoggingFilter.class);
-    classes.add(LensApplicationListener.class);
-    return classes;
+  protected Class getResource() {
+    return QueryServiceResource.class;
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/main/java/org/apache/lens/server/query/QueryServiceResource.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryServiceResource.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryServiceResource.java
index bb4cfd2..6e1a709 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryServiceResource.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryServiceResource.java
@@ -207,7 +207,7 @@ public class QueryServiceResource {
   @Consumes({MediaType.MULTIPART_FORM_DATA})
   @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, MediaType.TEXT_PLAIN})
   @MultiPurposeResource(formParamName = "operation")
-  public LensAPIResult<? extends QuerySubmitResult> query(@FormDataParam("sessionid") LensSessionHandle sessionid,
+  public LensAPIResult<QuerySubmitResult> query(@FormDataParam("sessionid") LensSessionHandle sessionid,
       @FormDataParam("query") String query, @FormDataParam("operation") String operation,
       @FormDataParam("conf") LensConf conf, @DefaultValue("30000") @FormDataParam("timeoutmillis") Long timeoutmillis,
       @DefaultValue("") @FormDataParam("queryName") String queryName) throws LensException {
@@ -351,7 +351,7 @@ public class QueryServiceResource {
   @Consumes({MediaType.MULTIPART_FORM_DATA})
   @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, MediaType.TEXT_PLAIN})
   @MultiPurposeResource(formParamName = "operation")
-  public LensAPIResult<? extends QuerySubmitResult> prepareQuery(
+  public LensAPIResult<QuerySubmitResult> prepareQuery(
       @FormDataParam("sessionid") LensSessionHandle sessionid, @FormDataParam("query") String query,
       @DefaultValue("") @FormDataParam("operation") String operation, @FormDataParam("conf") LensConf conf,
       @DefaultValue("") @FormDataParam("queryName") String queryName) throws LensException {

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/main/java/org/apache/lens/server/query/save/SavedQueryApp.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/save/SavedQueryApp.java b/lens-server/src/main/java/org/apache/lens/server/query/save/SavedQueryApp.java
index e55ed13..494df08 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/save/SavedQueryApp.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/save/SavedQueryApp.java
@@ -18,31 +18,17 @@
  */
 package org.apache.lens.server.query.save;
 
-import java.util.HashSet;
-import java.util.Set;
-
 import javax.ws.rs.ApplicationPath;
-import javax.ws.rs.core.Application;
-
-import org.apache.lens.server.LensApplicationListener;
-import org.apache.lens.server.LensRequestContextInitFilter;
 
-import org.glassfish.jersey.filter.LoggingFilter;
-import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.apache.lens.server.BaseApp;
 
 /**
  * The Class SavedQueryApp.
  */
 @ApplicationPath("/savedquery")
-public class SavedQueryApp extends Application {
+public class SavedQueryApp extends BaseApp {
   @Override
-  public Set<Class<?>> getClasses() {
-    final Set<Class<?>> classes = new HashSet<Class<?>>();
-    classes.add(SavedQueryResource.class);
-    classes.add(MultiPartFeature.class);
-    classes.add(LensRequestContextInitFilter.class);
-    classes.add(LoggingFilter.class);
-    classes.add(LensApplicationListener.class);
-    return classes;
+  protected Class getResource() {
+    return SavedQueryResource.class;
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/main/java/org/apache/lens/server/query/save/SavedQueryResource.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/save/SavedQueryResource.java b/lens-server/src/main/java/org/apache/lens/server/query/save/SavedQueryResource.java
index 5c247be..02541c5 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/save/SavedQueryResource.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/save/SavedQueryResource.java
@@ -35,7 +35,7 @@ import javax.ws.rs.core.UriInfo;
 import org.apache.lens.api.LensConf;
 import org.apache.lens.api.LensSessionHandle;
 import org.apache.lens.api.error.ErrorCollection;
-import org.apache.lens.api.query.QuerySubmitResult;
+import org.apache.lens.api.query.QueryHandle;
 import org.apache.lens.api.query.save.ListResponse;
 import org.apache.lens.api.query.save.ParameterParserResponse;
 import org.apache.lens.api.query.save.ResourceModifiedResponse;
@@ -253,7 +253,7 @@ public class SavedQueryResource {
   @POST
   @Path("/savedqueries/{id}")
   @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, MediaType.TEXT_PLAIN})
-  public LensAPIResult<? extends QuerySubmitResult> run(
+  public LensAPIResult<QueryHandle> run(
     @PathParam("id") long id,
     @Context UriInfo info,
     @FormDataParam("sessionid") LensSessionHandle sessionid,

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/main/java/org/apache/lens/server/scheduler/SchedulerApp.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/scheduler/SchedulerApp.java b/lens-server/src/main/java/org/apache/lens/server/scheduler/SchedulerApp.java
index 0ea8544..91bc54f 100644
--- a/lens-server/src/main/java/org/apache/lens/server/scheduler/SchedulerApp.java
+++ b/lens-server/src/main/java/org/apache/lens/server/scheduler/SchedulerApp.java
@@ -18,28 +18,18 @@
  */
 package org.apache.lens.server.scheduler;
 
-import java.util.HashSet;
-import java.util.Set;
-
 import javax.ws.rs.ApplicationPath;
-import javax.ws.rs.core.Application;
-
-import org.apache.lens.server.LensApplicationListener;
 
-import org.glassfish.jersey.filter.LoggingFilter;
+import org.apache.lens.server.BaseApp;
 
 /**
  * The Class SchedulerApp.
  */
-@ApplicationPath("/queryscheduler")
-public class SchedulerApp extends Application {
+@ApplicationPath("/scheduler")
+public class SchedulerApp extends BaseApp {
+
   @Override
-  public Set<Class<?>> getClasses() {
-    final Set<Class<?>> classes = new HashSet<Class<?>>();
-    // register root resource
-    classes.add(ScheduleResource.class);
-    classes.add(LensApplicationListener.class);
-    classes.add(LoggingFilter.class);
-    return classes;
+  protected Class getResource() {
+    return ScheduleResource.class;
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/main/java/org/apache/lens/server/session/SessionApp.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/session/SessionApp.java b/lens-server/src/main/java/org/apache/lens/server/session/SessionApp.java
index a499968..a56bc89 100644
--- a/lens-server/src/main/java/org/apache/lens/server/session/SessionApp.java
+++ b/lens-server/src/main/java/org/apache/lens/server/session/SessionApp.java
@@ -18,31 +18,18 @@
  */
 package org.apache.lens.server.session;
 
-import java.util.HashSet;
-import java.util.Set;
-
 import javax.ws.rs.ApplicationPath;
-import javax.ws.rs.core.Application;
-
-import org.apache.lens.server.LensApplicationListener;
-
-import org.glassfish.jersey.filter.LoggingFilter;
-import org.glassfish.jersey.media.multipart.MultiPartFeature;
 
+import org.apache.lens.server.BaseApp;
 /**
  * The Class SessionApp.
  */
 @ApplicationPath("/session")
-public class SessionApp extends Application {
+public class SessionApp extends BaseApp {
+
   @Override
-  public Set<Class<?>> getClasses() {
-    final Set<Class<?>> classes = new HashSet<Class<?>>();
-    // register root resource
-    classes.add(SessionResource.class);
-    classes.add(MultiPartFeature.class);
-    classes.add(LoggingFilter.class);
-    classes.add(LensApplicationListener.class);
-    return classes;
+  protected Class getResource() {
+    return SessionResource.class;
   }
 
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/main/java/org/apache/lens/server/session/SessionResource.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/session/SessionResource.java b/lens-server/src/main/java/org/apache/lens/server/session/SessionResource.java
index 3ba5edd..ac77418 100644
--- a/lens-server/src/main/java/org/apache/lens/server/session/SessionResource.java
+++ b/lens-server/src/main/java/org/apache/lens/server/session/SessionResource.java
@@ -45,7 +45,7 @@ import lombok.extern.slf4j.Slf4j;
  * <p></p>
  * This provides api for all things in session.
  */
-@Path("/session")
+@Path("session")
 @Slf4j
 public class SessionResource {
 

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/main/java/org/apache/lens/server/ui/UIApp.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/ui/UIApp.java b/lens-server/src/main/java/org/apache/lens/server/ui/UIApp.java
index c22a1aa..de4ce33 100644
--- a/lens-server/src/main/java/org/apache/lens/server/ui/UIApp.java
+++ b/lens-server/src/main/java/org/apache/lens/server/ui/UIApp.java
@@ -18,32 +18,28 @@
  */
 package org.apache.lens.server.ui;
 
-import java.util.HashSet;
 import java.util.Set;
 
 import javax.ws.rs.ApplicationPath;
-import javax.ws.rs.core.Application;
 
-import org.apache.lens.server.AuthenticationFilter;
-import org.apache.lens.server.LensApplicationListener;
-
-import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.apache.lens.server.BaseApp;
 
 /**
  * The Class UIApp.
  */
 @ApplicationPath("/ui")
-public class UIApp extends Application {
+public class UIApp extends BaseApp {
+
+  @Override
+  protected Class getResource() {
+    return StaticFileResource.class;
+  }
 
   public Set<Class<?>> getClasses() {
-    final Set<Class<?>> classes = new HashSet<Class<?>>();
-    classes.add(StaticFileResource.class);
+    final Set<Class<?>> classes = super.getClasses();
     classes.add(QueryServiceUIResource.class);
     classes.add(SessionUIResource.class);
     classes.add(MetastoreUIResource.class);
-    classes.add(MultiPartFeature.class);
-    classes.add(AuthenticationFilter.class);
-    classes.add(LensApplicationListener.class);
     return classes;
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/main/resources/lensserver-default.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/main/resources/lensserver-default.xml b/lens-server/src/main/resources/lensserver-default.xml
index 881c159..a711d03 100644
--- a/lens-server/src/main/resources/lensserver-default.xml
+++ b/lens-server/src/main/resources/lensserver-default.xml
@@ -455,7 +455,7 @@
 
   <property>
     <name>lens.server.ws.featurenames</name>
-    <value>multipart</value>
+    <value>multipart,moxyjson,moxyjsonconfigresovler</value>
     <description>These JAX-RS Feature(s) would be started in the specified order when lens-server starts up
     </description>
   </property>
@@ -467,6 +467,16 @@
   </property>
 
   <property>
+    <name>lens.server.moxyjson.ws.feature.impl</name>
+    <value>org.glassfish.jersey.moxy.json.MoxyJsonFeature</value>
+    <description>Enable Moxy json feature </description>
+  </property>
+  <property>
+    <name>lens.server.moxyjsonconfigresovler.ws.feature.impl</name>
+    <value>org.apache.lens.api.util.MoxyJsonConfigurationContextResolver</value>
+    <description>Moxy json configuration resolver</description>
+  </property>
+  <property>
     <name>lens.server.ws.filternames</name>
     <value>authentication,consistentState,serverMode</value>
     <description>These JAX-RS filters would be started in the specified order when lens-server starts up</description>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/LensAllApplicationJerseyTest.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/LensAllApplicationJerseyTest.java b/lens-server/src/test/java/org/apache/lens/server/LensAllApplicationJerseyTest.java
index 56a3b75..a3e0a19 100644
--- a/lens-server/src/test/java/org/apache/lens/server/LensAllApplicationJerseyTest.java
+++ b/lens-server/src/test/java/org/apache/lens/server/LensAllApplicationJerseyTest.java
@@ -20,8 +20,7 @@ package org.apache.lens.server;
 
 import javax.ws.rs.core.Application;
 
-import org.glassfish.jersey.client.ClientConfig;
-import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.glassfish.jersey.test.TestProperties;
 
 /**
  * The Class LensAllApplicationJerseyTest.
@@ -35,17 +34,8 @@ public abstract class LensAllApplicationJerseyTest extends LensJerseyTest {
      */
   @Override
   protected Application configure() {
+    enable(TestProperties.LOG_TRAFFIC);
+    enable(TestProperties.DUMP_ENTITY);
     return new LensApplication();
   }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.glassfish.jersey.test.JerseyTest#configureClient(org.glassfish.jersey.client.ClientConfig)
-   */
-  @Override
-  protected void configureClient(ClientConfig config) {
-    config.register(MultiPartFeature.class);
-  }
-
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java b/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
index 8f52ddd..a00a86b 100644
--- a/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
+++ b/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
@@ -30,8 +30,12 @@ import java.net.URI;
 import java.util.List;
 import java.util.concurrent.ConcurrentLinkedQueue;
 
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.UriBuilder;
 
+import org.apache.lens.api.jaxb.LensJAXBContextResolver;
+import org.apache.lens.api.util.MoxyJsonConfigurationContextResolver;
 import org.apache.lens.driver.hive.TestRemoteHiveDriver;
 import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.metrics.LensMetricsUtil;
@@ -44,9 +48,13 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hive.service.Service;
 import org.apache.hive.service.Service.STATE;
 
+import org.glassfish.jersey.client.ClientConfig;
+import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.glassfish.jersey.moxy.json.MoxyJsonFeature;
 import org.glassfish.jersey.test.JerseyTest;
 import org.testng.annotations.AfterSuite;
 import org.testng.annotations.BeforeSuite;
+import org.testng.annotations.DataProvider;
 
 import com.google.common.collect.Lists;
 
@@ -59,6 +67,7 @@ import lombok.extern.slf4j.Slf4j;
 public abstract class LensJerseyTest extends JerseyTest {
 
   private int port = -1;
+  protected MediaType defaultMT = MediaType.APPLICATION_XML_TYPE;
 
   private final LogSegregationContext logSegregationContext = new MappedDiagnosticLogSegregationContext();
 
@@ -79,7 +88,7 @@ public abstract class LensJerseyTest extends JerseyTest {
     super.tearDown();
   }
   protected int getTestPort() {
-    if (!isPortAlreadyFound()) {
+    if (isPortAlreadyFound()) {
       return port;
     }
     ServerSocket socket = null;
@@ -109,6 +118,14 @@ public abstract class LensJerseyTest extends JerseyTest {
     return UriBuilder.fromUri(getUri()).path("lens-server").build();
   }
 
+  @Override
+  protected void configureClient(ClientConfig config) {
+    config.register(MultiPartFeature.class);
+    config.register(MoxyJsonFeature.class);
+    config.register(MoxyJsonConfigurationContextResolver.class);
+    config.register(LensJAXBContextResolver.class);
+  }
+
   public HiveConf getServerConf() {
     return LensServerConf.getHiveConf();
   }
@@ -213,6 +230,7 @@ public abstract class LensJerseyTest extends JerseyTest {
     LensServices.get().start();
     System.out.println("Lens services restarted!");
   }
+
   public static void waitForPurge(int allowUnpurgable,
     ConcurrentLinkedQueue<QueryExecutionServiceImpl.FinishedQuery> finishedQueries) throws InterruptedException {
     List<QueryExecutionServiceImpl.FinishedQuery> unPurgable = Lists.newArrayList();
@@ -228,4 +246,21 @@ public abstract class LensJerseyTest extends JerseyTest {
       Thread.sleep(5000);
     }
   }
+
+  @DataProvider(name = "mediaTypeData")
+  public Object[][] mediaTypeData() {
+    return new Object[][] {
+      {MediaType.APPLICATION_XML_TYPE},
+      {MediaType.APPLICATION_JSON_TYPE},
+    };
+  }
+
+  public static Entity getEntityForString(String o, MediaType mt) {
+    if (mt.equals(MediaType.APPLICATION_JSON_TYPE)) {
+      return Entity.json(o);
+    } else if (mt.equals(MediaType.APPLICATION_XML_TYPE)) {
+      return Entity.xml(o);
+    }
+    return null;
+  }
 }


[28/51] [abbrv] lens git commit: LENS-735 : Remove accepting TableReferences for ReferenceDimAttribute

Posted by de...@apache.org.
LENS-735 : Remove accepting TableReferences for ReferenceDimAttribute


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/908530f5
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/908530f5
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/908530f5

Branch: refs/heads/current-release-line
Commit: 908530f5883ae8844c6a16cb5564c926cc10bf19
Parents: c73d584
Author: Amareshwari Sriramadasu <am...@gmail.com>
Authored: Mon Jan 11 13:24:32 2016 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Mon Jan 11 13:24:33 2016 +0530

----------------------------------------------------------------------
 lens-api/src/main/resources/cube-0.1.xsd        |  36 +-
 lens-api/src/main/resources/lens-errors.conf    |  11 +-
 .../lens/cli/TestLensDimensionCommands.java     |  28 +-
 .../resources/cube_with_no_weight_facts.xml     |   8 +-
 lens-cli/src/test/resources/sample-cube.xml     |  14 +-
 lens-cli/src/test/resources/test-dimension.xml  |  22 +-
 .../lens/cube/error/LensCubeErrorCode.java      |   6 +-
 .../lens/cube/metadata/AbstractBaseTable.java   |  53 +-
 .../lens/cube/metadata/AbstractCubeTable.java   |  26 +-
 .../org/apache/lens/cube/metadata/Cube.java     | 111 ++-
 .../apache/lens/cube/metadata/CubeColumn.java   |   1 -
 .../lens/cube/metadata/CubeDimensionTable.java  |  21 +-
 .../lens/cube/metadata/CubeFactTable.java       |  15 +-
 .../lens/cube/metadata/CubeMetastoreClient.java |  11 +-
 .../apache/lens/cube/metadata/DerivedCube.java  |  31 +-
 .../apache/lens/cube/metadata/Dimension.java    |  20 +-
 .../apache/lens/cube/metadata/JoinChain.java    |  16 +-
 .../cube/metadata/ReferencedDimAtrribute.java   | 195 -----
 .../cube/metadata/ReferencedDimAttribute.java   | 115 +++
 .../apache/lens/cube/metadata/SchemaGraph.java  | 377 ---------
 .../lens/cube/metadata/join/JoinPath.java       | 101 +++
 .../cube/metadata/join/TableRelationship.java   |  46 +
 .../apache/lens/cube/parse/AutoJoinContext.java | 760 -----------------
 .../apache/lens/cube/parse/CandidateDim.java    |  16 +-
 .../lens/cube/parse/CubeQueryContext.java       |  94 +--
 .../cube/parse/DenormalizationResolver.java     |  74 +-
 .../apache/lens/cube/parse/DimHQLContext.java   |   4 +-
 .../apache/lens/cube/parse/FieldValidator.java  |   9 +-
 .../org/apache/lens/cube/parse/HQLParser.java   |   2 +-
 .../org/apache/lens/cube/parse/JoinClause.java  | 144 ----
 .../apache/lens/cube/parse/JoinResolver.java    | 262 +-----
 .../org/apache/lens/cube/parse/JoinTree.java    | 164 ----
 .../org/apache/lens/cube/parse/StorageUtil.java |   8 +-
 .../lens/cube/parse/TimerangeResolver.java      |  10 +-
 .../lens/cube/parse/join/AutoJoinContext.java   | 719 ++++++++++++++++
 .../apache/lens/cube/parse/join/JoinClause.java | 139 +++
 .../apache/lens/cube/parse/join/JoinTree.java   | 164 ++++
 .../apache/lens/cube/parse/join/JoinUtils.java  |  49 ++
 .../cube/metadata/TestCubeMetastoreClient.java  | 284 +++++--
 .../apache/lens/cube/parse/CubeTestSetup.java   | 843 ++++++++++++++-----
 .../FieldsCannotBeQueriedTogetherTest.java      |  11 +-
 .../lens/cube/parse/TestBaseCubeQueries.java    |  26 +-
 .../lens/cube/parse/TestCubeRewriter.java       | 279 +++---
 .../cube/parse/TestDenormalizationResolver.java | 140 +--
 .../lens/cube/parse/TestExpressionContext.java  |  20 +-
 .../lens/cube/parse/TestExpressionResolver.java | 102 +--
 .../lens/cube/parse/TestJoinResolver.java       | 534 +++++-------
 .../lens/cube/parse/TestQueryRewrite.java       |  10 +
 .../lens/cube/parse/TestRewriterPlan.java       |  10 +-
 .../parse/TestTimeRangeWriterWithQuery.java     |  53 +-
 .../src/main/resources/cube-queries.sql         |  74 +-
 lens-examples/src/main/resources/customer.xml   |   4 +-
 .../src/main/resources/dimension-queries.sql    |  14 +-
 lens-examples/src/main/resources/sales-cube.xml |  12 +-
 .../src/main/resources/sample-cube.xml          |  21 +-
 .../main/resources/sample-db-only-dimension.xml |  24 +-
 .../src/main/resources/sample-dimension.xml     |  24 +-
 .../apache/lens/server/metastore/JAXBUtils.java |  54 +-
 .../server/metastore/TestMetastoreService.java  |  17 +-
 .../apache/lens/storage/db/TestDBStorage.java   |  11 +-
 60 files changed, 3156 insertions(+), 3293 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-api/src/main/resources/cube-0.1.xsd
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/cube-0.1.xsd b/lens-api/src/main/resources/cube-0.1.xsd
index 4092133..d195b08 100644
--- a/lens-api/src/main/resources/cube-0.1.xsd
+++ b/lens-api/src/main/resources/cube-0.1.xsd
@@ -393,25 +393,13 @@
     <xs:complexContent>
       <xs:extension base="x_field">
         <xs:sequence>
-          <xs:element name="ref_spec" maxOccurs="1" minOccurs="0">
+          <xs:element type="x_chain_column" name="chain_ref_column" maxOccurs="unbounded" minOccurs="0">
             <xs:annotation>
               <xs:documentation>
-                Reference specifiction needs to be specified if the attribute is a reference attribute. It
-                can either be table reference or a chained column
-
-                ref_spec can be specified as a list of table references to
-                which the attribute is refering to.
-                For ex : userid refers user.id, xuser.id, yuser.id, zuser.id.
-
-                Alternately, ref_spec could be list of chained columns each specifed with chain name and column name.
+                Chain column specification needs to be specified if the attribute is a reference attribute.
+                It can be list of chained columns each specified with chain name and column name.
               </xs:documentation>
             </xs:annotation>
-            <xs:complexType>
-              <xs:choice maxOccurs="1" minOccurs="1">
-                <xs:element type="x_table_references" name="table_references" maxOccurs="1" minOccurs="1"/>
-                <xs:element type="x_chain_column" name="chain_ref_column" maxOccurs="unbounded" minOccurs="1"/>
-              </xs:choice>
-            </xs:complexType>
           </xs:element>
           <xs:element name="hierarchy" type="x_dim_attributes" maxOccurs="1" minOccurs="0">
             <xs:annotation>
@@ -471,13 +459,6 @@
             </xs:documentation>
           </xs:annotation>
         </xs:attribute>
-        <xs:attribute type="xs:boolean" name="join_key" default="true">
-          <xs:annotation>
-            <xs:documentation>
-              This flag will tell whether the attribute can be used as a join key or not
-            </xs:documentation>
-          </xs:annotation>
-        </xs:attribute>
       </xs:extension>
     </xs:complexContent>
   </xs:complexType>
@@ -531,17 +512,6 @@
     </xs:attribute>
   </xs:complexType>
 
-  <xs:complexType name="x_table_references">
-    <xs:annotation>
-      <xs:documentation>
-        Set of table references.
-      </xs:documentation>
-    </xs:annotation>
-    <xs:sequence>
-      <xs:element type="x_table_reference" name="table_reference" maxOccurs="unbounded" minOccurs="1"/>
-    </xs:sequence>
-  </xs:complexType>
-
   <xs:element name="x_join_chains" type="x_join_chains"/>
 
   <xs:complexType name="x_join_chains">

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-api/src/main/resources/lens-errors.conf
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/lens-errors.conf b/lens-api/src/main/resources/lens-errors.conf
index c7ccea1..9087fcd 100644
--- a/lens-api/src/main/resources/lens-errors.conf
+++ b/lens-api/src/main/resources/lens-errors.conf
@@ -207,7 +207,7 @@ lensCubeErrorsForQuery = [
   {
     errorCode = 3018
     httpStatusCode = ${BAD_REQUEST}
-    errorMsg = "No join condition available"
+    errorMsg = "Default aggregate is not set for measure: %s"
   }
 
   {
@@ -219,7 +219,7 @@ lensCubeErrorsForQuery = [
   {
     errorCode = 3020
     httpStatusCode = ${BAD_REQUEST}
-    errorMsg = "Default aggregate is not set for measure: %s"
+    errorMsg = "No join condition available"
   }
 
   {
@@ -294,6 +294,13 @@ lensCubeErrorsForQuery = [
     httpStatusCode = ${INTERNAL_SERVER_ERROR}
     errorMsg = "Could not parse expression %s"
   }
+
+  {
+    errorCode = 3033
+    httpStatusCode = ${BAD_REQUEST}
+    errorMsg = "Could not find queried table or chain: %s"
+  }
+
 ]
 
 lensCubeErrorsForMetastore = [

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionCommands.java b/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionCommands.java
index 42c6bae..160699b 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionCommands.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionCommands.java
@@ -27,7 +27,7 @@ import java.net.URISyntaxException;
 import java.net.URL;
 import java.util.Arrays;
 
-import org.apache.lens.api.metastore.XJoinChains;
+import org.apache.lens.api.metastore.*;
 import org.apache.lens.cli.commands.LensDimensionCommands;
 import org.apache.lens.cli.table.XJoinChainTable;
 import org.apache.lens.client.LensClient;
@@ -62,6 +62,8 @@ public class TestLensDimensionCommands extends LensCliApplicationTest {
    *           the URI syntax exception
    */
   public static void createDimension() throws URISyntaxException {
+    getCommand().createDimension(new File(
+      TestLensCubeCommands.class.getClassLoader().getResource("test-detail.xml").toURI()));
     URL dimensionSpec = TestLensDimensionCommands.class.getClassLoader().getResource("test-dimension.xml");
     getCommand().createDimension(new File(dimensionSpec.toURI()));
   }
@@ -81,16 +83,38 @@ public class TestLensDimensionCommands extends LensCliApplicationTest {
     createDimension();
     dimensionList = getCommand().showDimensions();
     Assert.assertTrue(dimensionList.contains("test_dim"));
+    Assert.assertTrue(dimensionList.contains("test_detail"));
     testFields(getCommand());
     testJoinChains(getCommand());
     testUpdateCommand(new File(dimensionSpec.toURI()), getCommand());
     getCommand().dropDimension("test_dim");
+    getCommand().dropDimension("test_detail");
     dimensionList = getCommand().showDimensions();
     Assert.assertFalse(dimensionList.contains("test_dim"));
+    Assert.assertFalse(dimensionList.contains("test_detail"));
   }
 
   private void testJoinChains(LensDimensionCommands command) {
-    assertEquals(command.showJoinChains("test_dim"), new XJoinChainTable(new XJoinChains()).toString());
+    XJoinChains chains = new XJoinChains();
+    XJoinChain chain1 = new XJoinChain();
+    chain1.setPaths(new XJoinPaths());
+    XJoinPath path = new XJoinPath();
+    path.setEdges(new XJoinEdges());
+    XJoinEdge edge1 = new XJoinEdge();
+    XTableReference ref1 = new XTableReference();
+    ref1.setTable("test_dim");
+    ref1.setColumn("d2id");
+    XTableReference ref2 = new XTableReference();
+    ref2.setTable("test_detail");
+    ref2.setColumn("id");
+    edge1.setFrom(ref1);
+    edge1.setTo(ref2);
+    path.getEdges().getEdge().add(edge1);
+    chain1.setName("dim2chain");
+    chain1.getPaths().getPath().add(path);
+    chain1.setDestTable("test_detail");
+    chains.getJoinChain().add(chain1);
+    assertEquals(command.showJoinChains("test_dim"), new XJoinChainTable(chains).toString());
   }
 
   private void testFields(LensDimensionCommands qCom) {

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cli/src/test/resources/cube_with_no_weight_facts.xml
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/resources/cube_with_no_weight_facts.xml b/lens-cli/src/test/resources/cube_with_no_weight_facts.xml
index 263ca88..13736b2 100644
--- a/lens-cli/src/test/resources/cube_with_no_weight_facts.xml
+++ b/lens-cli/src/test/resources/cube_with_no_weight_facts.xml
@@ -30,13 +30,7 @@
     <dim_attributes>
         <dim_attribute name="dim1" type="INT" />
         <dim_attribute name="dim2" type="INT" start_time='2013-12-01T00:00:00' />
-        <dim_attribute name="dim3" type="INT">
-            <ref_spec>
-                <table_references>
-                    <table_reference table="dim_table" column="id" />
-                </table_references>
-            </ref_spec>
-        </dim_attribute>
+        <dim_attribute name="dim3" type="INT"/>
     </dim_attributes>
     <expressions>
         <expression name="expr_msr5" type="DOUBLE">

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cli/src/test/resources/sample-cube.xml
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/resources/sample-cube.xml b/lens-cli/src/test/resources/sample-cube.xml
index d72d279..e3b3284 100644
--- a/lens-cli/src/test/resources/sample-cube.xml
+++ b/lens-cli/src/test/resources/sample-cube.xml
@@ -34,19 +34,11 @@
   <dim_attributes>
     <dim_attribute name="dim1" type="INT" />
     <dim_attribute name="dim2" type="INT" start_time='2013-12-01T00:00:00' />
-    <dim_attribute name="dim3" type="INT">
-      <ref_spec>
-        <table_references>
-          <table_reference table="dim_table" column="id" />
-        </table_references>
-      </ref_spec>
-    </dim_attribute>
+    <dim_attribute name="dim3" type="INT"/>
     <dim_attribute name="dimDetail" type="string" description="City name to which the customer belongs"
                    display_string="Customer City">
-      <ref_spec>
-        <chain_ref_column chain_name="testdimchain" ref_col="detail" />
-        <chain_ref_column chain_name="testdetailchain" ref_col="name" />
-      </ref_spec>
+      <chain_ref_column chain_name="testdimchain" ref_col="detail" />
+      <chain_ref_column chain_name="testdetailchain" ref_col="name" />
     </dim_attribute>
   </dim_attributes>
   <expressions>

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cli/src/test/resources/test-dimension.xml
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/resources/test-dimension.xml b/lens-cli/src/test/resources/test-dimension.xml
index 6eb3d31..2fa47f1 100644
--- a/lens-cli/src/test/resources/test-dimension.xml
+++ b/lens-cli/src/test/resources/test-dimension.xml
@@ -25,13 +25,7 @@
     <dim_attribute name="id" type="INT" />
     <dim_attribute name="name" type="STRING" />
     <dim_attribute name="detail" type="STRING" start_time='2013-12-01T00:00:00' />
-    <dim_attribute name="d2id" type="INT" start_time='2013-12-01T00:00:00'>
-      <ref_spec>
-        <table_references>
-          <table_reference table="test_dim2" column="id" />
-        </table_references>
-      </ref_spec>
-    </dim_attribute>
+    <dim_attribute name="d2id" type="INT" start_time='2013-12-01T00:00:00'/>
     <dim_attribute name="inline" type="STRING" >
       <values>A</values>
       <values>B</values>
@@ -46,6 +40,20 @@
     </dim_attribute>
   </attributes>
 
+  <join_chains>
+    <join_chain name="dim2chain">
+      <paths>
+        <path>
+         <edges>
+            <edge>
+              <from table="test_dim" column="d2id" />
+              <to table="test_detail" column="id" />
+            </edge>
+          </edges>
+        </path>
+      </paths>
+    </join_chain>
+  </join_chains>
   <properties>
     <property name="test_dim.prop" value="test" />
     <property name="dimension.test_dim.timed.dimension" value="dt" />

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
index 68cd80b..61d08b2 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
@@ -43,7 +43,7 @@ public enum LensCubeErrorCode {
   CANNOT_USE_TIMERANGE_WRITER(3017, 100),
   NO_DEFAULT_AGGREGATE(3018, 200),
   EXPRESSION_NOT_IN_ANY_FACT(3019, 300),
-  NO_JOIN_CONDITION_AVAIABLE(3020, 400),
+  NO_JOIN_CONDITION_AVAILABLE(3020, 400),
   NO_JOIN_PATH(3021, 500),
   COLUMN_UNAVAILABLE_IN_TIME_RANGE(3022, 600),
   NO_DIM_HAS_COLUMN(3023, 700),
@@ -53,9 +53,11 @@ public enum LensCubeErrorCode {
   NO_CANDIDATE_DIM_AVAILABLE(3027, 1100),
   NO_CANDIDATE_FACT_AVAILABLE(3028, 1200),
   NO_CANDIDATE_DIM_STORAGE_TABLES(3029, 1300),
-  NO_STORAGE_TABLE_AVAIABLE(3030, 1400),
+  NO_STORAGE_TABLE_AVAILABLE(3030, 1400),
   STORAGE_UNION_DISABLED(3031, 1500),
   COULD_NOT_PARSE_EXPRESSION(3032, 1500),
+  QUERIED_TABLE_NOT_FOUND(3033, 0),
+
   // Error codes greater than 3100 are errors while doing a metastore operation.
   ERROR_IN_ENTITY_DEFINITION(3101, 100),
   TIMELINE_ABSENT(3102, 100),

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/metadata/AbstractBaseTable.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/AbstractBaseTable.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/AbstractBaseTable.java
index 88c9ee8..5543308 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/AbstractBaseTable.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/AbstractBaseTable.java
@@ -23,7 +23,6 @@ import java.util.*;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 
 import com.google.common.base.Preconditions;
@@ -38,7 +37,7 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 public abstract class AbstractBaseTable extends AbstractCubeTable {
   private final Set<ExprColumn> expressions;
-  private static final List<FieldSchema> COLUMNS = new ArrayList<FieldSchema>();
+  private static final List<FieldSchema> COLUMNS = new ArrayList<>();
   private final Map<String, ExprColumn> exprMap;
   @Getter
   private final Set<JoinChain> joinChains;
@@ -52,9 +51,9 @@ public abstract class AbstractBaseTable extends AbstractCubeTable {
     properties, double weight) {
     super(name, COLUMNS, properties, weight);
 
-    exprMap = new HashMap<String, ExprColumn>();
+    exprMap = new HashMap<>();
     if (exprs == null) {
-      this.expressions = new HashSet<ExprColumn>();
+      this.expressions = new HashSet<>();
     } else {
       this.expressions = exprs;
     }
@@ -66,10 +65,10 @@ public abstract class AbstractBaseTable extends AbstractCubeTable {
     if (joinChains != null) {
       this.joinChains = joinChains;
     } else {
-      this.joinChains = new HashSet<JoinChain>();
+      this.joinChains = new HashSet<>();
     }
 
-    chainMap = new HashMap<String, JoinChain>();
+    chainMap = new HashMap<>();
     for (JoinChain chain : this.joinChains) {
       chainMap.put(chain.getName().toLowerCase(), chain);
     }
@@ -78,12 +77,12 @@ public abstract class AbstractBaseTable extends AbstractCubeTable {
   public AbstractBaseTable(Table tbl) {
     super(tbl);
     this.expressions = getExpressions(getName(), getProperties());
-    exprMap = new HashMap<String, ExprColumn>();
+    exprMap = new HashMap<>();
     for (ExprColumn expr : expressions) {
       exprMap.put(expr.getName().toLowerCase(), expr);
     }
     this.joinChains = getJoinChains(this, getJoinChainListPropKey(getName()), getProperties());
-    chainMap = new HashMap<String, JoinChain>();
+    chainMap = new HashMap<>();
     for (JoinChain chain : joinChains) {
       chainMap.put(chain.getName().toLowerCase(), chain);
     }
@@ -110,7 +109,7 @@ public abstract class AbstractBaseTable extends AbstractCubeTable {
   }
 
   private static Set<ExprColumn> getExpressions(String name, Map<String, String> props) {
-    Set<ExprColumn> exprs = new HashSet<ExprColumn>();
+    Set<ExprColumn> exprs = new HashSet<>();
     String exprStr = MetastoreUtil.getNamedStringValue(props, MetastoreUtil.getExpressionListKey(name));
     if (!StringUtils.isBlank(exprStr)) {
       String[] names = exprStr.split(",");
@@ -152,7 +151,7 @@ public abstract class AbstractBaseTable extends AbstractCubeTable {
   }
 
   public ExprColumn getExpressionByName(String exprName) {
-    return exprMap.get(exprName == null ? exprName : exprName.toLowerCase());
+    return exprMap.get(exprName == null ? null : exprName.toLowerCase());
   }
 
   public CubeColumn getColumnByName(String column) {
@@ -162,10 +161,9 @@ public abstract class AbstractBaseTable extends AbstractCubeTable {
   /**
    * Alters the expression if already existing or just adds if it is new expression.
    *
-   * @param expr
-   * @throws HiveException
+   * @param expr ExprColumn
    */
-  public void alterExpression(ExprColumn expr) throws HiveException {
+  public void alterExpression(ExprColumn expr) {
     if (expr == null) {
       throw new NullPointerException("Cannot add null expression");
     }
@@ -183,9 +181,9 @@ public abstract class AbstractBaseTable extends AbstractCubeTable {
   }
 
   /**
-   * Remove the measure with name specified
+   * Remove the expression with name specified
    *
-   * @param exprName
+   * @param exprName expression name
    */
   public void removeExpression(String exprName) {
     if (exprMap.containsKey(exprName.toLowerCase())) {
@@ -197,7 +195,7 @@ public abstract class AbstractBaseTable extends AbstractCubeTable {
   }
 
   public Set<String> getExpressionNames() {
-    Set<String> exprNames = new HashSet<String>();
+    Set<String> exprNames = new HashSet<>();
     for (ExprColumn f : getExpressions()) {
       exprNames.add(f.getName().toLowerCase());
     }
@@ -225,10 +223,9 @@ public abstract class AbstractBaseTable extends AbstractCubeTable {
   /**
    * Alters the joinchain if already existing or just adds if it is new chain
    *
-   * @param joinchain
-   * @throws HiveException
+   * @param joinchain join chain
    */
-  public void alterJoinChain(JoinChain joinchain) throws HiveException {
+  public void alterJoinChain(JoinChain joinchain) {
     if (joinchain == null) {
       throw new NullPointerException("Cannot add null joinchain");
     }
@@ -251,20 +248,20 @@ public abstract class AbstractBaseTable extends AbstractCubeTable {
   }
 
   /**
-   * Returns the property key for Cube/Dimension specific join chain list
+   * Get the property key for Cube/Dimension specific join chain list
    *
-   * @param tblname
-   * @return
+   * @param tblName table name
+   * @return the property key for Cube/Dimension specific join chain list
    */
-  protected abstract String getJoinChainListPropKey(String tblname);
+  protected abstract String getJoinChainListPropKey(String tblName);
 
   /**
    * Get join chains from properties
    *
-   * @return
+   * @return set of join chains
    */
   private static Set<JoinChain> getJoinChains(AbstractBaseTable tbl, String propName, Map<String, String> props) {
-    Set<JoinChain> joinChains = new HashSet<JoinChain>();
+    Set<JoinChain> joinChains = new HashSet<>();
     String joinChainsStr = MetastoreUtil.getNamedStringValue(props, propName);
     if (!StringUtils.isBlank(joinChainsStr)) {
       String[] cnames = joinChainsStr.split(",");
@@ -277,7 +274,7 @@ public abstract class AbstractBaseTable extends AbstractCubeTable {
   }
 
   public Set<String> getJoinChainNames() {
-    Set<String> chainNames = new HashSet<String>();
+    Set<String> chainNames = new HashSet<>();
     for (JoinChain f : getJoinChains()) {
       chainNames.add(f.getName().toLowerCase());
     }
@@ -286,9 +283,9 @@ public abstract class AbstractBaseTable extends AbstractCubeTable {
 
 
   /**
-   * Remove the joinchain with name specified
+   * Remove the join chain with name specified
    *
-   * @param chainName
+   * @param chainName chain name
    */
   public boolean removeJoinChain(String chainName) {
     if (chainMap.containsKey(chainName.toLowerCase())) {

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/metadata/AbstractCubeTable.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/AbstractCubeTable.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/AbstractCubeTable.java
index da3a7e5..01098c4 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/AbstractCubeTable.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/AbstractCubeTable.java
@@ -21,16 +21,16 @@ package org.apache.lens.cube.metadata;
 import java.util.*;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 
+import lombok.NonNull;
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
 public abstract class AbstractCubeTable implements Named {
   private final String name;
   private final List<FieldSchema> columns;
-  private final Map<String, String> properties = new HashMap<String, String>();
+  private final Map<String, String> properties = new HashMap<>();
   private double weight;
 
   protected AbstractCubeTable(String name, List<FieldSchema> columns, Map<String, String> props, double weight) {
@@ -82,7 +82,7 @@ public abstract class AbstractCubeTable implements Named {
   /**
    * Alters the weight of table
    *
-   * @param weight
+   * @param weight Weight of the table.
    */
   public void alterWeight(double weight) {
     this.weight = weight;
@@ -102,7 +102,7 @@ public abstract class AbstractCubeTable implements Named {
   /**
    * Remove property specified by the key
    *
-   * @param propKey
+   * @param propKey property key
    */
   public void removeProperty(String propKey) {
     properties.remove(propKey);
@@ -111,13 +111,9 @@ public abstract class AbstractCubeTable implements Named {
   /**
    * Alters the column if already existing or just adds it if it is new column
    *
-   * @param column
-   * @throws HiveException
+   * @param column The column spec as FieldSchema - name, type and a comment
    */
-  protected void alterColumn(FieldSchema column) throws HiveException {
-    if (column == null) {
-      throw new HiveException("Column cannot be null");
-    }
+  protected void alterColumn(@NonNull FieldSchema column) {
     Iterator<FieldSchema> columnItr = columns.iterator();
     int alterPos = -1;
     int i = 0;
@@ -144,13 +140,9 @@ public abstract class AbstractCubeTable implements Named {
   /**
    * Adds or alters the columns passed
    *
-   * @param columns
-   * @throws HiveException
+   * @param columns The collection of columns
    */
-  protected void addColumns(Collection<FieldSchema> columns) throws HiveException {
-    if (columns == null) {
-      throw new HiveException("Columns cannot be null");
-    }
+  protected void addColumns(@NonNull Collection<FieldSchema> columns) {
     for (FieldSchema column : columns) {
       alterColumn(column);
     }
@@ -202,7 +194,7 @@ public abstract class AbstractCubeTable implements Named {
 
   public Set<String> getAllFieldNames() {
     List<FieldSchema> fields = getColumns();
-    Set<String> columns = new HashSet<String>(fields.size());
+    Set<String> columns = new HashSet<>(fields.size());
     for (FieldSchema f : fields) {
       columns.add(f.getName().toLowerCase());
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/metadata/Cube.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/Cube.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/Cube.java
index f09da37..b376aaf 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/Cube.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/Cube.java
@@ -22,9 +22,9 @@ import java.lang.reflect.Constructor;
 import java.util.*;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 
+import lombok.NonNull;
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
@@ -55,12 +55,12 @@ public class Cube extends AbstractBaseTable implements CubeInterface {
     this.measures = measures;
     this.dimensions = dimensions;
 
-    measureMap = new HashMap<String, CubeMeasure>();
+    measureMap = new HashMap<>();
     for (CubeMeasure m : measures) {
       measureMap.put(m.getName().toLowerCase(), m);
     }
 
-    dimMap = new HashMap<String, CubeDimAttribute>();
+    dimMap = new HashMap<>();
     for (CubeDimAttribute dim : dimensions) {
       dimMap.put(dim.getName().toLowerCase(), dim);
     }
@@ -73,12 +73,12 @@ public class Cube extends AbstractBaseTable implements CubeInterface {
     this.measures = getMeasures(getName(), getProperties());
     this.dimensions = getDimensions(getName(), getProperties());
 
-    measureMap = new HashMap<String, CubeMeasure>();
+    measureMap = new HashMap<>();
     for (CubeMeasure m : measures) {
       measureMap.put(m.getName().toLowerCase(), m);
     }
 
-    dimMap = new HashMap<String, CubeDimAttribute>();
+    dimMap = new HashMap<>();
     for (CubeDimAttribute dim : dimensions) {
       addAllDimsToMap(dim);
     }
@@ -103,7 +103,7 @@ public class Cube extends AbstractBaseTable implements CubeInterface {
 
   public Set<String> getTimedDimensions() {
     String str = getProperties().get(MetastoreUtil.getCubeTimedDimensionListKey(getName()));
-    Set<String> timedDimensions = new HashSet<String>();
+    Set<String> timedDimensions = new HashSet<>();
     if (str != null) {
       timedDimensions.addAll(Arrays.asList(StringUtils.split(str, ',')));
     }
@@ -137,7 +137,7 @@ public class Cube extends AbstractBaseTable implements CubeInterface {
   }
 
   public static Set<CubeMeasure> getMeasures(String name, Map<String, String> props) {
-    Set<CubeMeasure> measures = new HashSet<CubeMeasure>();
+    Set<CubeMeasure> measures = new HashSet<>();
     String measureStr = MetastoreUtil.getNamedStringValue(props, MetastoreUtil.getCubeMeasureListKey(name));
     String[] names = measureStr.split(",");
     for (String measureName : names) {
@@ -157,21 +157,23 @@ public class Cube extends AbstractBaseTable implements CubeInterface {
   }
 
   public static Set<CubeDimAttribute> getDimensions(String name, Map<String, String> props) {
-    Set<CubeDimAttribute> dimensions = new HashSet<CubeDimAttribute>();
+    Set<CubeDimAttribute> dimensions = new HashSet<>();
     String dimStr = MetastoreUtil.getNamedStringValue(props, MetastoreUtil.getCubeDimensionListKey(name));
-    String[] names = dimStr.split(",");
-    for (String dimName : names) {
-      String className = props.get(MetastoreUtil.getDimensionClassPropertyKey(dimName));
-      CubeDimAttribute dim;
-      try {
-        Class<?> clazz = Class.forName(className);
-        Constructor<?> constructor;
-        constructor = clazz.getConstructor(String.class, Map.class);
-        dim = (CubeDimAttribute) constructor.newInstance(new Object[]{dimName, props});
-      } catch (Exception e) {
-        throw new IllegalArgumentException("Invalid dimension", e);
+    if (StringUtils.isNotBlank(dimStr)) {
+      String[] names = dimStr.split(",");
+      for (String dimName : names) {
+        String className = props.get(MetastoreUtil.getDimensionClassPropertyKey(dimName));
+        CubeDimAttribute dim;
+        try {
+          Class<?> clazz = Class.forName(className);
+          Constructor<?> constructor;
+          constructor = clazz.getConstructor(String.class, Map.class);
+          dim = (CubeDimAttribute) constructor.newInstance(new Object[]{dimName, props});
+        } catch (Exception e) {
+          throw new IllegalArgumentException("Invalid dimension", e);
+        }
+        dimensions.add(dim);
       }
-      dimensions.add(dim);
     }
     return dimensions;
   }
@@ -226,14 +228,9 @@ public class Cube extends AbstractBaseTable implements CubeInterface {
   /**
    * Alters the measure if already existing or just adds if it is new measure.
    *
-   * @param measure
-   * @throws HiveException
+   * @param measure new measure definition
    */
-  public void alterMeasure(CubeMeasure measure) throws HiveException {
-    if (measure == null) {
-      throw new NullPointerException("Cannot add null measure");
-    }
-
+  public void alterMeasure(@NonNull CubeMeasure measure) {
     // Replace measure if already existing
     if (measureMap.containsKey(measure.getName().toLowerCase())) {
       measures.remove(getMeasureByName(measure.getName()));
@@ -249,9 +246,9 @@ public class Cube extends AbstractBaseTable implements CubeInterface {
   /**
    * Remove the joinchain with name specified
    *
-   * @param chainName
+   * @param chainName chain name
    */
-  public boolean removeJoinChain(String chainName) {
+  public boolean removeJoinChain(@NonNull String chainName) {
     if (super.removeJoinChain(chainName)) {
       log.info("Removing dimension {}", getDimAttributeByName(chainName));
       return true;
@@ -262,14 +259,9 @@ public class Cube extends AbstractBaseTable implements CubeInterface {
   /**
    * Alters the dimension if already existing or just adds if it is new dimension
    *
-   * @param dimension
-   * @throws HiveException
+   * @param dimension the dim attribute
    */
-  public void alterDimension(CubeDimAttribute dimension) throws HiveException {
-    if (dimension == null) {
-      throw new NullPointerException("Cannot add null dimension");
-    }
-
+  public void alterDimension(@NonNull CubeDimAttribute dimension) {
     // Replace dimension if already existing
     if (dimMap.containsKey(dimension.getName().toLowerCase())) {
       dimensions.remove(getDimAttributeByName(dimension.getName()));
@@ -284,11 +276,11 @@ public class Cube extends AbstractBaseTable implements CubeInterface {
 
 
   /**
-   * Remove the dimension with name specified
+   * Remove the attribute with name specified
    *
-   * @param dimName
+   * @param dimName attribute name
    */
-  public void removeDimension(String dimName) {
+  public void removeDimension(@NonNull String dimName) {
     if (dimMap.containsKey(dimName.toLowerCase())) {
       log.info("Removing dimension {}", getDimAttributeByName(dimName));
       dimensions.remove(getDimAttributeByName(dimName));
@@ -300,9 +292,9 @@ public class Cube extends AbstractBaseTable implements CubeInterface {
   /**
    * Remove the measure with name specified
    *
-   * @param msrName
+   * @param msrName measure name
    */
-  public void removeMeasure(String msrName) {
+  public void removeMeasure(@NonNull String msrName) {
     if (measureMap.containsKey(msrName.toLowerCase())) {
       log.info("Removing measure {}", getMeasureByName(msrName));
       measures.remove(getMeasureByName(msrName));
@@ -314,17 +306,13 @@ public class Cube extends AbstractBaseTable implements CubeInterface {
   /**
    * Adds the timed dimension
    *
-   * @param timedDimension
-   * @throws HiveException
+   * @param timedDimension time dimension
    */
-  public void addTimedDimension(String timedDimension) throws HiveException {
-    if (timedDimension == null || timedDimension.isEmpty()) {
-      throw new HiveException("Invalid timed dimension " + timedDimension);
-    }
+  public void addTimedDimension(@NonNull String timedDimension) {
     timedDimension = timedDimension.toLowerCase();
     Set<String> timeDims = getTimedDimensions();
     if (timeDims == null) {
-      timeDims = new LinkedHashSet<String>();
+      timeDims = new LinkedHashSet<>();
     }
     if (timeDims.contains(timedDimension)) {
       log.info("Timed dimension {} is already present in cube {}", timedDimension, getName());
@@ -338,13 +326,9 @@ public class Cube extends AbstractBaseTable implements CubeInterface {
   /**
    * Removes the timed dimension
    *
-   * @param timedDimension
-   * @throws HiveException
+   * @param timedDimension time dimension
    */
-  public void removeTimedDimension(String timedDimension) throws HiveException {
-    if (timedDimension == null || timedDimension.isEmpty()) {
-      throw new HiveException("Invalid timed dimension " + timedDimension);
-    }
+  public void removeTimedDimension(@NonNull String timedDimension) {
     timedDimension = timedDimension.toLowerCase();
     Set<String> timeDims = getTimedDimensions();
     if (timeDims != null && timeDims.contains(timedDimension)) {
@@ -360,7 +344,7 @@ public class Cube extends AbstractBaseTable implements CubeInterface {
 
   @Override
   public Set<String> getMeasureNames() {
-    Set<String> measureNames = new HashSet<String>();
+    Set<String> measureNames = new HashSet<>();
     for (CubeMeasure f : getMeasures()) {
       measureNames.add(f.getName().toLowerCase());
     }
@@ -369,7 +353,7 @@ public class Cube extends AbstractBaseTable implements CubeInterface {
 
   @Override
   public Set<String> getDimAttributeNames() {
-    Set<String> dimNames = new HashSet<String>();
+    Set<String> dimNames = new HashSet<>();
     for (CubeDimAttribute f : getDimAttributes()) {
       MetastoreUtil.addColumnNames(f, dimNames);
     }
@@ -378,9 +362,9 @@ public class Cube extends AbstractBaseTable implements CubeInterface {
 
   @Override
   public boolean allFieldsQueriable() {
-    String canbeQueried = getProperties().get(MetastoreConstants.CUBE_ALL_FIELDS_QUERIABLE);
-    if (canbeQueried != null) {
-      return Boolean.parseBoolean(canbeQueried);
+    String canBeQueried = getProperties().get(MetastoreConstants.CUBE_ALL_FIELDS_QUERIABLE);
+    if (canBeQueried != null) {
+      return Boolean.parseBoolean(canBeQueried);
     }
     return true;
   }
@@ -398,22 +382,21 @@ public class Cube extends AbstractBaseTable implements CubeInterface {
    * @see org.apache.lens.cube.metadata.AbstractBaseTable
    */
   @Override
-  protected String getJoinChainListPropKey(String tblname) {
+  protected String getJoinChainListPropKey(@NonNull String tblname) {
     return MetastoreUtil.getCubeJoinChainListKey(getName());
   }
 
-  public String getPartitionColumnOfTimeDim(String timeDimName) {
+  public String getPartitionColumnOfTimeDim(@NonNull String timeDimName) {
     String partCol = getProperties().get(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + timeDimName);
     return StringUtils.isNotBlank(partCol) ? partCol : timeDimName;
   }
 
-  public String getTimeDimOfPartitionColumn(String partCol) {
+  public String getTimeDimOfPartitionColumn(@NonNull String partCol) {
     Map<String, String> properties = getProperties();
     for (Map.Entry<String, String> entry : properties.entrySet()) {
       if (entry.getKey().startsWith(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX)
         && entry.getValue().equalsIgnoreCase(partCol)) {
-        String timeDim = entry.getKey().replace(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX, "");
-        return timeDim;
+        return entry.getKey().replace(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX, "");
       }
     }
     return partCol;

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeColumn.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeColumn.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeColumn.java
index b04532f..77024c0 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeColumn.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeColumn.java
@@ -24,7 +24,6 @@ import java.util.Date;
 import java.util.Map;
 import java.util.TimeZone;
 
-
 import com.google.common.base.Optional;
 
 import lombok.NonNull;

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeDimensionTable.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeDimensionTable.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeDimensionTable.java
index cd80d64..713f476 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeDimensionTable.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeDimensionTable.java
@@ -22,17 +22,17 @@ import java.util.*;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 
 import com.google.common.collect.Sets;
 
+import lombok.NonNull;
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
 public final class CubeDimensionTable extends AbstractCubeTable {
   private String dimName; // dimension name the dimtabe belongs to
-  private final Map<String, UpdatePeriod> snapshotDumpPeriods = new HashMap<String, UpdatePeriod>();
+  private final Map<String, UpdatePeriod> snapshotDumpPeriods = new HashMap<>();
 
   public CubeDimensionTable(String dimName, String dimTblName, List<FieldSchema> columns, double weight,
     Map<String, UpdatePeriod> snapshotDumpPeriods) {
@@ -61,7 +61,7 @@ public final class CubeDimensionTable extends AbstractCubeTable {
 
 
   private static Map<String, UpdatePeriod> getSnapshotDumpPeriods(Set<String> storages) {
-    Map<String, UpdatePeriod> snapshotDumpPeriods = new HashMap<String, UpdatePeriod>();
+    Map<String, UpdatePeriod> snapshotDumpPeriods = new HashMap<>();
     for (String storage : storages) {
       snapshotDumpPeriods.put(storage, null);
     }
@@ -134,7 +134,7 @@ public final class CubeDimensionTable extends AbstractCubeTable {
   private static Map<String, UpdatePeriod> getDumpPeriods(String name, Map<String, String> params) {
     String storagesStr = params.get(MetastoreUtil.getDimensionStorageListKey(name));
     if (!StringUtils.isBlank(storagesStr)) {
-      Map<String, UpdatePeriod> dumpPeriods = new HashMap<String, UpdatePeriod>();
+      Map<String, UpdatePeriod> dumpPeriods = new HashMap<>();
       for (String storage : StringUtils.split(storagesStr, ",")) {
         String dumpPeriod = params.get(MetastoreUtil.getDimensionDumpPeriodKey(name, storage));
         if (dumpPeriod != null) {
@@ -193,7 +193,7 @@ public final class CubeDimensionTable extends AbstractCubeTable {
   /**
    * Alter the dimension name that the table belongs to
    *
-   * @param newDimName
+   * @param newDimName new dimension name.
    */
   public void alterUberDim(String newDimName) {
     this.dimName = newDimName;
@@ -205,13 +205,8 @@ public final class CubeDimensionTable extends AbstractCubeTable {
    *
    * @param storage Storage name
    * @param period  The new value
-   * @throws HiveException
    */
-  public void alterSnapshotDumpPeriod(String storage, UpdatePeriod period) throws HiveException {
-    if (storage == null) {
-      throw new HiveException("Cannot add null storage for " + getName());
-    }
-
+  public void alterSnapshotDumpPeriod(@NonNull String storage, UpdatePeriod period) {
     if (snapshotDumpPeriods.containsKey(storage)) {
       log.info("Updating dump period for {} from {} to {}", storage, snapshotDumpPeriods.get(storage), period);
     }
@@ -221,12 +216,12 @@ public final class CubeDimensionTable extends AbstractCubeTable {
   }
 
   @Override
-  public void alterColumn(FieldSchema column) throws HiveException {
+  public void alterColumn(FieldSchema column) {
     super.alterColumn(column);
   }
 
   @Override
-  public void addColumns(Collection<FieldSchema> columns) throws HiveException {
+  public void addColumns(Collection<FieldSchema> columns) {
     super.addColumns(columns);
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
index b1fec8c..643bcfe 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
@@ -20,12 +20,12 @@ package org.apache.lens.cube.metadata;
 
 import java.util.*;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.UpdatePeriod.UpdatePeriodComparator;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 
 import com.google.common.collect.Lists;
@@ -261,11 +261,11 @@ public class CubeFactTable extends AbstractCubeTable {
    *
    * @param storage
    * @param updatePeriods
-   * @throws HiveException
    */
-  public void alterStorage(String storage, Set<UpdatePeriod> updatePeriods) throws HiveException {
+  public void alterStorage(String storage, Set<UpdatePeriod> updatePeriods) throws LensException {
     if (!storageUpdatePeriods.containsKey(storage)) {
-      throw new HiveException("Invalid storage" + storage);
+      throw new LensException(LensCubeErrorCode.ERROR_IN_ENTITY_DEFINITION.getLensErrorInfo(),
+        "Invalid storage " + storage);
     }
     storageUpdatePeriods.put(storage, updatePeriods);
     addUpdatePeriodProperies(getName(), getProperties(), storageUpdatePeriods);
@@ -276,9 +276,8 @@ public class CubeFactTable extends AbstractCubeTable {
    *
    * @param storage
    * @param updatePeriods
-   * @throws HiveException
    */
-  void addStorage(String storage, Set<UpdatePeriod> updatePeriods) throws HiveException {
+  void addStorage(String storage, Set<UpdatePeriod> updatePeriods) {
     storageUpdatePeriods.put(storage, updatePeriods);
     addUpdatePeriodProperies(getName(), getProperties(), storageUpdatePeriods);
   }
@@ -296,12 +295,12 @@ public class CubeFactTable extends AbstractCubeTable {
   }
 
   @Override
-  public void alterColumn(FieldSchema column) throws HiveException {
+  public void alterColumn(FieldSchema column) {
     super.alterColumn(column);
   }
 
   @Override
-  public void addColumns(Collection<FieldSchema> columns) throws HiveException {
+  public void addColumns(Collection<FieldSchema> columns) {
     super.addColumns(columns);
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index ae0fb90..dcb932e 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@ -85,7 +85,6 @@ public class CubeMetastoreClient {
   PartitionTimelineCache partitionTimelineCache = new PartitionTimelineCache();
   // dbname to client mapping
   private static final Map<String, CubeMetastoreClient> CLIENT_MAPPING = Maps.newConcurrentMap();
-  private SchemaGraph schemaGraph;
   // Set of all storage table names for which latest partitions exist
   private final Set<String> latestLookupCache = Sets.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
 
@@ -627,7 +626,8 @@ public class CubeMetastoreClient {
    * @throws HiveException
    */
   public void createCube(String name, Set<CubeMeasure> measures, Set<CubeDimAttribute> dimensions,
-    Set<ExprColumn> expressions, Set<JoinChain> chains, Map<String, String> properties) throws HiveException {
+    Set<ExprColumn> expressions, Set<JoinChain> chains, Map<String, String> properties)
+    throws HiveException {
     Cube cube = new Cube(name, measures, dimensions, expressions, chains, properties, 0L);
     createCube(cube);
   }
@@ -1858,13 +1858,6 @@ public class CubeMetastoreClient {
     return false;
   }
 
-  public synchronized SchemaGraph getSchemaGraph() throws HiveException {
-    if (schemaGraph == null) {
-      schemaGraph = new SchemaGraph(this);
-    }
-    return schemaGraph;
-  }
-
   /**
    *
    * @param table     table name

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/metadata/DerivedCube.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/DerivedCube.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/DerivedCube.java
index 681aa7b..4c73785 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/DerivedCube.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/DerivedCube.java
@@ -25,22 +25,21 @@ import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 
 import com.google.common.collect.Lists;
 
 public class DerivedCube extends AbstractCubeTable implements CubeInterface {
 
-  private static final List<FieldSchema> COLUMNS = new ArrayList<FieldSchema>();
+  private static final List<FieldSchema> COLUMNS = new ArrayList<>();
 
   static {
     COLUMNS.add(new FieldSchema("dummy", "string", "dummy column"));
   }
 
   private final Cube parent;
-  private final Set<String> measures = new HashSet<String>();
-  private final Set<String> dimensions = new HashSet<String>();
+  private final Set<String> measures = new HashSet<>();
+  private final Set<String> dimensions = new HashSet<>();
 
   public DerivedCube(String name, Set<String> measures, Set<String> dimensions, Cube parent) throws LensException {
     this(name, measures, dimensions, new HashMap<String, String>(), 0L, parent);
@@ -99,8 +98,8 @@ public class DerivedCube extends AbstractCubeTable implements CubeInterface {
     this.parent = parent;
   }
 
-  private Set<CubeMeasure> cachedMeasures = new HashSet<CubeMeasure>();
-  private Set<CubeDimAttribute> cachedDims = new HashSet<CubeDimAttribute>();
+  private Set<CubeMeasure> cachedMeasures = new HashSet<>();
+  private Set<CubeDimAttribute> cachedDims = new HashSet<>();
 
   public Set<CubeMeasure> getMeasures() {
     synchronized (measures) {
@@ -152,7 +151,7 @@ public class DerivedCube extends AbstractCubeTable implements CubeInterface {
   }
 
   public static Set<String> getMeasures(String name, Map<String, String> props) {
-    Set<String> measures = new HashSet<String>();
+    Set<String> measures = new HashSet<>();
     String measureStr = MetastoreUtil.getNamedStringValue(props, MetastoreUtil.getCubeMeasureListKey(name));
     measures.addAll(Arrays.asList(StringUtils.split(measureStr, ',')));
     return measures;
@@ -161,7 +160,7 @@ public class DerivedCube extends AbstractCubeTable implements CubeInterface {
   public Set<String> getTimedDimensions() {
     String str = getProperties().get(MetastoreUtil.getCubeTimedDimensionListKey(getName()));
     if (str != null) {
-      Set<String> timedDimensions = new HashSet<String>();
+      Set<String> timedDimensions = new HashSet<>();
       timedDimensions.addAll(Arrays.asList(StringUtils.split(str, ',')));
       return timedDimensions;
     } else {
@@ -170,7 +169,7 @@ public class DerivedCube extends AbstractCubeTable implements CubeInterface {
   }
 
   public static Set<String> getDimensions(String name, Map<String, String> props) {
-    Set<String> dimensions = new HashSet<String>();
+    Set<String> dimensions = new HashSet<>();
     String dimStr = MetastoreUtil.getNamedStringValue(props, MetastoreUtil.getCubeDimensionListKey(name));
     dimensions.addAll(Arrays.asList(StringUtils.split(dimStr, ',')));
     return dimensions;
@@ -236,10 +235,9 @@ public class DerivedCube extends AbstractCubeTable implements CubeInterface {
   /**
    * Add a new measure
    *
-   * @param measure
-   * @throws HiveException
+   * @param measure measure name
    */
-  public void addMeasure(String measure) throws HiveException {
+  public void addMeasure(String measure) {
     measures.add(measure.toLowerCase());
     updateMeasureProperties();
   }
@@ -247,10 +245,9 @@ public class DerivedCube extends AbstractCubeTable implements CubeInterface {
   /**
    * Add a new dimension
    *
-   * @param dimension
-   * @throws HiveException
+   * @param dimension attribute name
    */
-  public void addDimension(String dimension) throws HiveException {
+  public void addDimension(String dimension) {
     dimensions.add(dimension.toLowerCase());
     updateDimAttributeProperties();
   }
@@ -287,7 +284,7 @@ public class DerivedCube extends AbstractCubeTable implements CubeInterface {
 
   @Override
   public Set<String> getDimAttributeNames() {
-    Set<String> dimNames = new HashSet<String>();
+    Set<String> dimNames = new HashSet<>();
     for (CubeDimAttribute f : getDimAttributes()) {
       MetastoreUtil.addColumnNames(f, dimNames);
     }
@@ -311,7 +308,7 @@ public class DerivedCube extends AbstractCubeTable implements CubeInterface {
 
   @Override
   public Set<String> getAllFieldNames() {
-    Set<String> fieldNames = new HashSet<String>();
+    Set<String> fieldNames = new HashSet<>();
     fieldNames.addAll(getMeasureNames());
     fieldNames.addAll(getDimAttributeNames());
     fieldNames.addAll(getTimedDimensions());

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/metadata/Dimension.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/Dimension.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/Dimension.java
index 27cbc30..86eb6eb 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/Dimension.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/Dimension.java
@@ -24,9 +24,9 @@ import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 
+import lombok.NonNull;
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
@@ -48,7 +48,7 @@ public class Dimension extends AbstractBaseTable {
     super(name, expressions, joinChains, properties, weight);
     this.attributes = attributes;
 
-    attributeMap = new HashMap<String, CubeDimAttribute>();
+    attributeMap = new HashMap<>();
     for (CubeDimAttribute dim : attributes) {
       attributeMap.put(dim.getName().toLowerCase(), dim);
     }
@@ -59,7 +59,7 @@ public class Dimension extends AbstractBaseTable {
     super(tbl);
     this.attributes = getAttributes(getName(), getProperties());
 
-    attributeMap = new HashMap<String, CubeDimAttribute>();
+    attributeMap = new HashMap<>();
     for (CubeDimAttribute attr : attributes) {
       addAllAttributesToMap(attr);
     }
@@ -108,7 +108,7 @@ public class Dimension extends AbstractBaseTable {
   }
 
   public static Set<CubeDimAttribute> getAttributes(String name, Map<String, String> props) {
-    Set<CubeDimAttribute> attributes = new HashSet<CubeDimAttribute>();
+    Set<CubeDimAttribute> attributes = new HashSet<>();
     String attrStr = MetastoreUtil.getNamedStringValue(props, MetastoreUtil.getDimAttributeListKey(name));
     String[] names = attrStr.split(",");
     for (String attrName : names) {
@@ -135,11 +135,6 @@ public class Dimension extends AbstractBaseTable {
     return MetastoreUtil.getDimensionJoinChainListKey(tblname);
   }
 
-//  public boolean isChainedColumn(String name) {
-//    Preconditions.checkArgument(name != null);
-//    return ((ReferencedDimAtrribute) attributeMap.get(name.toLowerCase())).isChainedColumn();
-//  }
-
   @Override
   public int hashCode() {
     return super.hashCode();
@@ -185,13 +180,8 @@ public class Dimension extends AbstractBaseTable {
    * Alters the attribute if already existing or just adds if it is new attribute
    *
    * @param attribute
-   * @throws HiveException
    */
-  public void alterAttribute(CubeDimAttribute attribute) throws HiveException {
-    if (attribute == null) {
-      throw new NullPointerException("Cannot add null attribute");
-    }
-
+  public void alterAttribute(@NonNull CubeDimAttribute attribute) {
     // Replace dimension if already existing
     if (attributeMap.containsKey(attribute.getName().toLowerCase())) {
       attributes.remove(getAttributeByName(attribute.getName()));

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/metadata/JoinChain.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/JoinChain.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/JoinChain.java
index 6250905..cc8929f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/JoinChain.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/JoinChain.java
@@ -20,8 +20,8 @@ package org.apache.lens.cube.metadata;
 
 import java.util.*;
 
-import org.apache.lens.cube.metadata.SchemaGraph.JoinPath;
-import org.apache.lens.cube.metadata.SchemaGraph.TableRelationship;
+import org.apache.lens.cube.metadata.join.JoinPath;
+import org.apache.lens.cube.metadata.join.TableRelationship;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -313,14 +313,14 @@ public class JoinChain implements Named {
   }
 
   /**
-   * Convert join paths to schemaGraph's JoinPath
+   * Convert join chain paths to JoinPath objects
    *
    * @param client
-   * @return List&lt;SchemaGraph.JoinPath&gt;
+   * @return List&lt;JoinPath&gt;
    * @throws HiveException
    */
-  public List<SchemaGraph.JoinPath> getRelationEdges(CubeMetastoreClient client) throws HiveException {
-    List<SchemaGraph.JoinPath> schemaGraphPaths = new ArrayList<SchemaGraph.JoinPath>();
+  public List<JoinPath> getRelationEdges(CubeMetastoreClient client) throws HiveException {
+    List<JoinPath> joinPaths = new ArrayList<>();
     for (Path path : paths) {
       JoinPath jp = new JoinPath();
       // Add edges from dimension to cube
@@ -328,8 +328,8 @@ public class JoinChain implements Named {
         jp.addEdge(path.links.get(i).toDimToDimRelationship(client));
       }
       jp.addEdge(path.links.get(0).toCubeOrDimRelationship(client));
-      schemaGraphPaths.add(jp);
+      joinPaths.add(jp);
     }
-    return schemaGraphPaths;
+    return joinPaths;
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAtrribute.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAtrribute.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAtrribute.java
deleted file mode 100644
index c51b489..0000000
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAtrribute.java
+++ /dev/null
@@ -1,195 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.metadata;
-
-import java.util.*;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-
-import lombok.Data;
-import lombok.EqualsAndHashCode;
-import lombok.Getter;
-import lombok.ToString;
-
-@EqualsAndHashCode(callSuper = true)
-@ToString(callSuper = true)
-public class ReferencedDimAtrribute extends BaseDimAttribute {
-  private static final char CHAIN_REF_COL_SEPARATOR = ',';
-
-  @Getter
-  private final List<TableReference> references = new ArrayList<>();
-  // boolean whether to say the key is only a denormalized variable kept or can
-  // be used in join resolution as well
-  @Getter private Boolean isJoinKey = true;
-  @Getter private List<ChainRefCol> chainRefColumns = new ArrayList<>();
-
-  @Data
-  public static class ChainRefCol {
-    private final String chainName;
-    private final String refColumn;
-  }
-
-  public ReferencedDimAtrribute(FieldSchema column, String displayString, TableReference reference) {
-    this(column, displayString, reference, null, null, null);
-  }
-
-  public ReferencedDimAtrribute(FieldSchema column, String displayString, TableReference reference, Date startTime,
-      Date endTime, Double cost) {
-    this(column, displayString, reference, startTime, endTime, cost, true);
-  }
-
-  public ReferencedDimAtrribute(FieldSchema column, String displayString, TableReference reference, Date startTime,
-      Date endTime, Double cost, boolean isJoinKey) {
-    this(column, displayString, reference, startTime, endTime, cost, isJoinKey, null);
-  }
-
-  public ReferencedDimAtrribute(FieldSchema column, String displayString, TableReference reference, Date startTime,
-      Date endTime, Double cost, boolean isJoinKey, Long numOfDistinctValues) {
-    super(column, displayString, startTime, endTime, cost, numOfDistinctValues);
-    this.references.add(reference);
-    this.isJoinKey = isJoinKey;
-  }
-
-  public ReferencedDimAtrribute(FieldSchema column, String displayString, Collection<TableReference> references) {
-    this(column, displayString, references, null, null, null);
-  }
-
-  public ReferencedDimAtrribute(FieldSchema column, String displayString, Collection<TableReference> references,
-      Date startTime, Date endTime, Double cost) {
-    this(column, displayString, references, startTime, endTime, cost, true);
-  }
-
-  public ReferencedDimAtrribute(FieldSchema column, String displayString, Collection<TableReference> references,
-      Date startTime, Date endTime, Double cost, boolean isJoinKey) {
-    this(column, displayString, references, startTime, endTime, cost, isJoinKey, null);
-  }
-
-  public ReferencedDimAtrribute(FieldSchema column, String displayString, Collection<TableReference> references,
-      Date startTime, Date endTime, Double cost, boolean isJoinKey, Long numOfDistinctValues) {
-    this(column, displayString, references, startTime, endTime, cost, isJoinKey, numOfDistinctValues, null);
-  }
-
-  public ReferencedDimAtrribute(FieldSchema column, String displayString, Collection<TableReference> references,
-      Date startTime, Date endTime, Double cost, boolean isJoinKey, Long numOfDistinctValues, List<String> values) {
-    super(column, displayString, startTime, endTime, cost, numOfDistinctValues, values);
-    this.references.addAll(references);
-    this.isJoinKey = isJoinKey;
-  }
-
-  public ReferencedDimAtrribute(FieldSchema column, String displayString, String chainName, String refColumn,
-      Date startTime, Date endTime, Double cost) {
-    this(column, displayString, chainName, refColumn, startTime, endTime, cost, null);
-  }
-
-  public ReferencedDimAtrribute(FieldSchema column, String displayString, String chainName, String refColumn,
-      Date startTime, Date endTime, Double cost, Long numOfDistinctValues) {
-    this(column, displayString,
-      Collections.singletonList(new ChainRefCol(chainName.toLowerCase(), refColumn.toLowerCase())), startTime, endTime,
-      cost, numOfDistinctValues);
-  }
-
-  public ReferencedDimAtrribute(FieldSchema column, String displayString, List<ChainRefCol> chainRefCols,
-    Date startTime, Date endTime, Double cost, Long numOfDistinctValues) {
-    this(column, displayString, chainRefCols, startTime, endTime, cost, numOfDistinctValues, null);
-  }
-
-  public ReferencedDimAtrribute(FieldSchema column, String displayString, List<ChainRefCol> chainRefCols,
-    Date startTime, Date endTime, Double cost, Long numOfDistinctValues, List<String> values) {
-    super(column, displayString, startTime, endTime, cost, numOfDistinctValues, values);
-    chainRefColumns.addAll(chainRefCols);
-    this.isJoinKey = false;
-  }
-
-  public void addReference(TableReference reference) {
-    references.add(reference);
-  }
-
-  public boolean removeReference(TableReference ref) {
-    return references.remove(ref);
-  }
-
-  public boolean useAsJoinKey() {
-    return isJoinKey;
-  }
-
-  @Override
-  public void addProperties(Map<String, String> props) {
-    super.addProperties(props);
-    if (!chainRefColumns.isEmpty()) {
-      StringBuilder chainNamesValue = new StringBuilder();
-      StringBuilder refColsValue = new StringBuilder();
-      Iterator<ChainRefCol> iter = chainRefColumns.iterator();
-      // Add the first without appending separator
-      ChainRefCol chainRefCol = iter.next();
-      chainNamesValue.append(chainRefCol.getChainName());
-      refColsValue.append(chainRefCol.getRefColumn());
-      while (iter.hasNext()) {
-        chainRefCol = iter.next();
-        chainNamesValue.append(CHAIN_REF_COL_SEPARATOR).append(chainRefCol.getChainName());
-        refColsValue.append(CHAIN_REF_COL_SEPARATOR).append(chainRefCol.getRefColumn());
-      }
-      props.put(MetastoreUtil.getDimRefChainNameKey(getName()), chainNamesValue.toString());
-      props.put(MetastoreUtil.getDimRefChainColumnKey(getName()), refColsValue.toString());
-    } else {
-      props.put(MetastoreUtil.getDimensionSrcReferenceKey(getName()),
-          MetastoreUtil.getReferencesString(references));
-      props.put(MetastoreUtil.getDimUseAsJoinKey(getName()), isJoinKey.toString());
-    }
-  }
-
-  /**
-   * This is used only for serializing
-   *
-   * @param name
-   * @param props
-   */
-  public ReferencedDimAtrribute(String name, Map<String, String> props) {
-    super(name, props);
-    String chNamesStr = props.get(MetastoreUtil.getDimRefChainNameKey(getName()));
-    if (!StringUtils.isBlank(chNamesStr)) {
-      String refColsStr = props.get(MetastoreUtil.getDimRefChainColumnKey(getName()));
-      String[] chainNames = StringUtils.split(chNamesStr, ",");
-      String[] refCols = StringUtils.split(refColsStr, ",");
-      for (int i = 0; i < chainNames.length; i++) {
-        chainRefColumns.add(new ChainRefCol(chainNames[i], refCols[i]));
-      }
-      this.isJoinKey = false;
-    } else {
-      String refListStr = props.get(MetastoreUtil.getDimensionSrcReferenceKey(getName()));
-      String[] refListDims = StringUtils.split(refListStr, ",");
-      for (String refDimRaw : refListDims) {
-        references.add(new TableReference(refDimRaw));
-      }
-      String isJoinKeyStr = props.get(MetastoreUtil.getDimUseAsJoinKey(name));
-      if (isJoinKeyStr != null) {
-        isJoinKey = Boolean.parseBoolean(isJoinKeyStr);
-      }
-    }
-  }
-
-  /**
-   * Tells whether the attribute is retrieved from chain
-   *
-   * @return true/false
-   */
-  public boolean isChainedColumn() {
-    return !chainRefColumns.isEmpty();
-  }
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAttribute.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAttribute.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAttribute.java
new file mode 100644
index 0000000..9a1c44b
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAttribute.java
@@ -0,0 +1,115 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.metadata;
+
+import java.util.*;
+
+import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.server.api.error.LensException;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+
+import lombok.Data;
+import lombok.EqualsAndHashCode;
+import lombok.Getter;
+import lombok.ToString;
+
+@EqualsAndHashCode(callSuper = true)
+@ToString(callSuper = true)
+public class ReferencedDimAttribute extends BaseDimAttribute {
+  private static final char CHAIN_REF_COL_SEPARATOR = ',';
+  @Getter
+  private List<ChainRefCol> chainRefColumns = new ArrayList<>();
+
+  @Data
+  public static class ChainRefCol {
+    private final String chainName;
+    private final String refColumn;
+  }
+
+  public ReferencedDimAttribute(FieldSchema column, String displayString, String chainName, String refColumn,
+    Date startTime, Date endTime, Double cost) throws LensException {
+    this(column, displayString, chainName, refColumn, startTime, endTime, cost, null);
+  }
+
+  public ReferencedDimAttribute(FieldSchema column, String displayString, String chainName, String refColumn,
+    Date startTime, Date endTime, Double cost, Long numOfDistinctValues) throws LensException {
+    this(column, displayString,
+      Collections.singletonList(new ChainRefCol(chainName.toLowerCase(), refColumn.toLowerCase())), startTime, endTime,
+      cost, numOfDistinctValues);
+  }
+
+  public ReferencedDimAttribute(FieldSchema column, String displayString, List<ChainRefCol> chainRefCols,
+    Date startTime, Date endTime, Double cost, Long numOfDistinctValues) throws LensException {
+    this(column, displayString, chainRefCols, startTime, endTime, cost, numOfDistinctValues, null);
+  }
+
+  public ReferencedDimAttribute(FieldSchema column, String displayString, List<ChainRefCol> chainRefCols,
+    Date startTime, Date endTime, Double cost, Long numOfDistinctValues, List<String> values) throws LensException {
+    super(column, displayString, startTime, endTime, cost, numOfDistinctValues, values);
+    if (chainRefCols.isEmpty()) {
+      throw new LensException(LensCubeErrorCode.ERROR_IN_ENTITY_DEFINITION.getLensErrorInfo(), " Ref column: "
+        + getName() + " does not have any chain_ref_column defined");
+    }
+    chainRefColumns.addAll(chainRefCols);
+  }
+
+  @Override
+  public void addProperties(Map<String, String> props) {
+    super.addProperties(props);
+    StringBuilder chainNamesValue = new StringBuilder();
+    StringBuilder refColsValue = new StringBuilder();
+    Iterator<ChainRefCol> iterator = chainRefColumns.iterator();
+    // Add the first without appending separator
+    ChainRefCol chainRefCol = iterator.next();
+    chainNamesValue.append(chainRefCol.getChainName());
+    refColsValue.append(chainRefCol.getRefColumn());
+    while (iterator.hasNext()) {
+      chainRefCol = iterator.next();
+      chainNamesValue.append(CHAIN_REF_COL_SEPARATOR).append(chainRefCol.getChainName());
+      refColsValue.append(CHAIN_REF_COL_SEPARATOR).append(chainRefCol.getRefColumn());
+    }
+    props.put(MetastoreUtil.getDimRefChainNameKey(getName()), chainNamesValue.toString());
+    props.put(MetastoreUtil.getDimRefChainColumnKey(getName()), refColsValue.toString());
+  }
+
+  /**
+   * This is used only for serializing
+   *
+   * @param name attribute name
+   * @param props Properties
+   */
+  public ReferencedDimAttribute(String name, Map<String, String> props) throws LensException {
+    super(name, props);
+    String chNamesStr = props.get(MetastoreUtil.getDimRefChainNameKey(getName()));
+    if (!StringUtils.isBlank(chNamesStr)) {
+      String refColsStr = props.get(MetastoreUtil.getDimRefChainColumnKey(getName()));
+      String[] chainNames = StringUtils.split(chNamesStr, ",");
+      String[] refCols = StringUtils.split(refColsStr, ",");
+      for (int i = 0; i < chainNames.length; i++) {
+        chainRefColumns.add(new ChainRefCol(chainNames[i], refCols[i]));
+      }
+    } else {
+      throw new LensException(LensCubeErrorCode.ERROR_IN_ENTITY_DEFINITION.getLensErrorInfo(), " Ref column: "
+        + getName() + " does not have any chain_ref_column defined");
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/metadata/SchemaGraph.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/SchemaGraph.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/SchemaGraph.java
deleted file mode 100644
index fa230ef..0000000
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/SchemaGraph.java
+++ /dev/null
@@ -1,377 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.metadata;
-
-import java.util.*;
-
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-
-import lombok.AllArgsConstructor;
-import lombok.Data;
-import lombok.RequiredArgsConstructor;
-
-public class SchemaGraph {
-  /*
-   * An edge in the schema graph
-   */
-  @Data
-  @AllArgsConstructor
-  @RequiredArgsConstructor
-  public static class TableRelationship {
-    final String fromColumn;
-    final AbstractCubeTable fromTable;
-    final String toColumn;
-    final AbstractCubeTable toTable;
-    boolean mapsToMany = false;
-
-    @Override
-    public String toString() {
-      return fromTable.getName() + "." + fromColumn + "->" + toTable.getName() + "." + toColumn
-        + (mapsToMany ? "[n]" : "");
-    }
-
-  }
-
-  /**
-   * A list of table relationships that can be combined to get a join clause
-   */
-  public static class JoinPath {
-    final List<TableRelationship> edges;
-    // Store the map of a table against all columns of that table which are in the path
-    private Map<AbstractCubeTable, List<String>> columnsForTable = new HashMap<AbstractCubeTable, List<String>>();
-
-    public JoinPath() {
-      edges = new ArrayList<TableRelationship>();
-    }
-
-    public JoinPath(JoinPath other) {
-      edges = new ArrayList<TableRelationship>(other.edges);
-    }
-
-    public void initColumnsForTable() {
-      if (!columnsForTable.isEmpty()) {
-        // already inited
-        return;
-      }
-      for (TableRelationship edge : edges) {
-        addColumnsForEdge(edge);
-      }
-    }
-
-    public void addEdge(TableRelationship edge) {
-      edges.add(edge);
-    }
-
-    public boolean isEmpty() {
-      return edges.isEmpty();
-    }
-
-    public List<TableRelationship> getEdges() {
-      return edges;
-    }
-
-    private void addColumnsForEdge(TableRelationship edge) {
-      addColumn(edge.getFromTable(), edge.getFromColumn());
-      addColumn(edge.getToTable(), edge.getToColumn());
-    }
-
-    private void addColumn(AbstractCubeTable table, String column) {
-      if (table == null || column == null) {
-        return;
-      }
-      List<String> columns = columnsForTable.get(table);
-      if (columns == null) {
-        columns = new ArrayList<String>();
-        columnsForTable.put(table, columns);
-      }
-      columns.add(column);
-    }
-
-    public List<String> getColumnsForTable(AbstractCubeTable table) {
-      return columnsForTable.get(table);
-    }
-
-    public Set<AbstractCubeTable> getAllTables() {
-      return columnsForTable.keySet();
-    }
-
-    public boolean containsColumnOfTable(String column, AbstractCubeTable table) {
-      for (TableRelationship edge : edges) {
-        if ((table.equals(edge.getFromTable()) && column.equals(edge.getFromColumn()))
-          || table.equals(edge.getToTable()) && column.equals(edge.getToColumn())) {
-          return true;
-        }
-      }
-      return false;
-    }
-
-    public String toString() {
-      return edges.toString();
-    }
-  }
-
-  /**
-   * Perform a search for join paths on the schema graph
-   */
-  public static class GraphSearch {
-    private final AbstractCubeTable source;
-    private final AbstractCubeTable target;
-    // edges going out of the table
-    private final Map<AbstractCubeTable, Set<TableRelationship>> outGraph;
-    // egds coming into the table
-    private final Map<AbstractCubeTable, Set<TableRelationship>> inGraph;
-    // Used in tests to validate that all paths are searched
-
-    public GraphSearch(AbstractCubeTable source, AbstractCubeTable target, SchemaGraph graph) {
-      this.source = source;
-      this.target = target;
-
-      if (target instanceof CubeInterface) {
-        this.outGraph = graph.getCubeGraph((CubeInterface) target);
-        this.inGraph = graph.getCubeInGraph((CubeInterface) target);
-      } else if (target instanceof Dimension) {
-        this.outGraph = graph.getDimOnlyGraph();
-        this.inGraph = graph.getDimOnlyInGraph();
-      } else {
-        throw new IllegalArgumentException("Target neither cube nor dimension");
-      }
-    }
-
-    public List<JoinPath> findAllPathsToTarget() {
-      return findAllPathsToTarget(source, new JoinPath(), new HashSet<AbstractCubeTable>());
-    }
-
-    /**
-     * Recursive DFS to get all paths between source and target. Let path till this node = p Paths at node adjacent to
-     * target = [edges leading to target] Path at a random node = [path till this node + p for each p in
-     * path(neighbors)]
-     */
-    List<JoinPath> findAllPathsToTarget(AbstractCubeTable source, JoinPath joinPathTillSource,
-      Set<AbstractCubeTable> visited) {
-      List<JoinPath> joinPaths = new ArrayList<JoinPath>();
-      visited.add(source);
-
-      if (inGraph.get(source) == null) {
-        return joinPaths;
-      }
-      for (TableRelationship edge : inGraph.get(source)) {
-        if (visited.contains(edge.getFromTable())) {
-          continue;
-        }
-
-
-        JoinPath p = new JoinPath(joinPathTillSource);
-        p.addEdge(edge);
-        AbstractCubeTable neighbor = edge.getFromTable();
-        if (neighbor.getName().equals(target.getName())) {
-          // Got a direct path
-          joinPaths.add(p);
-        } else if (neighbor instanceof Dimension) {
-          List<JoinPath> pathsFromNeighbor = findAllPathsToTarget(neighbor, new JoinPath(p), visited);
-          for (JoinPath pn : pathsFromNeighbor) {
-            if (!pn.isEmpty()) {
-              joinPaths.add(pn);
-            }
-          }
-        }
-      }
-
-      return joinPaths;
-    }
-  }
-
-  /**
-   * Graph of tables in the cube metastore. Links between the tables are relationships in the cube.
-   */
-  private final CubeMetastoreClient metastore;
-  // Graph for each cube
-  // graph with out going edges
-  private Map<CubeInterface, Map<AbstractCubeTable, Set<TableRelationship>>> cubeOutGraph;
-  // graph with incoming edges
-  private Map<CubeInterface, Map<AbstractCubeTable, Set<TableRelationship>>> cubeInGraph;
-
-  // sub graph that contains only dimensions, mainly used while checking connectivity between a set of dimensions
-  // graph with out going edges
-  private Map<AbstractCubeTable, Set<TableRelationship>> dimOnlyOutGraph;
-  // graph with incoming edges
-  private Map<AbstractCubeTable, Set<TableRelationship>> dimOnlyInGraph;
-
-  public SchemaGraph(CubeMetastoreClient metastore) throws HiveException {
-    this.metastore = metastore;
-    buildSchemaGraph();
-  }
-
-  public Map<AbstractCubeTable, Set<TableRelationship>> getCubeGraph(CubeInterface cube) {
-    return cubeOutGraph.get(cube);
-  }
-
-  public Map<AbstractCubeTable, Set<TableRelationship>> getDimOnlyGraph() {
-    return dimOnlyOutGraph;
-  }
-
-  public Map<AbstractCubeTable, Set<TableRelationship>> getCubeInGraph(CubeInterface cube) {
-    return cubeInGraph.get(cube);
-  }
-
-  public Map<AbstractCubeTable, Set<TableRelationship>> getDimOnlyInGraph() {
-    return dimOnlyInGraph;
-  }
-
-  /**
-   * Build the schema graph for all cubes and dimensions
-   *
-   * @return
-   * @throws org.apache.hadoop.hive.ql.metadata.HiveException
-   */
-  private void buildSchemaGraph() throws HiveException {
-    cubeOutGraph = new HashMap<CubeInterface, Map<AbstractCubeTable, Set<TableRelationship>>>();
-    cubeInGraph = new HashMap<CubeInterface, Map<AbstractCubeTable, Set<TableRelationship>>>();
-    for (CubeInterface cube : metastore.getAllCubes()) {
-      Map<AbstractCubeTable, Set<TableRelationship>> outGraph
-        = new HashMap<AbstractCubeTable, Set<TableRelationship>>();
-      Map<AbstractCubeTable, Set<TableRelationship>> inGraph
-        = new HashMap<AbstractCubeTable, Set<TableRelationship>>();
-      buildGraph((AbstractCubeTable) cube, outGraph, inGraph);
-
-      for (Dimension dim : metastore.getAllDimensions()) {
-        buildGraph(dim, outGraph, inGraph);
-      }
-
-      cubeOutGraph.put(cube, outGraph);
-      cubeInGraph.put(cube, inGraph);
-    }
-
-    dimOnlyOutGraph = new HashMap<AbstractCubeTable, Set<TableRelationship>>();
-    dimOnlyInGraph = new HashMap<AbstractCubeTable, Set<TableRelationship>>();
-    for (Dimension dim : metastore.getAllDimensions()) {
-      buildGraph(dim, dimOnlyOutGraph, dimOnlyInGraph);
-    }
-  }
-
-  private List<CubeDimAttribute> getRefDimensions(AbstractCubeTable cube) throws HiveException {
-    List<CubeDimAttribute> refDimensions = new ArrayList<CubeDimAttribute>();
-    Set<CubeDimAttribute> allAttrs = null;
-    if (cube instanceof CubeInterface) {
-      allAttrs = ((CubeInterface) cube).getDimAttributes();
-    } else if (cube instanceof Dimension) {
-      allAttrs = ((Dimension) cube).getAttributes();
-    } else {
-      throw new HiveException("Not a valid table type" + cube);
-    }
-    // find out all dimensions which link to other dimension tables
-    for (CubeDimAttribute dim : allAttrs) {
-      if (dim instanceof ReferencedDimAtrribute) {
-        if (((ReferencedDimAtrribute) dim).useAsJoinKey()) {
-          refDimensions.add(dim);
-        }
-      } else if (dim instanceof HierarchicalDimAttribute) {
-        for (CubeDimAttribute hdim : ((HierarchicalDimAttribute) dim).getHierarchy()) {
-          if (hdim instanceof ReferencedDimAtrribute && ((ReferencedDimAtrribute) hdim).useAsJoinKey()) {
-            refDimensions.add(hdim);
-          }
-        }
-      }
-    }
-    return refDimensions;
-  }
-
-  // Build schema graph for a cube/dimension
-  private void buildGraph(AbstractCubeTable cubeTable, Map<AbstractCubeTable, Set<TableRelationship>> outGraph,
-    Map<AbstractCubeTable, Set<TableRelationship>> inGraph)
-    throws HiveException {
-    List<CubeDimAttribute> refDimensions = getRefDimensions(cubeTable);
-
-    // build graph for each linked dimension
-    for (CubeDimAttribute dim : refDimensions) {
-      // Find out references leading from dimension columns of the cube/dimension if any
-      if (dim instanceof ReferencedDimAtrribute) {
-        ReferencedDimAtrribute refDim = (ReferencedDimAtrribute) dim;
-        List<TableReference> refs = refDim.getReferences();
-
-        for (TableReference ref : refs) {
-          String destColumnName = ref.getDestColumn();
-          String destTableName = ref.getDestTable();
-
-          if (metastore.isDimension(destTableName)) {
-            // Cube -> Dimension or Dimension -> Dimension reference
-            Dimension relatedDim = metastore.getDimension(destTableName);
-            addLinks(refDim.getName(), cubeTable, destColumnName, relatedDim, ref.isMapsToMany(), outGraph, inGraph);
-          } else {
-            throw new HiveException("Dim -> Cube references are not supported: " + dim.getName() + "."
-              + refDim.getName() + "->" + destTableName + "." + destColumnName);
-          }
-        } // end loop for refs from a dim
-      }
-    }
-  }
-
-  private void addLinks(String srcCol, AbstractCubeTable srcTbl, String destCol, AbstractCubeTable destTbl,
-    boolean mapsToMany, Map<AbstractCubeTable, Set<TableRelationship>> outGraph,
-    Map<AbstractCubeTable, Set<TableRelationship>> inGraph) {
-
-    TableRelationship rel = new TableRelationship(srcCol, srcTbl, destCol, destTbl, mapsToMany);
-
-    Set<TableRelationship> inEdges = inGraph.get(destTbl);
-    if (inEdges == null) {
-      inEdges = new LinkedHashSet<TableRelationship>();
-      inGraph.put(destTbl, inEdges);
-    }
-    inEdges.add(rel);
-
-    Set<TableRelationship> outEdges = outGraph.get(srcTbl);
-    if (outEdges == null) {
-      outEdges = new LinkedHashSet<TableRelationship>();
-      outGraph.put(srcTbl, outEdges);
-    }
-
-    outEdges.add(rel);
-
-  }
-
-  public void print() {
-    for (CubeInterface cube : cubeOutGraph.keySet()) {
-      Map<AbstractCubeTable, Set<TableRelationship>> graph = cubeOutGraph.get(cube);
-      System.out.println("**Cube " + cube.getName() + " Out egdes");
-      System.out.println("--Out Graph-Nodes=" + graph.size());
-      for (AbstractCubeTable tab : graph.keySet()) {
-        System.out.println(tab.getName() + "::" + graph.get(tab));
-      }
-    }
-    System.out.println("**Dim only outgraph");
-    System.out.println("--Out Graph-Nodes=" + dimOnlyOutGraph.size());
-    for (AbstractCubeTable tab : dimOnlyOutGraph.keySet()) {
-      System.out.println(tab.getName() + "::" + dimOnlyOutGraph.get(tab));
-    }
-
-    for (CubeInterface cube : cubeInGraph.keySet()) {
-      Map<AbstractCubeTable, Set<TableRelationship>> graph = cubeInGraph.get(cube);
-      System.out.println("**Cube " + cube.getName() + " In egdes");
-      System.out.println("--In Graph-Nodes=" + graph.size());
-      for (AbstractCubeTable tab : graph.keySet()) {
-        System.out.println(tab.getName() + "::" + graph.get(tab));
-      }
-    }
-    System.out.println("**Dim only Ingraph");
-    System.out.println("--In Graph-Nodes=" + dimOnlyInGraph.size());
-    for (AbstractCubeTable tab : dimOnlyInGraph.keySet()) {
-      System.out.println(tab.getName() + "::" + dimOnlyInGraph.get(tab));
-    }
-
-  }
-}


[39/51] [abbrv] lens git commit: LENS-912 : Make cube keyword optional in queries

Posted by de...@apache.org.
LENS-912 : Make cube keyword optional in queries


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/b3f993d8
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/b3f993d8
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/b3f993d8

Branch: refs/heads/current-release-line
Commit: b3f993d8af508f900b22b7c67813b734b50255d8
Parents: 8a36572
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Wed Jan 27 16:58:18 2016 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Wed Jan 27 16:58:18 2016 +0530

----------------------------------------------------------------------
 .../org/apache/lens/api/query/QueryStatus.java  |   5 +
 .../lens/cube/metadata/CubeMetastoreClient.java |  20 +-
 .../org/apache/lens/cube/parse/HQLParser.java   |   7 +
 .../server/query/QueryExecutionServiceImpl.java |   2 +-
 .../lens/server/query/QueryServiceResource.java |   3 +-
 .../lens/server/rewrite/CubeKeywordRemover.java |  41 ++++
 .../apache/lens/server/rewrite/RewriteUtil.java | 150 +++++++++-----
 .../rewrite/UserQueryToCubeQueryRewriter.java   |   2 +-
 .../lens/server/query/TestQueryService.java     |  36 ++--
 .../server/rewrite/CubeKeywordRemoverTest.java  |  46 +++++
 .../lens/server/rewrite/TestRewriting.java      | 198 +++++++++++++++----
 src/site/apt/user/olap-cube.apt                 |   2 +-
 12 files changed, 403 insertions(+), 109 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/b3f993d8/lens-api/src/main/java/org/apache/lens/api/query/QueryStatus.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/query/QueryStatus.java b/lens-api/src/main/java/org/apache/lens/api/query/QueryStatus.java
index 91cbe39..44fd97e 100644
--- a/lens-api/src/main/java/org/apache/lens/api/query/QueryStatus.java
+++ b/lens-api/src/main/java/org/apache/lens/api/query/QueryStatus.java
@@ -220,6 +220,11 @@ public class QueryStatus implements Serializable {
     return status.equals(Status.QUEUED);
   }
 
+  public boolean failed() {
+    return status.equals(Status.FAILED);
+  }
+
+
   /**
    * Checks if is valid transition.
    *

http://git-wip-us.apache.org/repos/asf/lens/blob/b3f993d8/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index dcb932e..8969d1f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@ -40,14 +40,16 @@ import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
-import org.apache.hadoop.hive.ql.metadata.*;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.thrift.TException;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
-
 import lombok.extern.slf4j.Slf4j;
 
 /**
@@ -234,6 +236,20 @@ public class CubeMetastoreClient {
     return latestParts;
   }
 
+  public boolean isLensQueryableTable(String tableName) {
+    try {
+      Table table = getTable(tableName);
+      String typeProperty = table.getProperty(MetastoreConstants.TABLE_TYPE_KEY);
+      if (StringUtils.isBlank(typeProperty)) {
+        return false;
+      }
+      CubeTableType type = CubeTableType.valueOf(typeProperty);
+      return type == CubeTableType.CUBE || type == CubeTableType.DIMENSION;
+    } catch (HiveException e) {
+      return false;
+    }
+  }
+
 
   /**
    * In-memory storage of {@link PartitionTimeline} objects for each valid

http://git-wip-us.apache.org/repos/asf/lens/blob/b3f993d8/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
index b1deb07..c9aff5d 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
@@ -833,4 +833,11 @@ public final class HQLParser {
 
     return true;
   }
+
+  public static ASTNode leftMostChild(ASTNode node) {
+    while (node.getChildren() != null) {
+      node = (ASTNode) node.getChild(0);
+    }
+    return node;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/b3f993d8/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
index 2dff9af..672f2be 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
@@ -432,7 +432,7 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
         log.info("Driver {} for type {} is loaded", driverPath.getName(), driverType);
       } catch (Exception e) {
         log.error("Could not load driver {} of type {}", driverPath.getName(), driverType, e);
-        throw new LensException("Could not load driver "+driverPath.getName()+ " of type "+ driverType);
+        throw new LensException("Could not load driver "+driverPath.getName()+ " of type "+ driverType, e);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/b3f993d8/lens-server/src/main/java/org/apache/lens/server/query/QueryServiceResource.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryServiceResource.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryServiceResource.java
index 08192bd..bb4cfd2 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryServiceResource.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryServiceResource.java
@@ -199,7 +199,8 @@ public class QueryServiceResource {
    * {@link org.apache.lens.api.query.SubmitOp#EXECUTE} operation.
    * {@link QueryPlan} in case of {@link org.apache.lens.api.query.SubmitOp#EXPLAIN} operation.
    * {@link QueryHandleWithResultSet} in case {@link org.apache.lens.api.query.SubmitOp#EXECUTE_WITH_TIMEOUT}
-   * operation. {@link QueryCostTO} in case of {@link org.apache.lens.api.query.SubmitOp#ESTIMATE} operation.
+   * operation. {@link org.apache.lens.api.result.QueryCostTO} in case of
+   * {@link org.apache.lens.api.query.SubmitOp#ESTIMATE} operation.
    */
   @POST
   @Path("queries")

http://git-wip-us.apache.org/repos/asf/lens/blob/b3f993d8/lens-server/src/main/java/org/apache/lens/server/rewrite/CubeKeywordRemover.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/rewrite/CubeKeywordRemover.java b/lens-server/src/main/java/org/apache/lens/server/rewrite/CubeKeywordRemover.java
new file mode 100644
index 0000000..fd8d6e7
--- /dev/null
+++ b/lens-server/src/main/java/org/apache/lens/server/rewrite/CubeKeywordRemover.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ *
+ */
+package org.apache.lens.server.rewrite;
+
+
+import org.apache.lens.server.api.error.LensException;
+import org.apache.lens.server.api.query.rewrite.Phase1Rewriter;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+public class CubeKeywordRemover implements Phase1Rewriter {
+  @Override
+  public String rewrite(String query, Configuration queryConf, HiveConf metastoreConf) throws LensException {
+    return query.replaceAll("(?i)cube\\s+select", "select");
+  }
+
+  @Override
+  public void init(Configuration rewriteConf) {
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/b3f993d8/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java b/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java
index abec2b3..1c0cd35 100644
--- a/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java
+++ b/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java
@@ -22,9 +22,8 @@ import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 
+import org.apache.lens.cube.metadata.CubeMetastoreClient;
 import org.apache.lens.cube.parse.CubeQueryContext;
 import org.apache.lens.cube.parse.CubeQueryRewriter;
 import org.apache.lens.cube.parse.HQLParser;
@@ -38,6 +37,7 @@ import org.apache.lens.server.api.query.AbstractQueryContext;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 
@@ -52,14 +52,6 @@ public final class RewriteUtil {
   private RewriteUtil() {
 
   }
-
-  /** The cube pattern. */
-  static Pattern cubePattern = Pattern.compile(".*CUBE(.*)", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE
-    | Pattern.DOTALL);
-
-  /** The matcher. */
-  static Matcher matcher = null;
-
   /**
    * The Class CubeQueryInfo.
    */
@@ -92,14 +84,15 @@ public final class RewriteUtil {
     if (log.isDebugEnabled()) {
       log.debug("User query AST:{}", ast.dump());
     }
-    List<CubeQueryInfo> cubeQueries = new ArrayList<CubeQueryInfo>();
-    findCubePositions(ast, cubeQueries, query);
+    List<CubeQueryInfo> cubeQueries = new ArrayList<>();
+    findCubePositions(ast, cubeQueries, query, conf);
     for (CubeQueryInfo cqi : cubeQueries) {
       cqi.query = query.substring(cqi.startPos, cqi.endPos);
     }
     return cubeQueries;
   }
 
+
   /**
    * Find cube positions.
    *
@@ -108,7 +101,8 @@ public final class RewriteUtil {
    * @param originalQuery the original query
    * @throws LensException the lens exception
    */
-  private static void findCubePositions(ASTNode ast, List<CubeQueryInfo> cubeQueries, String originalQuery)
+  private static void findCubePositions(ASTNode ast, List<CubeQueryInfo> cubeQueries, String originalQuery,
+    HiveConf conf)
     throws LensException {
     int childCount = ast.getChildCount();
     if (ast.getToken() != null) {
@@ -116,13 +110,22 @@ public final class RewriteUtil {
         log.debug("First child: {} Type:{}", ast.getChild(0), ((ASTNode) ast.getChild(0)).getToken().getType());
       }
       if (ast.getToken().getType() == HiveParser.TOK_QUERY
-        && ((ASTNode) ast.getChild(0)).getToken().getType() == HiveParser.KW_CUBE) {
+        && (isCubeKeywordNode((ASTNode) ast.getChild(0)) || isFromNodeWithCubeTable((ASTNode) ast.getChild(0), conf))) {
         log.debug("Inside cube clause");
         CubeQueryInfo cqi = new CubeQueryInfo();
         cqi.cubeAST = ast;
         if (ast.getParent() != null) {
           ASTNode parent = (ASTNode) ast.getParent();
-          cqi.startPos = ast.getCharPositionInLine();
+          if (isCubeKeywordNode((ASTNode) ast.getChild(0))) {
+            cqi.startPos = ast.getCharPositionInLine();
+          } else {
+            ASTNode selectAST = (ASTNode) ast.getChild(1).getChild(1);
+            // Left most child of select AST will have char position just after select / select distinct
+            // Go back one "select[ distinct]"
+            cqi.startPos = getStartPos(originalQuery, HQLParser.leftMostChild(selectAST).getCharPositionInLine(),
+              "distinct");
+            cqi.startPos = getStartPos(originalQuery, cqi.startPos, "select");
+          }
           int ci = ast.getChildIndex();
           if (parent.getToken() == null || parent.getToken().getType() == HiveParser.TOK_EXPLAIN
             || parent.getToken().getType() == HiveParser.TOK_CREATETABLE) {
@@ -134,7 +137,17 @@ public final class RewriteUtil {
               cqi.endPos = getEndPos(originalQuery, parent.getChild(ci + 1).getCharPositionInLine(), ")");
             } else if (parent.getToken().getType() == HiveParser.TOK_UNION) {
               // one less for the next start and less the size of string 'UNION ALL'
-              cqi.endPos = getEndPos(originalQuery, parent.getChild(ci + 1).getCharPositionInLine() - 1, "UNION ALL");
+              ASTNode nextChild = (ASTNode) parent.getChild(ci + 1);
+              if (isCubeKeywordNode((ASTNode) nextChild.getChild(0))) {
+                cqi.endPos = getEndPos(originalQuery, nextChild.getCharPositionInLine() - 1, "UNION ALL");
+              } else {
+                // Go back one "union all select[ distinct]"
+                cqi.endPos = getEndPos(originalQuery, nextChild.getChild(1).getChild(1).getCharPositionInLine() - 1,
+                  "distinct");
+                cqi.endPos = getEndPos(originalQuery, cqi.endPos, "select");
+                cqi.endPos = getEndPos(originalQuery, cqi.endPos, "union all");
+              }
+
             } else {
               // Not expected to reach here
               log.warn("Unknown query pattern found with AST:{}", ast.dump());
@@ -156,7 +169,7 @@ public final class RewriteUtil {
         cubeQueries.add(cqi);
       } else {
         for (int childPos = 0; childPos < childCount; ++childPos) {
-          findCubePositions((ASTNode) ast.getChild(childPos), cubeQueries, originalQuery);
+          findCubePositions((ASTNode) ast.getChild(childPos), cubeQueries, originalQuery, conf);
         }
       }
     } else {
@@ -164,6 +177,39 @@ public final class RewriteUtil {
     }
   }
 
+  private static boolean isCubeTableNode(ASTNode node, HiveConf conf) throws LensException {
+    if (node.getType() == HiveParser.TOK_TABREF || node.getType() == HiveParser.TOK_TABNAME) {
+      return isCubeTableNode((ASTNode) node.getChild(0), conf);
+    }
+    if (node.getText().contains("JOIN")) {
+      if (isCubeTableNode((ASTNode) node.getChild(0), conf)) {
+        for (int i = 1; i < node.getChildCount(); i += 2) {
+          if (!isCubeTableNode((ASTNode) node.getChild(i), conf)) {
+            return false;
+          }
+        }
+        return true;
+      }
+    }
+    return node.getType() == HiveParser.Identifier && getClient(conf).isLensQueryableTable(node.getText());
+  }
+
+  private static boolean isFromNodeWithCubeTable(ASTNode child, HiveConf conf) throws LensException {
+    return child.getType() == HiveParser.TOK_FROM && isCubeTableNode((ASTNode) child.getChild(0), conf);
+  }
+
+  public static CubeMetastoreClient getClient(HiveConf conf) throws LensException {
+    try {
+      return CubeMetastoreClient.getInstance(conf);
+    } catch (HiveException e) {
+      throw new LensException("Couldn't get instance of metastore client", e);
+    }
+  }
+
+  private static boolean isCubeKeywordNode(ASTNode child) {
+    return child.getToken().getType() == HiveParser.KW_CUBE;
+  }
+
   /**
    * Gets the end pos.
    *
@@ -173,17 +219,38 @@ public final class RewriteUtil {
    * @return the end pos
    */
   private static int getEndPos(String query, int backTrackIndex, String... backTrackStr) {
+    backTrackIndex = backTrack(query, backTrackIndex, backTrackStr);
+    while (backTrackIndex > 0 && Character.isSpaceChar(query.charAt(backTrackIndex - 1))) {
+      backTrackIndex--;
+    }
+    return backTrackIndex;
+  }
+
+  private static int backTrack(String query, int backTrackIndex, String... backTrackStr) {
     if (backTrackStr != null) {
       String q = query.substring(0, backTrackIndex).toLowerCase();
-      for (int i = 0; i < backTrackStr.length; i++) {
-        if (q.trim().endsWith(backTrackStr[i].toLowerCase())) {
-          backTrackIndex = q.lastIndexOf(backTrackStr[i].toLowerCase());
+      for (String aBackTrackStr : backTrackStr) {
+        if (q.trim().endsWith(aBackTrackStr.toLowerCase())) {
+          backTrackIndex = q.lastIndexOf(aBackTrackStr.toLowerCase());
           break;
         }
       }
     }
-    while (Character.isSpaceChar(query.charAt(backTrackIndex - 1))) {
-      backTrackIndex--;
+    return backTrackIndex;
+  }
+
+  /**
+   * Gets the start pos: backtracks past the given strings, then skips forward
+   * over any following whitespace.
+   *
+   * @param query          the query
+   * @param backTrackIndex the back track index
+   * @param backTrackStr   the back track str
+   * @return the start pos
+   */
+  private static int getStartPos(String query, int backTrackIndex, String... backTrackStr) {
+    backTrackIndex = backTrack(query, backTrackIndex, backTrackStr);
+    while (backTrackIndex < query.length() && Character.isSpaceChar(query.charAt(backTrackIndex))) {
+      backTrackIndex++;
     }
     return backTrackIndex;
   }
@@ -224,7 +291,7 @@ public final class RewriteUtil {
     try {
 
       String replacedQuery = getReplacedQuery(ctx.getPhase1RewrittenQuery());
-      Map<LensDriver, DriverRewriterRunnable> runnables = new LinkedHashMap<LensDriver, DriverRewriterRunnable>();
+      Map<LensDriver, DriverRewriterRunnable> runnables = new LinkedHashMap<>();
       List<RewriteUtil.CubeQueryInfo> cubeQueries = findCubePositions(replacedQuery, ctx.getHiveConf());
 
       for (LensDriver driver : ctx.getDriverContext().getDrivers()) {
@@ -246,8 +313,7 @@ public final class RewriteUtil {
   }
 
   public static DriverQueryPlan getRewriterPlan(DriverRewriterRunnable rewriter) {
-    RewriterPlan plan = new RewriterPlan(rewriter.cubeQueryCtx);
-    return plan;
+    return new RewriterPlan(rewriter.cubeQueryCtx);
   }
 
   public static class DriverRewriterRunnable implements Runnable {
@@ -275,15 +341,15 @@ public final class RewriteUtil {
     private String rewrittenQuery;
 
     public DriverRewriterRunnable(LensDriver driver,
-                                  AbstractQueryContext ctx,
-                                  List<CubeQueryInfo> cubeQueries,
-                                  String replacedQuery) {
+      AbstractQueryContext ctx,
+      List<CubeQueryInfo> cubeQueries,
+      String replacedQuery) {
       this.driver = driver;
       this.ctx = ctx;
       this.cubeQueries = cubeQueries;
       this.replacedQuery = replacedQuery;
       if (cubeQueries != null) {
-        cubeQueryCtx = new ArrayList<CubeQueryContext>(cubeQueries.size());
+        cubeQueryCtx = new ArrayList<>(cubeQueries.size());
       }
     }
 
@@ -296,7 +362,7 @@ public final class RewriteUtil {
       }
 
       MethodMetricsContext rewriteGauge = MethodMetricsFactory
-          .createMethodGauge(ctx.getDriverConf(driver), true, REWRITE_QUERY_GAUGE);
+        .createMethodGauge(ctx.getDriverConf(driver), true, REWRITE_QUERY_GAUGE);
       StringBuilder builder = new StringBuilder();
       int start = 0;
       CubeQueryRewriter rewriter = null;
@@ -320,7 +386,7 @@ public final class RewriteUtil {
           // Parse and rewrite individual cube query
           CubeQueryContext cqc = rewriter.rewrite(cqi.query);
           MethodMetricsContext toHQLGauge = MethodMetricsFactory
-              .createMethodGauge(ctx.getDriverConf(driver), true, qIndex + "-" + TOHQL_GAUGE);
+            .createMethodGauge(ctx.getDriverConf(driver), true, qIndex + "-" + TOHQL_GAUGE);
           // toHQL actually generates the rewritten query
           String hqlQuery = cqc.toHQL();
           cubeQueryCtx.add(cqc);
@@ -368,25 +434,9 @@ public final class RewriteUtil {
       log.warn("Driver : {}  Skipped for the query rewriting due to ", driver, e);
       ctx.setDriverRewriteError(driver, e);
       failureCause = new StringBuilder(" Driver :")
-          .append(driver.getFullyQualifiedName())
-          .append(" Cause :" + e.getLocalizedMessage())
-          .toString();
-    }
-  }
-
-  /**
-   * Checks if is cube query.
-   *
-   * @param query the query
-   * @return true, if is cube query
-   */
-  public static boolean isCubeQuery(String query) {
-    if (matcher == null) {
-      matcher = cubePattern.matcher(query);
-    } else {
-      matcher.reset(query);
+        .append(driver.getFullyQualifiedName())
+        .append(" Cause :" + e.getLocalizedMessage())
+        .toString();
     }
-    return matcher.matches();
   }
-
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/b3f993d8/lens-server/src/main/java/org/apache/lens/server/rewrite/UserQueryToCubeQueryRewriter.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/rewrite/UserQueryToCubeQueryRewriter.java b/lens-server/src/main/java/org/apache/lens/server/rewrite/UserQueryToCubeQueryRewriter.java
index 35de4d7..79cee62 100644
--- a/lens-server/src/main/java/org/apache/lens/server/rewrite/UserQueryToCubeQueryRewriter.java
+++ b/lens-server/src/main/java/org/apache/lens/server/rewrite/UserQueryToCubeQueryRewriter.java
@@ -39,7 +39,7 @@ import com.google.common.collect.Lists;
  * @see LensConfConstants#QUERY_PHASE1_REWRITERS
  */
 public class UserQueryToCubeQueryRewriter {
-  List<Phase1Rewriter> phase1RewriterList = Lists.newArrayList();
+  List<Phase1Rewriter> phase1RewriterList = Lists.<Phase1Rewriter>newArrayList(new CubeKeywordRemover());
 
   public UserQueryToCubeQueryRewriter(Configuration conf) throws LensException {
     try {

http://git-wip-us.apache.org/repos/asf/lens/blob/b3f993d8/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index 494bce5..3facded 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -228,7 +228,7 @@ public class TestQueryService extends LensJerseyTest {
     Collection<LensDriver> drivers = queryService.getDrivers();
     assertEquals(drivers.size(), 4);
     Set<String> driverNames = new HashSet<String>(drivers.size());
-    for(LensDriver driver : drivers){
+    for (LensDriver driver : drivers) {
       assertEquals(driver.getConf().get("lens.driver.test.drivername"), driver.getFullyQualifiedName());
       driverNames.add(driver.getFullyQualifiedName());
     }
@@ -458,7 +458,7 @@ public class TestQueryService extends LensJerseyTest {
     Response response = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
     LensAPIResult result = response.readEntity(LensAPIResult.class);
     List<LensErrorTO> childErrors = result.getLensErrorTO().getChildErrors();
-    boolean hiveSemanticErrorExists=false;
+    boolean hiveSemanticErrorExists = false;
     for (LensErrorTO error : childErrors) {
       if (error.getCode() == LensDriverErrorCode.SEMANTIC_ERROR.getLensErrorInfo().getErrorCode()) {
         hiveSemanticErrorExists = true;
@@ -1335,7 +1335,7 @@ public class TestQueryService extends LensJerseyTest {
         if (driver instanceof HiveDriver) {
           addedToHiveDriver =
             ((HiveDriver) driver).areDBResourcesAddedForSession(sessionHandle.getPublicId().toString(), DB_WITH_JARS);
-          if (addedToHiveDriver){
+          if (addedToHiveDriver) {
             break; //There are two Hive drivers now both pointing to same hive server. So break after first success
           }
         }
@@ -1382,7 +1382,8 @@ public class TestQueryService extends LensJerseyTest {
     final FormDataMultiPart mp = new FormDataMultiPart();
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
       MediaType.APPLICATION_XML_TYPE));
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "cube select ID from nonexist"));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(),
+      "cube sdfelect ID from cube_nonexist"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "estimate"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
       MediaType.APPLICATION_XML_TYPE));
@@ -1392,8 +1393,9 @@ public class TestQueryService extends LensJerseyTest {
 
 
     LensErrorTO expectedLensErrorTO = LensErrorTO.composedOf(
-      LensCubeErrorCode.NEITHER_CUBE_NOR_DIMENSION.getLensErrorInfo().getErrorCode(),
-      "Neither cube nor dimensions accessed in the query", TestDataUtils.MOCK_STACK_TRACE);
+      LensCubeErrorCode.SYNTAX_ERROR.getLensErrorInfo().getErrorCode(),
+      "Syntax Error: line 1:5 cannot recognize input near 'sdfelect' 'ID' 'from' in select clause",
+      TestDataUtils.MOCK_STACK_TRACE);
     ErrorResponseExpectedData expectedData = new ErrorResponseExpectedData(BAD_REQUEST, expectedLensErrorTO);
 
     expectedData.verify(response);
@@ -1458,17 +1460,17 @@ public class TestQueryService extends LensJerseyTest {
     MetricRegistry reg = LensMetricsRegistry.getStaticRegistry();
 
     assertTrue(reg.getGauges().keySet().containsAll(Arrays.asList(
-        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-DRIVER_SELECTION",
-        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive1-CUBE_REWRITE",
-        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive1-DRIVER_ESTIMATE",
-        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive1-RewriteUtil-rewriteQuery",
-        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive2-CUBE_REWRITE",
-        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive2-DRIVER_ESTIMATE",
-        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive2-RewriteUtil-rewriteQuery",
-        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-jdbc/jdbc1-CUBE_REWRITE",
-        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-jdbc/jdbc1-DRIVER_ESTIMATE",
-        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-jdbc/jdbc1-RewriteUtil-rewriteQuery",
-        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-PARALLEL_ESTIMATE")),
+      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-DRIVER_SELECTION",
+      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive1-CUBE_REWRITE",
+      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive1-DRIVER_ESTIMATE",
+      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive1-RewriteUtil-rewriteQuery",
+      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive2-CUBE_REWRITE",
+      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive2-DRIVER_ESTIMATE",
+      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive2-RewriteUtil-rewriteQuery",
+      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-jdbc/jdbc1-CUBE_REWRITE",
+      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-jdbc/jdbc1-DRIVER_ESTIMATE",
+      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-jdbc/jdbc1-RewriteUtil-rewriteQuery",
+      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-PARALLEL_ESTIMATE")),
       reg.getGauges().keySet().toString());
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/b3f993d8/lens-server/src/test/java/org/apache/lens/server/rewrite/CubeKeywordRemoverTest.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/rewrite/CubeKeywordRemoverTest.java b/lens-server/src/test/java/org/apache/lens/server/rewrite/CubeKeywordRemoverTest.java
new file mode 100644
index 0000000..956d433
--- /dev/null
+++ b/lens-server/src/test/java/org/apache/lens/server/rewrite/CubeKeywordRemoverTest.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ *
+ */
+package org.apache.lens.server.rewrite;
+
+import static org.testng.Assert.assertEquals;
+
+import org.testng.annotations.DataProvider;
+import org.testng.annotations.Test;
+
+public class CubeKeywordRemoverTest {
+  CubeKeywordRemover cubeKeywordRemover = new CubeKeywordRemover();
+
+  @DataProvider
+  public Object[][] cubeQueryDataProvider() {
+    return new Object[][]{
+      {"cube select blah blah", "select blah blah"},
+      {"cube\tselect blah blah", "select blah blah"},
+      {"cube\nselect blah blah", "select blah blah"},
+      {"CUBE sElEct blAh blAh", "select blAh blAh"},
+    };
+  }
+
+  @Test(dataProvider = "cubeQueryDataProvider")
+  public void testRewrite(String userQuery, String expected) throws Exception {
+    assertEquals(cubeKeywordRemover.rewrite(userQuery, null, null), expected);
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/b3f993d8/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java b/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java
index 202db82..0e640ec 100644
--- a/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java
+++ b/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java
@@ -21,6 +21,7 @@ package org.apache.lens.server.rewrite;
 import java.util.*;
 
 import org.apache.lens.api.LensConf;
+import org.apache.lens.cube.metadata.CubeMetastoreClient;
 import org.apache.lens.cube.parse.CubeQueryContext;
 import org.apache.lens.cube.parse.CubeQueryRewriter;
 import org.apache.lens.cube.parse.HQLParser;
@@ -35,7 +36,9 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.parse.*;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.ParseException;
 
 import org.mockito.Matchers;
 import org.mockito.Mockito;
@@ -75,7 +78,19 @@ public class TestRewriting {
   // number of successful queries through mock rewriter
   // we use this number to mock failures after successful queries
   // change the number, if more tests for success needs to be added
-  static final int NUM_SUCCESS = 36;
+  static final int NUM_SUCCESS = 63;
+
+  public static CubeMetastoreClient getMockedClient() {
+    CubeMetastoreClient client = Mockito.mock(CubeMetastoreClient.class);
+    Mockito.when(client.isLensQueryableTable(Matchers.any(String.class))).thenAnswer(new Answer<Boolean>() {
+
+      @Override
+      public Boolean answer(InvocationOnMock invocationOnMock) throws Throwable {
+        return invocationOnMock.getArguments()[0].toString().toLowerCase().contains("cube");
+      }
+    });
+    return client;
+  }
 
   private CubeQueryRewriter getMockedRewriter() throws ParseException, LensException, HiveException {
     CubeQueryRewriter mockwriter = Mockito.mock(CubeQueryRewriter.class);
@@ -83,9 +98,8 @@ public class TestRewriting {
       @Override
       public CubeQueryContext answer(InvocationOnMock invocation) throws Throwable {
         Object[] args = invocation.getArguments();
-        i++;
         // return query for first NUM_SUCCESS calls and fail later
-        if (i <= NUM_SUCCESS) {
+        if (++i <= NUM_SUCCESS) {
           return getMockedCubeContext((String) args[0]);
         } else {
           throw new RuntimeException("Mock fail");
@@ -114,7 +128,8 @@ public class TestRewriting {
     throws ParseException, LensException {
     CubeQueryContext context = Mockito.mock(CubeQueryContext.class);
     Mockito.when(context.toHQL()).thenReturn(query.substring(4));
-    Mockito.when(context.toAST(Matchers.any(Context.class))).thenReturn(HQLParser.parseHQL(query.substring(4), hconf));
+    Mockito.when(context.toAST(Matchers.any(Context.class)))
+      .thenReturn(HQLParser.parseHQL(query.toLowerCase().replaceFirst("^cube", ""), hconf));
     return context;
   }
 
@@ -181,9 +196,10 @@ public class TestRewriting {
     drivers.add(driver);
 
     CubeQueryRewriter mockWriter = getMockedRewriter();
+    CubeMetastoreClient mockClient = getMockedClient();
     PowerMockito.stub(PowerMockito.method(RewriteUtil.class, "getCubeRewriter")).toReturn(mockWriter);
+    PowerMockito.stub(PowerMockito.method(RewriteUtil.class, "getClient")).toReturn(mockClient);
     String q1 = "select name from table";
-    Assert.assertFalse(RewriteUtil.isCubeQuery(q1));
     List<RewriteUtil.CubeQueryInfo> cubeQueries = RewriteUtil.findCubePositions(q1, hconf);
     Assert.assertEquals(cubeQueries.size(), 0);
     QueryContext ctx = new QueryContext(q1, null, lensConf, conf, drivers);
@@ -192,7 +208,6 @@ public class TestRewriting {
     conf.set(LensConfConstants.QUERY_METRIC_UNIQUE_ID_CONF_KEY, TestRewriting.class.getSimpleName());
     driver.configure(conf, null, null);
     String q2 = "cube select name from table";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
@@ -201,20 +216,43 @@ public class TestRewriting {
     MetricRegistry reg = LensMetricsRegistry.getStaticRegistry();
 
     Assert.assertTrue(reg.getGauges().keySet().containsAll(Arrays.asList(
-      "lens.MethodMetricGauge.TestRewriting-"+driver.getFullyQualifiedName()+"-RewriteUtil-rewriteQuery",
-      "lens.MethodMetricGauge.TestRewriting-"+driver.getFullyQualifiedName()+"-1-RewriteUtil-rewriteQuery-toHQL")));
+      "lens.MethodMetricGauge.TestRewriting-" + driver.getFullyQualifiedName() + "-RewriteUtil-rewriteQuery",
+      "lens.MethodMetricGauge.TestRewriting-" + driver.getFullyQualifiedName() + "-1-RewriteUtil-rewriteQuery-toHQL")));
     conf.unset(LensConfConstants.QUERY_METRIC_UNIQUE_ID_CONF_KEY);
 
+    assertIsCubeQuery("select name from cube_table", lensConf, conf, drivers);
+    assertIsCubeQuery("select cube_table.name from cube_table", lensConf, conf, drivers);
+    assertIsCubeQuery("select cube_dim.name, cube_dim2.name from cube_dim "
+      + "join cube_dim2 on cube_dim.d2id=cube_dim2.id", lensConf, conf, drivers);
+    assertIsCubeQuery("select distinct cube_dim.name, cube_dim2.name from cube_dim full outer "
+      + "join cube_dim2 on cube_dim.d2id=cube_dim2.id", lensConf, conf, drivers);
+    assertIsCubeQuery("select cube_dim.name, cube_dim2.name from cube_dim left outer "
+      + "join cube_dim2 on cube_dim.d2id=cube_dim2.id", lensConf, conf, drivers);
+    assertIsCubeQuery("select cube_dim.name, cube_dim2.name from cube_dim inner "
+      + "join cube_dim2 on cube_dim.d2id=cube_dim2.id", lensConf, conf, drivers);
+
+    q2 = "select * from (select name from cube_table) table1";
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
+    Assert.assertEquals(cubeQueries.size(), 1);
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
+    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
+    runRewrites(RewriteUtil.rewriteQuery(ctx));
+
     q2 = "insert overwrite directory 'target/rewrite' cube select name from table";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
+    q2 = "insert overwrite directory 'target/rewrite' select name from cube_table";
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
+    Assert.assertEquals(cubeQueries.size(), 1);
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
+    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
+    runRewrites(RewriteUtil.rewriteQuery(ctx));
+
     q2 = "insert overwrite local directory 'target/rewrite' cube select name from table";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
@@ -222,7 +260,6 @@ public class TestRewriting {
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
     q2 = "insert overwrite local directory 'target/example-output' cube select id,name from dim_table";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select id,name from dim_table");
@@ -230,7 +267,6 @@ public class TestRewriting {
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
     q2 = "explain cube select name from table";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
@@ -238,7 +274,6 @@ public class TestRewriting {
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
     q2 = "select * from (cube select name from table) a";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
@@ -246,15 +281,20 @@ public class TestRewriting {
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
     q2 = "insert overwrite directory 'target/rewrite' select * from (cube select name from table) a";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
+    q2 = "insert overwrite directory 'target/rewrite' select * from (select name from cube_table) a";
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
+    Assert.assertEquals(cubeQueries.size(), 1);
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
+    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
+    runRewrites(RewriteUtil.rewriteQuery(ctx));
+
     q2 = "select * from (cube select name from table)a";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
@@ -262,16 +302,21 @@ public class TestRewriting {
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
     q2 = "select * from  (  cube select name from table   )     a";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
+    q2 = "select * from  (  select name from cube_table   )     a";
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
+    Assert.assertEquals(cubeQueries.size(), 1);
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
+    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
+    runRewrites(RewriteUtil.rewriteQuery(ctx));
+
     q2 = "select * from (      cube select name from table where"
       + " (name = 'ABC'||name = 'XYZ')&&(key=100)   )       a";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(RewriteUtil.getReplacedQuery(q2), hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from"
@@ -279,9 +324,17 @@ public class TestRewriting {
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
+    q2 = "select * from (      select name from cube_table where"
+      + " (name = 'ABC'||name = 'XYZ')&&(key=100)   )       a";
+    cubeQueries = RewriteUtil.findCubePositions(RewriteUtil.getReplacedQuery(q2), hconf);
+    Assert.assertEquals(cubeQueries.size(), 1);
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from"
+      + " cube_table where (name = 'ABC' OR name = 'XYZ') AND (key=100)");
+    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
+    runRewrites(RewriteUtil.rewriteQuery(ctx));
+
     conf.set(LensConfConstants.QUERY_METRIC_UNIQUE_ID_CONF_KEY, TestRewriting.class.getSimpleName() + "-multiple");
     q2 = "select * from (cube select name from table) a join (cube select" + " name2 from table2) b";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 2);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
@@ -290,15 +343,15 @@ public class TestRewriting {
     runRewrites(RewriteUtil.rewriteQuery(ctx));
     reg = LensMetricsRegistry.getStaticRegistry();
     Assert.assertTrue(reg.getGauges().keySet().containsAll(Arrays.asList(
-      "lens.MethodMetricGauge.TestRewriting-"+driver.getFullyQualifiedName()+"-1-RewriteUtil-rewriteQuery-toHQL",
-      "lens.MethodMetricGauge.TestRewriting-multiple-"+driver.getFullyQualifiedName()
-        +"-2-RewriteUtil-rewriteQuery-toHQL",
-      "lens.MethodMetricGauge.TestRewriting-multiple-"+driver.getFullyQualifiedName()+"-RewriteUtil-rewriteQuery")));
+      "lens.MethodMetricGauge.TestRewriting-" + driver.getFullyQualifiedName() + "-1-RewriteUtil-rewriteQuery-toHQL",
+      "lens.MethodMetricGauge.TestRewriting-multiple-" + driver.getFullyQualifiedName()
+        + "-2-RewriteUtil-rewriteQuery-toHQL",
+      "lens.MethodMetricGauge.TestRewriting-multiple-" + driver.getFullyQualifiedName()
+        + "-RewriteUtil-rewriteQuery")));
     conf.unset(LensConfConstants.QUERY_METRIC_UNIQUE_ID_CONF_KEY);
 
     q2 = "select * from (cube select name from table) a full outer join"
       + " (cube select name2 from table2) b on a.name=b.name2";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 2);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
@@ -306,16 +359,31 @@ public class TestRewriting {
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
+    q2 = "select * from (select name from cube_table) a full outer join"
+      + " (select name2 from cube_table2) b on a.name=b.name2";
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
+    Assert.assertEquals(cubeQueries.size(), 2);
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
+    Assert.assertEquals(cubeQueries.get(1).query, "select name2 from cube_table2");
+    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
+    runRewrites(RewriteUtil.rewriteQuery(ctx));
+
     q2 = "select * from (cube select name from table) a join (select name2 from table2) b";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
+    q2 = "select * from (cube select name from table) a join (select name2 from cube_table2) b";
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
+    Assert.assertEquals(cubeQueries.size(), 2);
+    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
+    Assert.assertEquals(cubeQueries.get(1).query, "select name2 from cube_table2");
+    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
+    runRewrites(RewriteUtil.rewriteQuery(ctx));
+
     q2 = "select * from (cube select name from table union all cube select name2 from table2) u";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
@@ -323,9 +391,24 @@ public class TestRewriting {
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
     Assert.assertEquals(cubeQueries.get(1).query, "cube select name2 from table2");
 
+    q2 = "select * from (select name from cube_table union all select distinct name2 from cube_table2) u";
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
+    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
+    runRewrites(RewriteUtil.rewriteQuery(ctx));
+    Assert.assertEquals(cubeQueries.size(), 2);
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
+    Assert.assertEquals(cubeQueries.get(1).query, "select distinct name2 from cube_table2");
+
+    q2 = "select * from (select distinct name from cube_table union all select name2 from cube_table2) u";
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
+    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
+    runRewrites(RewriteUtil.rewriteQuery(ctx));
+    Assert.assertEquals(cubeQueries.size(), 2);
+    Assert.assertEquals(cubeQueries.get(0).query, "select distinct name from cube_table");
+    Assert.assertEquals(cubeQueries.get(1).query, "select name2 from cube_table2");
+
     q2 = "insert overwrite directory 'target/rewrite' "
       + "select * from (cube select name from table union all cube select name2 from table2) u";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
@@ -333,8 +416,16 @@ public class TestRewriting {
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
     Assert.assertEquals(cubeQueries.get(1).query, "cube select name2 from table2");
 
+    q2 = "insert overwrite directory 'target/rewrite' "
+      + "select * from (cube select name from table union all  select name2 from cube_table2) u";
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
+    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
+    runRewrites(RewriteUtil.rewriteQuery(ctx));
+    Assert.assertEquals(cubeQueries.size(), 2);
+    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
+    Assert.assertEquals(cubeQueries.get(1).query, "select name2 from cube_table2");
+
     q2 = "select u.* from (select name from table    union all       cube select name2 from table2)   u";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name2 from table2");
@@ -342,7 +433,6 @@ public class TestRewriting {
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
     q2 = "select u.* from (select name from table union all cube select name2 from table2)u";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name2 from table2");
@@ -351,7 +441,6 @@ public class TestRewriting {
 
     q2 = "select * from (cube select name from table union all cube select name2"
       + " from table2 union all cube select name3 from table3) u";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
@@ -362,7 +451,6 @@ public class TestRewriting {
 
     q2 = "select * from   (     cube select name from table    union all   cube"
       + " select name2 from table2   union all  cube select name3 from table3 )  u";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
@@ -372,7 +460,6 @@ public class TestRewriting {
     Assert.assertEquals(cubeQueries.get(2).query, "cube select name3 from table3");
 
     q2 = "select * from (cube select name from table union all cube select" + " name2 from table2) u group by u.name";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 2);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
@@ -380,8 +467,15 @@ public class TestRewriting {
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
+    q2 = "select * from (cube select name from table union all select" + " name2 from cube_table2) u group by u.name";
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
+    Assert.assertEquals(cubeQueries.size(), 2);
+    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
+    Assert.assertEquals(cubeQueries.get(1).query, "select name2 from cube_table2");
+    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
+    runRewrites(RewriteUtil.rewriteQuery(ctx));
+
     q2 = "select * from (cube select name from table union all cube select" + " name2 from table2)  u group by u.name";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
@@ -390,16 +484,21 @@ public class TestRewriting {
     Assert.assertEquals(cubeQueries.get(1).query, "cube select name2 from table2");
 
     q2 = "create table temp1 as cube select name from table";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
 
+    q2 = "create table temp1 as select name from cube_table";
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
+    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
+    runRewrites(RewriteUtil.rewriteQuery(ctx));
+    Assert.assertEquals(cubeQueries.size(), 1);
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
+
     q2 = "create table temp1 as select * from (cube select name from table union all cube select"
       + " name2 from table2)  u group by u.name";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
@@ -407,9 +506,18 @@ public class TestRewriting {
     Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
     Assert.assertEquals(cubeQueries.get(1).query, "cube select name2 from table2");
 
+    q2 = "create table temp1 as select * from (select name from cube_table union all cube select"
+      + " name2 from table2)  u group by u.name";
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
+    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
+    runRewrites(RewriteUtil.rewriteQuery(ctx));
+    Assert.assertEquals(cubeQueries.size(), 2);
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
+    Assert.assertEquals(cubeQueries.get(1).query, "cube select name2 from table2");
+
+
     q2 = "create table temp1 as cube select name from table where"
       + " time_range_in('dt', '2014-06-24-23', '2014-06-25-00')";
-    Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
@@ -417,6 +525,15 @@ public class TestRewriting {
     Assert.assertEquals(cubeQueries.get(0).query,
       "cube select name from table where time_range_in('dt', '2014-06-24-23', '2014-06-25-00')");
 
+    q2 = "create table temp1 as select name from cube_table where"
+      + " time_range_in('dt', '2014-06-24-23', '2014-06-25-00')";
+    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
+    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
+    runRewrites(RewriteUtil.rewriteQuery(ctx));
+    Assert.assertEquals(cubeQueries.size(), 1);
+    Assert.assertEquals(cubeQueries.get(0).query,
+      "select name from cube_table where time_range_in('dt', '2014-06-24-23', '2014-06-25-00')");
+
     // failing query for second driver
     MockDriver driver2 = new MockDriver();
     driver2.configure(conf, null, null);
@@ -460,4 +577,13 @@ public class TestRewriting {
     Assert.assertNull(runnables.get(driver2).getRewrittenQuery());
     Assert.assertNotNull(ctx.getDriverRewriteError(driver2));
   }
+
+  private void assertIsCubeQuery(String query, LensConf lensConf, Configuration conf, List<LensDriver> drivers)
+    throws LensException {
+    List<RewriteUtil.CubeQueryInfo> cubeQueries = RewriteUtil.findCubePositions(query, hconf);
+    Assert.assertEquals(cubeQueries.size(), 1);
+    Assert.assertEquals(cubeQueries.get(0).query, query);
+    QueryContext ctx = new QueryContext(query, null, lensConf, conf, drivers);
+    runRewrites(RewriteUtil.rewriteQuery(ctx));
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/b3f993d8/src/site/apt/user/olap-cube.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/user/olap-cube.apt b/src/site/apt/user/olap-cube.apt
index c47ac30..4bed623 100644
--- a/src/site/apt/user/olap-cube.apt
+++ b/src/site/apt/user/olap-cube.apt
@@ -283,7 +283,7 @@ lens-shell>
 
 +---+
 
-  CUBE SELECT [DISTINCT] select_expr, select_expr, ...
+  [CUBE] SELECT [DISTINCT] select_expr, select_expr, ...
   FROM cube_table_reference
   [WHERE [where_condition AND] [TIME_RANGE_IN(colName, from, to)]]
   [GROUP BY col_list]


[20/51] [abbrv] lens git commit: LENS-719 : Fix setting start time for a fact's storage/update period

Posted by de...@apache.org.
LENS-719 : Fix setting start time for a fact's storage/update period


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/71cf9ffd
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/71cf9ffd
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/71cf9ffd

Branch: refs/heads/current-release-line
Commit: 71cf9ffd0a63b289dd7777a09d541d0932d6feb9
Parents: 1b475f2
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Fri Jan 8 14:55:58 2016 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Fri Jan 8 14:55:58 2016 +0530

----------------------------------------------------------------------
 .../lens/cube/metadata/CubeMetastoreClient.java | 31 ++++++++++++++++++++
 .../lens/cube/parse/StorageTableResolver.java   | 28 +-----------------
 .../cube/metadata/TestCubeMetastoreClient.java  | 21 +++++++++++++
 3 files changed, 53 insertions(+), 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/71cf9ffd/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index e7550ca..ae0fb90 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@ -19,6 +19,7 @@
 
 package org.apache.lens.cube.metadata;
 
+import static org.apache.lens.cube.metadata.DateUtil.resolveDate;
 import static org.apache.lens.cube.metadata.MetastoreUtil.*;
 
 import java.text.ParseException;
@@ -2162,4 +2163,34 @@ public class CubeMetastoreClient {
       throw new HiveException(dimTableName + " is not a dimension table");
     }
   }
+  public boolean isStorageTableCandidateForRange(String storageTableName, Date fromDate, Date toDate) throws
+    HiveException, LensException {
+    Date now = new Date();
+    String startProperty = getTable(storageTableName).getProperty(getStoragetableStartTimesKey());
+    if (StringUtils.isNotBlank(startProperty)) {
+      for (String timeStr : startProperty.split("\\s*,\\s*")) {
+        if (fromDate.before(resolveDate(timeStr, now))) {
+          log.info("from date {} is before validity start time: {}, hence discarding {}",
+            fromDate, timeStr, storageTableName);
+          return false;
+        }
+      }
+    }
+    String endProperty = getTable(storageTableName).getProperty(getStoragetableEndTimesKey());
+    if (StringUtils.isNotBlank(endProperty)) {
+      for (String timeStr : endProperty.split("\\s*,\\s*")) {
+        if (toDate.after(resolveDate(timeStr, now))) {
+          log.info("to date {} is after validity end time: {}, hence discarding {}",
+            toDate, timeStr, storageTableName);
+          return false;
+        }
+      }
+    }
+    return true;
+  }
+  public boolean isStorageTableCandidateForRange(String storageTableName, String fromDate, String toDate) throws
+    HiveException, LensException {
+    Date now = new Date();
+    return isStorageTableCandidateForRange(storageTableName, resolveDate(fromDate, now), resolveDate(toDate, now));
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/71cf9ffd/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index 14def15..de5f95e 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -565,7 +565,7 @@ class StorageTableResolver implements ContextRewriter {
     Iterator<String> it = storageTbls.iterator();
     while (it.hasNext()) {
       String storageTableName = it.next();
-      if (!isStorageTableCandidateForRange(storageTableName, fromDate, toDate)) {
+      if (!client.isStorageTableCandidateForRange(storageTableName, fromDate, toDate)) {
         skipStorageCauses.put(storageTableName, new SkipStorageCause(RANGE_NOT_ANSWERABLE));
         it.remove();
       } else if (!client.partColExists(storageTableName, partCol)) {
@@ -685,32 +685,6 @@ class StorageTableResolver implements ContextRewriter {
         updatePeriods, addNonExistingParts, failOnPartialData, skipStorageCauses, missingPartitions);
   }
 
-  private boolean isStorageTableCandidateForRange(String storageTableName, Date fromDate, Date toDate) throws
-    HiveException, LensException {
-    Date now = new Date();
-    String startProperty = client.getTable(storageTableName).getProperty(getStoragetableStartTimesKey());
-    if (StringUtils.isNotBlank(startProperty)) {
-      for (String timeStr : startProperty.split("\\s*,\\s*")) {
-        if (toDate.before(DateUtil.resolveDate(timeStr, now))) {
-          log.info("from date {} is before validity start time: {}, hence discarding {}",
-            toDate, timeStr, storageTableName);
-          return false;
-        }
-      }
-    }
-    String endProperty = client.getTable(storageTableName).getProperty(getStoragetableEndTimesKey());
-    if (StringUtils.isNotBlank(endProperty)) {
-      for (String timeStr : endProperty.split("\\s*,\\s*")) {
-        if (fromDate.after(DateUtil.resolveDate(timeStr, now))) {
-          log.info("to date {} is after validity end time: {}, hence discarding {}",
-            fromDate, timeStr, storageTableName);
-          return false;
-        }
-      }
-    }
-    return true;
-  }
-
   private void updateFactPartitionStorageTablesFrom(CubeFactTable fact,
     FactPartition part, Set<String> storageTableNames) throws LensException, HiveException, ParseException {
     for (String storageTableName : storageTableNames) {

http://git-wip-us.apache.org/repos/asf/lens/blob/71cf9ffd/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
index 0fef13f..1638825 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
@@ -773,6 +773,10 @@ public class TestCubeMetastoreClient {
 
     StorageTableDesc s1 = new StorageTableDesc(TextInputFormat.class, HiveIgnoreKeyTextOutputFormat.class,
       datePartSingleton, datePartKeySingleton);
+
+    s1.getTblProps().put(MetastoreUtil.getStoragetableStartTimesKey(), "2015, now-10 days");
+    s1.getTblProps().put(MetastoreUtil.getStoragetableEndTimesKey(), "now - 1 day");
+
     Map<String, Set<UpdatePeriod>> updatePeriods = getHashMap(c1, Sets.newHashSet(HOURLY, DAILY));
     Map<String, StorageTableDesc> storageTables = getHashMap(c1, s1);
 
@@ -794,6 +798,9 @@ public class TestCubeMetastoreClient {
       String storageTableName = getFactOrDimtableStorageTableName(factName, entry);
       assertTrue(client.tableExists(storageTableName));
     }
+    String storageTable = getFactOrDimtableStorageTableName(factName, c1);
+    assertRangeValidityForStorageTable(storageTable);
+
 
     Map<String, Date> timeParts = getTimePartitionByOffsets(getDatePartitionKey(), 0, "non_existing_part_col", 0);
     // test error on adding invalid partition
@@ -847,6 +854,20 @@ public class TestCubeMetastoreClient {
     assertFalse(client.latestPartitionExists(cubeFact.getName(), c1, getDatePartitionKey()));
   }
 
+  private void assertRangeValidityForStorageTable(String storageTable) throws HiveException, LensException {
+    Object[][] testCases = new Object[][] {
+      {"now - 15 days", "now - 11 days", false},
+      {"now - 15 days", "now - 1 hour", false},
+      {"now - 9 days", "now - 1 hour", false},
+      {"now - 3 hour", "now - 1 hour", false},
+      {"now - 9 days", "now - 2 days", true},
+    };
+    for(Object[] testCase: testCases) {
+      assertEquals(client.isStorageTableCandidateForRange(storageTable, testCase[0].toString(), testCase[1].toString()),
+        testCase[2]);
+    }
+  }
+
   @Test(priority = 2)
   public void testAlterCubeFact() throws Exception {
     String factName = "test_alter_fact";


[07/51] [abbrv] lens git commit: LENS-270 : The exception thrown for no candidate fact should contain only brief error

Posted by de...@apache.org.
LENS-270 : The exception thrown for no candidate fact should contain only brief error


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/36166a2e
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/36166a2e
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/36166a2e

Branch: refs/heads/current-release-line
Commit: 36166a2e58a2a89bd97dc8595cb7920fbf4253d8
Parents: bf4c0be
Author: Sushil Mohanty <su...@apache.org>
Authored: Mon Dec 14 09:57:45 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Mon Dec 14 09:57:45 2015 +0530

----------------------------------------------------------------------
 .../NoCandidateFactAvailableException.java      | 48 ++++++++++++++++++++
 .../lens/cube/parse/CubeQueryContext.java       |  4 +-
 .../org/apache/lens/cube/parse/PruneCauses.java | 12 +++++
 .../lens/cube/parse/TestBaseCubeQueries.java    | 19 +++++++-
 .../lens/cube/parse/TestCubeRewriter.java       |  7 ++-
 .../cube/parse/TestDenormalizationResolver.java |  4 +-
 .../lens/cube/parse/TestTimeRangeResolver.java  |  4 +-
 7 files changed, 91 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/36166a2e/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java b/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
new file mode 100644
index 0000000..b2568ff
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.error;
+
+import org.apache.lens.cube.metadata.CubeFactTable;
+import org.apache.lens.cube.parse.PruneCauses;
+import org.apache.lens.server.api.error.LensException;
+
+
+public class NoCandidateFactAvailableException extends LensException {
+
+  private final PruneCauses<CubeFactTable> briefAndDetailedError;
+
+  public NoCandidateFactAvailableException(PruneCauses<CubeFactTable> briefAndDetailedError) {
+    super(LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo(), briefAndDetailedError.getBriefCause());
+    this.briefAndDetailedError = briefAndDetailedError;
+  }
+
+  public PruneCauses.BriefAndDetailedError getJsonMessage() {
+    return briefAndDetailedError.toJsonObject();
+  }
+
+  @Override
+  public int compareTo(LensException e) {
+    //Compare the max CandidateTablePruneCode coming from different instances.
+    if (e instanceof NoCandidateFactAvailableException) {
+      return briefAndDetailedError.getMaxCause().compareTo(
+               ((NoCandidateFactAvailableException) e).briefAndDetailedError.getMaxCause());
+    }
+    return super.compareTo(e);
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/36166a2e/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index cf114c9..f75a6b9 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -32,6 +32,7 @@ import java.io.IOException;
 import java.util.*;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.cube.error.NoCandidateFactAvailableException;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.server.api.error.LensException;
@@ -845,7 +846,8 @@ public class CubeQueryContext implements TrackQueriedColumns {
             }
           }
         }
-        throw new LensException(LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo(), reason);
+        log.error("Query rewrite failed due to NO_CANDIDATE_FACT_AVAILABLE, Cause {}", factPruningMsgs.toJsonObject());
+        throw new NoCandidateFactAvailableException(factPruningMsgs);
       }
     }
     return facts;

http://git-wip-us.apache.org/repos/asf/lens/blob/36166a2e/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
index 7a92b3b..9b5a52f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
@@ -41,6 +41,8 @@ public class PruneCauses<T extends AbstractCubeTable> extends HashMap<T, List<Ca
   private final HashMap<CandidateTablePruneCause, List<T>> reversed = reverse();
   @Getter(lazy = true)
   private final HashMap<String, List<CandidateTablePruneCause>> compact = computeCompact();
+  @Getter(lazy = true)
+  private final CandidateTablePruneCode maxCause  = computeMaxCause();
 
   private HashMap<String, List<CandidateTablePruneCause>> computeCompact() {
     HashMap<String, List<CandidateTablePruneCause>> detailedMessage = Maps.newHashMap();
@@ -81,6 +83,16 @@ public class PruneCauses<T extends AbstractCubeTable> extends HashMap<T, List<Ca
     return new BriefAndDetailedError(getBriefCause(), getCompact());
   }
 
+  private CandidateTablePruneCode computeMaxCause() {
+    CandidateTablePruneCode maxCause = CandidateTablePruneCode.values()[0];
+    for (CandidateTablePruneCause cause : getReversed().keySet()) {
+      if (cause.getCause().compareTo(maxCause) > 0) {
+        maxCause = cause.getCause();
+      }
+    }
+    return maxCause;
+  }
+
   public String getBriefCause() {
     CandidateTablePruneCode maxCause = CandidateTablePruneCode.values()[0];
     for (CandidateTablePruneCause cause : getReversed().keySet()) {

http://git-wip-us.apache.org/repos/asf/lens/blob/36166a2e/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index 97c6d08..a5886dc 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -34,6 +34,7 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.cube.error.NoCandidateFactAvailableException;
 import org.apache.lens.cube.metadata.TimeRange;
 import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
@@ -67,6 +68,18 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
   }
 
   @Test
+  public void testNoCandidateFactAvailableExceptionCompareTo() throws Exception {
+    //maxCause : COLUMN_NOT_FOUND, Ordinal : 9
+    NoCandidateFactAvailableException ne1 =(NoCandidateFactAvailableException)
+            getLensExceptionInRewrite("select dim1, test_time_dim, msr3, msr13 from basecube where "
+            + TWO_DAYS_RANGE, conf);
+    //maxCause : FACT_NOT_AVAILABLE_IN_RANGE, Ordinal : 1
+    NoCandidateFactAvailableException ne2 = (NoCandidateFactAvailableException)
+            getLensExceptionInRewrite("cube select dim1 from " + cubeName + " where " + LAST_YEAR_RANGE, getConf());
+    assertEquals(ne1.compareTo(ne2), 8);
+  }
+
+  @Test
   public void testColumnErrors() throws Exception {
     LensException e;
 
@@ -78,7 +91,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
       + TWO_DAYS_RANGE, conf);
     assertEquals(e.getErrorCode(),
         LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
-    PruneCauses.BriefAndDetailedError pruneCauses = extractPruneCause(e);
+    NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) e;
+    PruneCauses.BriefAndDetailedError pruneCauses = ne.getJsonMessage();
     String regexp = String.format(CandidateTablePruneCause.CandidateTablePruneCode.COLUMN_NOT_FOUND.errorFormat,
       "Column Sets: (.*?)", "queriable together");
     Matcher matcher = Pattern.compile(regexp).matcher(pruneCauses.getBrief());
@@ -494,7 +508,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
     LensException exc =
       getLensExceptionInRewrite("cube select msr12 from basecube where " + TWO_DAYS_RANGE, conf);
-    PruneCauses.BriefAndDetailedError pruneCause = extractPruneCause(exc);
+    NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) exc;
+    PruneCauses.BriefAndDetailedError pruneCause = ne.getJsonMessage();
     assertTrue(pruneCause.getBrief().contains("Missing partitions"));
     assertEquals(pruneCause.getDetails().get("testfact2_base").iterator().next().getCause(), MISSING_PARTITIONS);
     assertEquals(pruneCause.getDetails().get("testfact2_base").iterator().next().getMissingPartitions().size(), 1);

http://git-wip-us.apache.org/repos/asf/lens/blob/36166a2e/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 0f05556..802ff42 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -32,6 +32,7 @@ import java.text.SimpleDateFormat;
 import java.util.*;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.cube.error.NoCandidateFactAvailableException;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCause;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode;
@@ -186,7 +187,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     LensException th = getLensExceptionInRewrite(
       "select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     assertEquals(th.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
-    PruneCauses.BriefAndDetailedError pruneCauses = extractPruneCause(th);
+    NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) th;
+    PruneCauses.BriefAndDetailedError pruneCauses = ne.getJsonMessage();
     int endIndex = MISSING_PARTITIONS.errorFormat.length() - 3;
     assertEquals(
       pruneCauses.getBrief().substring(0, endIndex),
@@ -1031,7 +1033,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     LensException e = getLensExceptionInRewrite(
       "select SUM(msr2) from testCube" + " where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
     assertEquals(e.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
-    PruneCauses.BriefAndDetailedError pruneCauses = extractPruneCause(e);
+    NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) e;
+    PruneCauses.BriefAndDetailedError pruneCauses = ne.getJsonMessage();
 
     assertEquals(
       pruneCauses.getBrief().substring(0, MISSING_PARTITIONS.errorFormat.length() - 3),

http://git-wip-us.apache.org/repos/asf/lens/blob/36166a2e/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
index 36c1dba..af9daad 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
@@ -24,6 +24,7 @@ import static org.apache.lens.cube.parse.CubeTestSetup.*;
 
 import java.util.*;
 
+import org.apache.lens.cube.error.NoCandidateFactAvailableException;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.server.api.error.LensException;
 
@@ -149,7 +150,8 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
     TestCubeRewriter.compareQueries(hqlQuery, expected);
     LensException e = getLensExceptionInRewrite(
       "select dim2big2, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE, tconf);
-    PruneCauses.BriefAndDetailedError error = extractPruneCause(e);
+    NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) e;
+    PruneCauses.BriefAndDetailedError error = ne.getJsonMessage();
     Assert.assertEquals(error.getBrief(), CandidateTablePruneCode.NO_CANDIDATE_STORAGES.errorFormat);
 
     HashMap<String, List<CandidateTablePruneCause>> details = error.getDetails();

http://git-wip-us.apache.org/repos/asf/lens/blob/36166a2e/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
index da0e4f4..2ac837d 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
@@ -29,6 +29,7 @@ import static org.testng.Assert.assertTrue;
 import java.util.List;
 import java.util.Set;
 
+import org.apache.lens.cube.error.NoCandidateFactAvailableException;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
@@ -66,7 +67,8 @@ public class TestTimeRangeResolver extends TestQueryRewrite {
     LensException e =
       getLensExceptionInRewrite("cube select msr2 from " + cubeName + " where " + LAST_YEAR_RANGE,
         getConf());
-    PruneCauses.BriefAndDetailedError causes = extractPruneCause(e);
+    NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) e;
+    PruneCauses.BriefAndDetailedError causes = ne.getJsonMessage();
     assertTrue(causes.getBrief().contains("Columns [msr2] are not present in any table"));
     assertEquals(causes.getDetails().size(), 2);
 


[42/51] [abbrv] lens git commit: LENS-813: For multifact queries, having clauses are getting added to both sub queries.

Posted by de...@apache.org.
LENS-813: For multifact queries, having clauses are getting added to both sub queries.


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/2f0e5fdb
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/2f0e5fdb
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/2f0e5fdb

Branch: refs/heads/current-release-line
Commit: 2f0e5fdbf079c301bcd41724e61b90b9af4fad33
Parents: 2539f33
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Mon Feb 1 21:23:01 2016 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Mon Feb 1 21:23:01 2016 +0530

----------------------------------------------------------------------
 .../lens/cube/error/LensCubeErrorCode.java      |   1 -
 .../apache/lens/cube/parse/AliasDecider.java    |  26 ++
 .../apache/lens/cube/parse/CandidateFact.java   |  72 ++++--
 .../lens/cube/parse/CubeQueryContext.java       |  36 ++-
 .../lens/cube/parse/DefaultAliasDecider.java    |  31 +++
 .../lens/cube/parse/ExpressionResolver.java     |   4 +-
 .../org/apache/lens/cube/parse/HQLParser.java   |  41 +++
 .../lens/cube/parse/MultiFactHQLContext.java    |  85 ++++++-
 .../parse/SingleFactMultiStorageHQLContext.java |  50 +---
 .../lens/cube/parse/TestBaseCubeQueries.java    | 249 +++++++++++++++++--
 10 files changed, 491 insertions(+), 104 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
index 61d08b2..e49b58b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
@@ -57,7 +57,6 @@ public enum LensCubeErrorCode {
   STORAGE_UNION_DISABLED(3031, 1500),
   COULD_NOT_PARSE_EXPRESSION(3032, 1500),
   QUERIED_TABLE_NOT_FOUND(3033, 0),
-
   // Error codes greater than 3100 are errors while doing a metastore operation.
   ERROR_IN_ENTITY_DEFINITION(3101, 100),
   TIMELINE_ABSENT(3102, 100),

http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasDecider.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasDecider.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasDecider.java
new file mode 100644
index 0000000..e9ddb9f
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasDecider.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+
+
+public interface AliasDecider {
+  String decideAlias(ASTNode node);
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
index c305244..4faebe1 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
@@ -29,6 +29,7 @@ import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
@@ -54,19 +55,26 @@ public class CandidateFact implements CandidateTable, QueryAST {
   private final Set<FactPartition> partsQueried = Sets.newHashSet();
 
   private CubeInterface baseTable;
-  @Getter @Setter
+  @Getter
+  @Setter
   private ASTNode selectAST;
-  @Getter @Setter
+  @Getter
+  @Setter
   private ASTNode whereAST;
-  @Getter @Setter
+  @Getter
+  @Setter
   private ASTNode groupByAST;
-  @Getter @Setter
+  @Getter
+  @Setter
   private ASTNode havingAST;
-  @Getter @Setter
+  @Getter
+  @Setter
   private ASTNode joinAST;
-  @Getter @Setter
+  @Getter
+  @Setter
   private ASTNode orderByAST;
-  @Getter @Setter
+  @Getter
+  @Setter
   private Integer limitValue;
   private List<TimeRangeNode> timenodes = Lists.newArrayList();
   private final List<Integer> selectIndices = Lists.newArrayList();
@@ -103,6 +111,40 @@ public class CandidateFact implements CandidateTable, QueryAST {
     return (!timeRange.getFromDate().before(fact.getStartTime())) && (!timeRange.getToDate().after(fact.getEndTime()));
   }
 
+  public void addToHaving(ASTNode ast) {
+    if (getHavingAST() == null) {
+      setHavingAST(new ASTNode(new CommonToken(TOK_HAVING, "TOK_HAVING")));
+      getHavingAST().addChild(ast);
+      return;
+    }
+    ASTNode existingHavingAST = (ASTNode) getHavingAST().getChild(0);
+    ASTNode newHavingAST = new ASTNode(new CommonToken(KW_AND, "KW_AND"));
+    newHavingAST.addChild(ast);
+    newHavingAST.addChild(existingHavingAST);
+    getHavingAST().setChild(0, newHavingAST);
+  }
+
+  public String addAndGetAliasFromSelect(ASTNode ast, AliasDecider aliasDecider) {
+    for (Node n : getSelectAST().getChildren()) {
+      ASTNode astNode = (ASTNode) n;
+      if (HQLParser.equalsAST(ast, (ASTNode) astNode.getChild(0))) {
+        if (astNode.getChildCount() > 1) {
+          return astNode.getChild(1).getText();
+        }
+        String alias = aliasDecider.decideAlias(astNode);
+        astNode.addChild(new ASTNode(new CommonToken(Identifier, alias)));
+        return alias;
+      }
+    }
+    // Not found, have to add to select
+    String alias = aliasDecider.decideAlias(ast);
+    ASTNode selectExprNode = new ASTNode(new CommonToken(TOK_SELEXPR));
+    selectExprNode.addChild(ast);
+    selectExprNode.addChild(new ASTNode(new CommonToken(Identifier, alias)));
+    getSelectAST().addChild(selectExprNode);
+    return alias;
+  }
+
   static class TimeRangeNode {
     ASTNode timenode;
     ASTNode parent;
@@ -129,15 +171,17 @@ public class CandidateFact implements CandidateTable, QueryAST {
     if (cubeql.getGroupByAST() != null) {
       setGroupByAST(HQLParser.copyAST(cubeql.getGroupByAST()));
     }
-    if (cubeql.getHavingAST() != null) {
-      setHavingAST(HQLParser.copyAST(cubeql.getHavingAST()));
-    }
   }
 
+
   public String getWhereClause(String storageTable) {
     return getStorgeWhereClauseMap().get(storageTable);
   }
 
+  public boolean isExpressionAnswerable(ASTNode node, CubeQueryContext context) throws LensException {
+    return getColumns().containsAll(getColsInExpr(context, context.getCube().getAllFieldNames(), node));
+  }
+
   /**
    * Update the ASTs to include only the fields queried from this fact, in all the expressions
    *
@@ -179,11 +223,10 @@ public class CandidateFact implements CandidateTable, QueryAST {
       currentChild++;
     }
 
-    // update whereAST to include only filters of this fact
-    // TODO
+    // don't need to update where ast, since where is only on dim attributes and dim attributes
+    // are assumed to be common in multi fact queries.
 
-    // update havingAST to include only filters of this fact
-    // TODO
+    // push down of having clauses happens just after this call in cubequerycontext
   }
 
   private Set<String> getColsInExpr(final CubeQueryContext cubeql, final Set<String> cubeCols,
@@ -304,7 +347,6 @@ public class CandidateFact implements CandidateTable, QueryAST {
   }
 
 
-
   /**
    * @return the selectIndices
    */

http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index 79dd88c..ebf8875 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -25,6 +25,8 @@ import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 
 import static com.google.common.base.Preconditions.checkArgument;
 
+
+
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.util.*;
@@ -132,9 +134,11 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
   protected final Map<Dimension, Set<CandidateDim>> candidateDims = new HashMap<Dimension, Set<CandidateDim>>();
 
   // query trees
-  @Getter @Setter
+  @Getter
+  @Setter
   private ASTNode havingAST;
-  @Getter @Setter
+  @Getter
+  @Setter
   private ASTNode selectAST;
 
   // Will be set after the Fact is picked and time ranges replaced
@@ -142,7 +146,8 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
   @Setter
   private ASTNode whereAST;
 
-  @Getter @Setter
+  @Getter
+  @Setter
   private ASTNode orderByAST;
   // Setter is used in promoting the select when promotion is on.
   @Getter
@@ -352,7 +357,7 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
 
   // map of ref column in query to set of Dimension that have the column - which are added as optional dims
   @Getter
-  private Map<String, Set<Aliased<Dimension>>>  refColToDim = Maps.newHashMap();
+  private Map<String, Set<Aliased<Dimension>>> refColToDim = Maps.newHashMap();
 
   public void updateRefColDim(String col, Aliased<Dimension> dim) {
     Set<Aliased<Dimension>> refDims = refColToDim.get(col.toLowerCase());
@@ -369,10 +374,11 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
     private String exprCol;
     private String alias;
   }
+
   // map of expression column in query to set of Dimension that are accessed in the expression column - which are added
   // as optional dims
   @Getter
-  private Map<QueriedExprColumn, Set<Aliased<Dimension>>>  exprColToDim = Maps.newHashMap();
+  private Map<QueriedExprColumn, Set<Aliased<Dimension>>> exprColToDim = Maps.newHashMap();
 
   public void updateExprColDim(String tblAlias, String col, Aliased<Dimension> dim) {
 
@@ -400,7 +406,7 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
   }
 
   public void addOptionalJoinDimTable(String alias, boolean isRequired) throws LensException {
-    addOptionalDimTable(alias, null, isRequired, null, false, (String[])null);
+    addOptionalDimTable(alias, null, isRequired, null, false, (String[]) null);
   }
 
   public void addOptionalExprDimTable(String dimAlias, String queriedExpr, String srcTableAlias,
@@ -665,6 +671,7 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
   public Integer getLimitValue() {
     return qb.getParseInfo().getDestLimit(getClause());
   }
+
   public void setLimitValue(Integer value) {
     qb.getParseInfo().setDestLimit(getClause(), value);
   }
@@ -809,7 +816,7 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
             }
           }
           log.error("Query rewrite failed due to NO_CANDIDATE_DIM_AVAILABLE, Cause {}",
-                  dimPruningMsgs.get(dim).toJsonObject());
+            dimPruningMsgs.get(dim).toJsonObject());
           throw new NoCandidateDimAvailableException(dimPruningMsgs.get(dim));
         }
       }
@@ -852,8 +859,10 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
   }
 
   private HQLContextInterface hqlContext;
-  @Getter private Collection<CandidateFact> pickedFacts;
-  @Getter private Collection<CandidateDim> pickedDimTables;
+  @Getter
+  private Collection<CandidateFact> pickedFacts;
+  @Getter
+  private Collection<CandidateDim> pickedDimTables;
 
   private void addRangeClauses(CandidateFact fact) throws LensException {
     if (fact != null) {
@@ -881,7 +890,7 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
       autoJoinCtx.pruneAllPaths(cube, cfacts, dimsToQuery);
     }
 
-    Map<CandidateFact, Set<Dimension>> factDimMap = new HashMap<CandidateFact, Set<Dimension>>();
+    Map<CandidateFact, Set<Dimension>> factDimMap = new HashMap<>();
     if (cfacts != null) {
       if (cfacts.size() > 1) {
         // copy ASTs for each fact
@@ -890,8 +899,6 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
           factDimMap.put(cfact, new HashSet<>(dimsToQuery.keySet()));
         }
       }
-    }
-    if (cfacts != null) {
       for (CandidateFact fact : cfacts) {
         addRangeClauses(fact);
       }
@@ -907,6 +914,9 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
           factDimMap.get(cfact).addAll(factExprDimTables);
         }
       }
+      if (cfacts.size() > 1) {
+        havingAST = MultiFactHQLContext.pushDownHaving(havingAST, this, cfacts);
+      }
     } else {
       // dim only query
       exprDimensions.addAll(exprCtx.rewriteExprCtx(null, dimsToQuery, this));
@@ -957,6 +967,7 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
         for (CandidateFact cfact : cfacts) {
           cfact.updateASTs(this);
         }
+        whereAST = MultiFactHQLContext.convertHavingToWhere(havingAST, this, cfacts, new DefaultAliasDecider());
       }
     }
     hqlContext = createHQLContext(cfacts, dimsToQuery, factDimMap);
@@ -1069,6 +1080,7 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
 
     return isCubeMeasure(msrname);
   }
+
   public boolean isAggregateExpr(String expr) {
     return aggregateExprs.contains(expr == null ? null : expr.toLowerCase());
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
new file mode 100644
index 0000000..dadbfa0
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+
+
+public class DefaultAliasDecider implements AliasDecider {
+  int counter = 0;
+  private static final String ALIAS_PREFIX = "alias";
+
+  public String decideAlias(ASTNode node) {
+    return ALIAS_PREFIX + (counter++);
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
index 5ff265d..fa81831 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
@@ -465,7 +465,9 @@ class ExpressionResolver implements ContextRewriter {
       replaceAST(cubeql, queryAST.getWhereAST());
       replaceAST(cubeql, queryAST.getJoinAST());
       replaceAST(cubeql, queryAST.getGroupByAST());
-      replaceAST(cubeql, queryAST.getHavingAST());
+      // Having AST is resolved by each fact, so that all facts can expand their expressions.
+      // Having ast is not copied now, it's maintained in cubeql, each fact processes that serially.
+      replaceAST(cubeql, cubeql.getHavingAST());
       replaceAST(cubeql, cubeql.getOrderByAST());
     }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
index c9aff5d..fdef3f1 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
@@ -41,6 +41,8 @@ import org.antlr.runtime.tree.Tree;
 
 import com.google.common.base.Optional;
 
+import com.google.common.collect.Sets;
+import lombok.Data;
 import lombok.extern.slf4j.Slf4j;
 
 
@@ -81,6 +83,7 @@ public final class HQLParser {
   }
 
   public static final Set<Integer> BINARY_OPERATORS;
+  public static final Set<Integer> FILTER_OPERATORS;
   public static final Set<Integer> ARITHMETIC_OPERATORS;
   public static final Set<Integer> UNARY_OPERATORS;
   public static final Set<Integer> PRIMITIVE_TYPES;
@@ -140,6 +143,9 @@ public final class HQLParser {
     primitiveTypes.add(TOK_VARCHAR);
     primitiveTypes.add(TOK_CHAR);
     PRIMITIVE_TYPES = Collections.unmodifiableSet(primitiveTypes);
+
+    FILTER_OPERATORS = Sets.newHashSet(KW_IN, GREATERTHAN, GREATERTHANOREQUALTO, LESSTHAN, LESSTHANOREQUALTO, EQUAL,
+      EQUAL_NS);
   }
 
   public static boolean isArithmeticOp(int tokenType) {
@@ -840,4 +846,39 @@ public final class HQLParser {
     }
     return node;
   }
+  @Data
+  public static class HashableASTNode {
+    private ASTNode ast;
+    private int hashCode = -1;
+    private boolean hashCodeComputed = false;
+
+    public HashableASTNode(ASTNode ast) {
+      this.ast = ast;
+    }
+
+    public void setAST(ASTNode ast) {
+      this.ast = ast;
+      hashCodeComputed = false;
+    }
+
+    public ASTNode getAST() {
+      return ast;
+    }
+
+    @Override
+    public int hashCode() {
+      if (!hashCodeComputed) {
+        hashCode = getString(ast).hashCode();
+        hashCodeComputed = true;
+      }
+      return hashCode;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+      return o instanceof HashableASTNode && this.hashCode() == o.hashCode() && getString(this.getAST())
+        .trim().equalsIgnoreCase(getString(((HashableASTNode) o).getAST()).trim());
+    }
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
index 1a729f8..9c18b7e 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
@@ -18,20 +18,28 @@
  */
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.parse.HQLParser.*;
+
 import java.util.*;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.Dimension;
 import org.apache.lens.server.api.error.LensException;
 
+import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+
+import org.antlr.runtime.CommonToken;
 
 import com.google.common.collect.Lists;
+import lombok.extern.slf4j.Slf4j;
 
 /**
  * Writes a join query with all the facts involved, with where, groupby and having expressions pushed down to the fact
  * queries.
  */
+@Slf4j
 class MultiFactHQLContext extends SimpleHQLContext {
 
   private Set<CandidateFact> facts;
@@ -76,7 +84,7 @@ class MultiFactHQLContext extends SimpleHQLContext {
   }
 
   private String getWhereString() {
-    return null;
+    return query.getWhereTree();
   }
 
   public String toHQL() throws LensException {
@@ -150,4 +158,79 @@ class MultiFactHQLContext extends SimpleHQLContext {
     }
     return fromBuilder.toString();
   }
+
+
+  public static ASTNode convertHavingToWhere(ASTNode havingAST, CubeQueryContext context, Set<CandidateFact> cfacts,
+    AliasDecider aliasDecider) throws LensException {
+    if (havingAST == null) {
+      return null;
+    }
+    if (isAggregateAST(havingAST) || isTableColumnAST(havingAST) || isNonAggregateFunctionAST(havingAST)) {
+      // if already present in select, pick alias
+      String alias = null;
+      for (CandidateFact fact : cfacts) {
+        if (fact.isExpressionAnswerable(havingAST, context)) {
+          alias = fact.addAndGetAliasFromSelect(havingAST, aliasDecider);
+          return new ASTNode(new CommonToken(HiveParser.Identifier, alias));
+        }
+      }
+    }
+    if (havingAST.getChildren() != null) {
+      for (int i = 0; i < havingAST.getChildCount(); i++) {
+        ASTNode replaced = convertHavingToWhere((ASTNode) havingAST.getChild(i), context, cfacts, aliasDecider);
+        havingAST.setChild(i, replaced);
+      }
+    }
+    return havingAST;
+  }
+
+  public static ASTNode pushDownHaving(ASTNode ast, CubeQueryContext cubeQueryContext, Set<CandidateFact> cfacts)
+    throws LensException {
+    if (ast == null) {
+      return null;
+    }
+    if (ast.getType() == HiveParser.KW_AND || ast.getType() == HiveParser.TOK_HAVING) {
+      List<ASTNode> children = Lists.newArrayList();
+      for (Node child : ast.getChildren()) {
+        ASTNode newChild = pushDownHaving((ASTNode) child, cubeQueryContext, cfacts);
+        if (newChild != null) {
+          children.add(newChild);
+        }
+      }
+      if (children.size() == 0) {
+        return null;
+      } else if (children.size() == 1) {
+        return children.get(0);
+      } else {
+        ASTNode newASTNode = new ASTNode(ast.getToken());
+        for (ASTNode child : children) {
+          newASTNode.addChild(child);
+        }
+        return newASTNode;
+      }
+    }
+    if (isPrimitiveBooleanExpression(ast)) {
+      CandidateFact fact = pickFactToPushDown(ast, cubeQueryContext, cfacts);
+      if (fact == null) {
+        return ast;
+      }
+      fact.addToHaving(ast);
+      return null;
+    }
+    return ast;
+  }
+
+  private static CandidateFact pickFactToPushDown(ASTNode ast, CubeQueryContext cubeQueryContext, Set<CandidateFact>
+    cfacts) throws LensException {
+    for (CandidateFact fact : cfacts) {
+      if (fact.isExpressionAnswerable(ast, cubeQueryContext)) {
+        return fact;
+      }
+    }
+    return null;
+  }
+
+  private static boolean isPrimitiveBooleanExpression(ASTNode ast) {
+    return HQLParser.FILTER_OPERATORS.contains(ast.getType());
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
index 7e3a0bf..9f16c5a 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
@@ -38,52 +38,13 @@ import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 
 import org.antlr.runtime.CommonToken;
-import org.antlr.runtime.tree.Tree;
-
-import lombok.Data;
-
 
 public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
 
   private final QueryAST ast;
-  int aliasCounter = 0;
-
-  @Data
-  public static class HashableASTNode {
-    private ASTNode ast;
-    private int hashCode = -1;
-    private boolean hashCodeComputed = false;
-
-    public HashableASTNode(ASTNode ast) {
-      this.ast = ast;
-    }
-
-    public void setAST(ASTNode ast) {
-      this.ast = ast;
-      hashCodeComputed = false;
-    }
-
-    public ASTNode getAST() {
-      return ast;
-    }
-
-    @Override
-    public int hashCode() {
-      if (!hashCodeComputed) {
-        hashCode = getString(ast).hashCode();
-        hashCodeComputed = true;
-      }
-      return hashCode;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-      return o instanceof HashableASTNode && this.hashCode() == o.hashCode() && getString(this.getAST())
-        .trim().equalsIgnoreCase(getString(((HashableASTNode) o).getAST()).trim());
-    }
-  }
 
   private Map<HashableASTNode, ASTNode> innerToOuterASTs = new HashMap<>();
+  private AliasDecider aliasDecider = new DefaultAliasDecider();
 
   SingleFactMultiStorageHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery,
     CubeQueryContext query, QueryAST ast)
@@ -174,7 +135,7 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
       ASTNode innerSelectASTWithoutAlias = copyAST(astNode);
       ASTNode innerSelectExprAST = new ASTNode(new CommonToken(HiveParser.TOK_SELEXPR));
       innerSelectExprAST.addChild(innerSelectASTWithoutAlias);
-      String alias = decideAlias(astNode);
+      String alias = aliasDecider.decideAlias(astNode);
       ASTNode aliasNode = new ASTNode(new CommonToken(Identifier, alias));
       innerSelectExprAST.addChild(aliasNode);
       addToInnerSelectAST(innerSelectExprAST);
@@ -192,7 +153,7 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
       ASTNode innerSelectASTWithoutAlias = copyAST(astNode);
       ASTNode innerSelectExprAST = new ASTNode(new CommonToken(HiveParser.TOK_SELEXPR));
       innerSelectExprAST.addChild(innerSelectASTWithoutAlias);
-      String alias = decideAlias(astNode);
+      String alias = aliasDecider.decideAlias(astNode);
       ASTNode aliasNode = new ASTNode(new CommonToken(Identifier, alias));
       innerSelectExprAST.addChild(aliasNode);
       addToInnerSelectAST(innerSelectExprAST);
@@ -249,11 +210,6 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
     return child;
   }
 
-  private String decideAlias(Tree child) {
-    // Can add intelligence in aliases someday. Not required though :)
-    return "alias" + (aliasCounter++);
-  }
-
   private static ArrayList<HQLContextInterface> getUnionContexts(CandidateFact fact, Map<Dimension, CandidateDim>
     dimsToQuery, CubeQueryContext query, QueryAST ast)
     throws LensException {

http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index 57a15e2..8aab777 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -23,6 +23,7 @@ import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.metadata.DateUtil.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.MISSING_PARTITIONS;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
+import static org.apache.lens.cube.parse.TestCubeRewriter.compareContains;
 import static org.apache.lens.cube.parse.TestCubeRewriter.compareQueries;
 
 import static org.apache.hadoop.hive.ql.parse.HiveParser.KW_AND;
@@ -188,8 +189,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected2 =
       getExpectedQuery(cubeName, "select round(sum(basecube.msr2)/1000) as `roundedmsr2` FROM ", null,
         null, getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     String lower = hqlQuery.toLowerCase();
     assertTrue(lower.startsWith("select mq2.roundedmsr2 roundedmsr2, mq1.msr12 msr12 from ")
       || lower.startsWith("select mq1.roundedmsr2 roundedmsr2, mq2.msr12 msr12 from "), hqlQuery);
@@ -206,8 +207,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected2 = getExpectedQuery(cubeName,
         "select basecube.dim1 as `dim1`, round(sum(basecube.msr2)/1000) as `roundedmsr2` FROM ", null,
         " group by basecube.dim1", getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     String lower = hqlQuery.toLowerCase();
     assertTrue(
       lower.startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, mq2.roundedmsr2 roundedmsr2, mq1.msr12 msr12 from ")
@@ -229,8 +230,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected2 = getExpectedQuery(cubeName,
         "select basecube.dim1 as `dim1`, round(sum(basecube.msr2)/1000) as `roundedmsr2` FROM ", null,
         " group by basecube.dim1", getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     String lower = hqlQuery.toLowerCase();
     assertTrue(
       lower.startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, mq2.roundedmsr2 roundedmsr2, mq1.msr12 msr12 from ")
@@ -254,8 +255,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected2 = getExpectedQuery(cubeName,
             "select basecube.dim1 as `dim1`, round(sum(basecube.msr2)/1000) as `roundedmsr2` FROM ", null,
             " group by basecube.dim1", getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     String lower = hqlQuery.toLowerCase();
     assertTrue(
             lower.startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, mq2.roundedmsr2 roundedmsr2, "
@@ -278,8 +279,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected2 = getExpectedQuery(cubeName,
         "select basecube.dim1 as `dim1`, round(sum(basecube.msr2)/1000) as `roundedmsr2` FROM ", null,
         " group by basecube.dim1", getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     String lower = hqlQuery.toLowerCase();
     assertTrue(
       lower.startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, mq2.msr12 msr12, mq1.roundedmsr2 roundedmsr2 from ")
@@ -305,9 +306,9 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected3 =
       getExpectedQuery(cubeName, "select basecube.dim1 as `dim1`, max(basecube.msr13) as `msr13` FROM ", null,
         " group by basecube.dim1", getWhereForDailyAndHourly2days(cubeName, "c1_testfact3_base"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
-    TestCubeRewriter.compareContains(expected3, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    compareContains(expected3, hqlQuery);
     assertTrue(
       hqlQuery.toLowerCase().startsWith(
         "select coalesce(mq1.dim1, mq2.dim1, mq3.dim1) dim1, mq1.msr12 msr12,"
@@ -342,8 +343,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
         cubeName,
         "select basecube.dim1 as `dim1`, basecube.dim11 as `dim11`, round(sum(basecube.msr2)/1000) as `roundedmsr2` "
         + "FROM ", null, " group by basecube.dim1", getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(
       "select coalesce(mq1.dim1, mq2.dim1) dim1, coalesce(mq1.dim11, mq2.dim11) dim11,"
         + " mq1.msr12 msr12, mq2.roundedmsr2 roundedmsr2 from ")
@@ -365,8 +366,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected2 = getExpectedQuery(cubeName,
         "select basecube.dim1 as `dim1`, round(basecube.msr2/1000) as `roundedmsr2` FROM ", null, null,
         getWhereForHourly2days(cubeName, "C1_testfact1_raw_base"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(
       "select coalesce(mq1.dim1, mq2.dim1) dim1, mq1.msr11 msr11, mq2.roundedmsr2 roundedmsr2 from ")
       || hqlQuery.toLowerCase().startsWith(
@@ -387,8 +388,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected2 =
       getExpectedQuery(cubeName, "select basecube.dim1 as `d1`, round(sum(basecube.msr2)/1000) as `m2` FROM ", null,
         " group by basecube.dim1", getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(
       "select coalesce(mq1.d1, mq2.d1) d1, mq2.expr2 `my msr12`, mq1.m2 m2 from ")
       ||
@@ -407,8 +408,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected2 =
       getExpectedQuery(cubeName, "select basecube.dim1 as `dim1`, avg(basecube.msr2)) as `msr2` FROM ", null,
         " group by basecube.dim1", getWhereForHourly2days(cubeName, "C1_testfact1_raw_base"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(
       "select coalesce(mq1.dim1, mq2.dim1) dim1, mq2.msr12 msr12, mq1.msr2 msr2 from ")
       || hqlQuery.toLowerCase().startsWith(
@@ -430,8 +431,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
         "select dim2chain.name as `name`, round(sum(basecube.msr2)/1000) as `roundedmsr2` FROM ", " JOIN " + getDbName()
             + "c1_testdim2tbl dim2chain ON basecube.dim2 = " + " dim2chain.id and (dim2chain.dt = 'latest') ", null,
         " group by dim2chain.name", null, getWhereForHourly2days(cubeName, "C1_testfact1_raw_base"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(
       "select coalesce(mq1.name, mq2.name) name, mq2.msr12 msr12, mq1.roundedmsr2 roundedmsr2 from ")
       || hqlQuery.toLowerCase().startsWith(
@@ -451,8 +452,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected2 = getExpectedQuery(cubeName,
         "select basecube.dim2 as `dim2`, round(sum(basecube.msr2)/1000) as `roundedmsr2` FROM ", null,
         " group by basecube.dim2", getWhereForHourly2days(cubeName, "C1_testfact1_raw_base"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(
       "select coalesce(mq1.dim2, mq2.dim2) dim2, mq2.msr13 msr13, mq1.roundedmsr2 roundedmsr2 from ")
       || hqlQuery.toLowerCase().startsWith(
@@ -480,8 +481,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
           + " round(sum(basecube.msr2)/1000) as `msr2` FROM ", null,
         " group by basecube.dim1 != 'x' AND basecube.dim2 != 10",
         getWhereForHourly2days(cubeName, "C1_testfact1_raw_base"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase()
       .startsWith("select coalesce(mq1.booleancut, mq2.booleancut) booleancut, mq2.msr2 msr2,"
         + " mq1.expr3 expr3 from ")
@@ -570,4 +571,198 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
       }
     }
   }
+  @Test
+  public void testMultiFactQueryWithHaving() throws Exception {
+
+    String hqlQuery, expected1, expected2;
+    String endSubString = "mq2 on mq1.dim1 <=> mq2.dim1 AND mq1.dim11 <=> mq2.dim11";
+    String joinSubString = "mq1 full outer join ";
+
+    // only One having clause, that too answerable from one fact
+    hqlQuery = rewrite("select dim1, dim11, msr12 from basecube where " + TWO_DAYS_RANGE
+      + "having roundedmsr2 > 0", conf);
+    expected1 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, sum(basecube.msr12) as msr12 FROM ",
+      null, " group by basecube.dim1, basecube.dim11",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+    expected2 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11 FROM ",
+      null, " group by basecube.dim1, basecube.dim11 having round(sum(basecube.msr2)/1000) > 0",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
+
+    assertTrue(hqlQuery.toLowerCase().contains("having"));
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, "
+      + "coalesce(mq1.dim11, mq2.dim11) dim11, mq2.msr12 msr12 from ")
+      || hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, coalesce(mq1.dim11, mq2.dim11) "
+        + "dim11, mq1.msr12 msr12 from "), hqlQuery);
+    assertTrue(hqlQuery.contains(joinSubString)
+      && hqlQuery.endsWith(endSubString), hqlQuery);
+
+    // Two having clause, one from each fact.
+    hqlQuery = rewrite("select dim1, dim11, msr12, roundedmsr2 from basecube where " + TWO_DAYS_RANGE
+      + "having msr12 > 2 and roundedmsr2 > 0", conf);
+    expected1 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, sum(basecube.msr12) as msr12 FROM ",
+      null, " group by basecube.dim1, basecube.dim11 HAVING sum(basecube.msr12) > 2",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+    expected2 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, round(sum(basecube.msr2)/1000) as roundedmsr2 FROM ",
+      null, " group by basecube.dim1, basecube.dim11 HAVING round(sum(basecube.msr2)/1000) > 0",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
+
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, "
+      + "coalesce(mq1.dim11, mq2.dim11) dim11, mq1.msr12 msr12, mq2.roundedmsr2 roundedmsr2 from ")
+      || hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, "
+        + "coalesce(mq1.dim11, mq2.dim11) dim11, mq2.msr12 msr12, mq1.roundedmsr2 roundedmsr2 from "), hqlQuery);
+    assertTrue(hqlQuery.contains(joinSubString)
+      && hqlQuery.endsWith(endSubString), hqlQuery);
+
+    // Two having clauses and one complex expression in having which needs to be split over the two facts
+    // And added as where clause outside
+    hqlQuery = rewrite("select dim1, dim11, msr12, roundedmsr2 from basecube where " + TWO_DAYS_RANGE
+      + "having flooredmsr12+roundedmsr2 <= 1000 and msr12 > 2 and roundedmsr2 > 0", conf);
+    expected1 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, sum(basecube.msr12) as msr12 , "
+        + "floor(sum(basecube.msr12)) as alias0 FROM ",
+      null, " group by basecube.dim1, basecube.dim11 HAVING sum(basecube.msr12) > 2",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, "
+      + "coalesce(mq1.dim11, mq2.dim11) dim11, mq2.msr12 msr12, mq1.roundedmsr2 roundedmsr2 from ")
+      || hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, "
+        + "coalesce(mq1.dim11, mq2.dim11) dim11, mq1.msr12 msr12, mq2.roundedmsr2 roundedmsr2 from "), hqlQuery);
+    assertTrue(hqlQuery.contains(joinSubString)
+      && hqlQuery.endsWith(endSubString + " WHERE (( alias0  +  roundedmsr2 ) <=  1000 )"), hqlQuery);
+
+    // No push-down-able having clauses.
+    hqlQuery = rewrite("select dim1, dim11, msr12, roundedmsr2 from basecube where " + TWO_DAYS_RANGE
+      + "having flooredmsr12+roundedmsr2 <= 1000", conf);
+    expected1 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, sum(basecube.msr12) as msr12, "
+        + "floor(sum(( basecube . msr12 ))) as `alias0` FROM ",
+      null, " group by basecube.dim1, basecube.dim11",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+    expected2 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, round(sum(basecube.msr2)/1000) as roundedmsr2 FROM ",
+      null, " group by basecube.dim1, basecube.dim11",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
+
+    assertFalse(hqlQuery.toLowerCase().contains("having"));
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, "
+      + "coalesce(mq1.dim11, mq2.dim11) dim11, mq2.msr12 msr12, mq1.roundedmsr2 roundedmsr2 from ")
+      || hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, coalesce(mq1.dim11, mq2.dim11) "
+        + "dim11, mq1.msr12 msr12, mq2.roundedmsr2 roundedmsr2 from "), hqlQuery);
+    assertTrue(hqlQuery.contains(joinSubString)
+      && hqlQuery.endsWith(endSubString + " WHERE (( alias0  +  roundedmsr2 ) <=  1000 )"), hqlQuery);
+
+    // function over expression of two functions over measures
+    hqlQuery = rewrite("select dim1, dim11, msr12, roundedmsr2 from basecube where " + TWO_DAYS_RANGE
+      + "having round(flooredmsr12+roundedmsr2) <= 1000", conf);
+    expected1 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, sum(basecube.msr12) as msr12, "
+        + "floor(sum(( basecube . msr12 ))) as `alias0` FROM ",
+      null, " group by basecube.dim1, basecube.dim11",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+    expected2 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, round(sum(basecube.msr2)/1000) as roundedmsr2 FROM ",
+      null, " group by basecube.dim1, basecube.dim11",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
+
+    assertFalse(hqlQuery.toLowerCase().contains("having"));
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, "
+      + "coalesce(mq1.dim11, mq2.dim11) dim11, mq2.msr12 msr12, mq1.roundedmsr2 roundedmsr2 from ")
+      || hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, coalesce(mq1.dim11, mq2.dim11) "
+        + "dim11, mq1.msr12 msr12, mq2.roundedmsr2 roundedmsr2 from "), hqlQuery);
+    assertTrue(hqlQuery.contains(joinSubString)
+      && hqlQuery.endsWith(endSubString + " WHERE (round(( alias0  +  roundedmsr2 )) <=  1000 )"), hqlQuery);
+
+
+    // Following test cases only select dimensions, and all the measures are in having.
+    // Mostly tests follow the same pattern as the above tests,
+    // The extra thing to test is the inclusion of sub-expressions in select clauses.
+
+
+    hqlQuery = rewrite("select dim1, dim11 from basecube where " + TWO_DAYS_RANGE
+      + "having msr12 > 2 and roundedmsr2 > 0", conf);
+    expected1 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11 FROM ",
+      null, " group by basecube.dim1, basecube.dim11 HAVING sum(basecube.msr12) > 2",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+    expected2 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11 FROM ",
+      null, " group by basecube.dim1, basecube.dim11 HAVING round(sum(basecube.msr2)/1000) > 0",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
+    String begin = "select coalesce(mq1.dim1, mq2.dim1) dim1, coalesce(mq1.dim11, mq2.dim11) dim11 from ";
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith(begin), hqlQuery);
+    assertTrue(hqlQuery.contains(joinSubString) && hqlQuery.endsWith(endSubString), hqlQuery);
+
+    hqlQuery = rewrite("select dim1, dim11 from basecube where " + TWO_DAYS_RANGE
+      + "having flooredmsr12+roundedmsr2 <= 1000", conf);
+    expected1 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, "
+        + "floor(sum(basecube.msr12)) as alias0 FROM ",
+      null, " group by basecube.dim1, basecube.dim11",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+    expected2 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, round(sum(basecube.msr2/1000)) as alias1 FROM ",
+      null, " group by basecube.dim1, basecube.dim11",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
+
+    assertFalse(hqlQuery.toLowerCase().contains("having"));
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith(begin), hqlQuery);
+    assertTrue(hqlQuery.contains(joinSubString)
+      && hqlQuery.endsWith(endSubString + " WHERE (( alias0  +  alias1 ) <=  1000 )"), hqlQuery);
+
+    hqlQuery = rewrite("select dim1, dim11 from basecube where " + TWO_DAYS_RANGE
+      + "having msr12 > 2 and roundedmsr2 > 0 and flooredmsr12+roundedmsr2 <= 1000", conf);
+    expected1 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, "
+        + "floor(sum(( basecube . msr12 ))) as `alias0` FROM ",
+      null, " group by basecube.dim1, basecube.dim11 having sum(basecube.msr12) > 2",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+    expected2 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, round(sum(basecube.msr2)/1000) as alias1 FROM ",
+      null, " group by basecube.dim1, basecube.dim11 having round(sum(basecube.msr2)/1000) > 0",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
+
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith(begin), hqlQuery);
+    assertTrue(hqlQuery.contains(joinSubString)
+      && hqlQuery.endsWith(endSubString + " WHERE (( alias0  +  alias1 ) <=  1000 )"), hqlQuery);
+
+    hqlQuery = rewrite("select dim1, dim11 from basecube where " + TWO_DAYS_RANGE
+      + "having msr12 > 2 or roundedmsr2 > 0 or flooredmsr12+roundedmsr2 <= 1000", conf);
+    expected1 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, "
+        + "sum(basecube.msr12) as alias0, floor(sum(basecube.msr12)) as alias2 FROM ",
+      null, " group by basecube.dim1, basecube.dim11",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+    expected2 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, round(sum(basecube.msr2)/1000) as alias1 FROM ",
+      null, " group by basecube.dim1, basecube.dim11",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
+    String havingToWhere = " WHERE ((( alias0  >  2 ) or ( alias1  >  0 )) or (( alias2  +  alias1 ) <=  1000 ))";
+
+    assertFalse(hqlQuery.toLowerCase().contains("having"));
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith(begin), hqlQuery);
+    assertTrue(hqlQuery.contains(joinSubString)
+      && hqlQuery.endsWith(endSubString + havingToWhere), hqlQuery);
+  }
 }


[08/51] [abbrv] lens git commit: LENS-887 : Add exception handling over event process threads and increase pool size for QueryEndNotifier and ResultFormatter

Posted by de...@apache.org.
LENS-887 : Add exception handling over event process threads and increase pool size for QueryEndNotifier and ResultFormatter


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/73f92430
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/73f92430
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/73f92430

Branch: refs/heads/current-release-line
Commit: 73f92430c70664cf5b8c63ec9b174a4a1b27d2ad
Parents: 36166a2
Author: Puneet Gupta <pu...@gmail.com>
Authored: Tue Dec 15 18:22:40 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Dec 15 18:22:40 2015 +0530

----------------------------------------------------------------------
 .../server/api/events/AsyncEventListener.java   | 50 +++++++++-----
 .../apache/lens/server/EventServiceImpl.java    |  9 ++-
 .../lens/server/query/QueryEndNotifier.java     | 72 +++++++++++---------
 .../lens/server/query/ResultFormatter.java      |  5 ++
 .../lens/server/query/TestEventService.java     | 45 ++++++++++++
 5 files changed, 131 insertions(+), 50 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/73f92430/lens-server-api/src/main/java/org/apache/lens/server/api/events/AsyncEventListener.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/events/AsyncEventListener.java b/lens-server-api/src/main/java/org/apache/lens/server/api/events/AsyncEventListener.java
index 547c008..84728e5 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/events/AsyncEventListener.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/events/AsyncEventListener.java
@@ -22,12 +22,18 @@ import java.util.concurrent.*;
 
 import org.apache.lens.server.api.error.LensException;
 
+import org.apache.commons.lang3.concurrent.BasicThreadFactory;
+
+import lombok.AccessLevel;
+import lombok.Getter;
+import lombok.extern.slf4j.Slf4j;
 /**
  * Event listeners should implement this class if they wish to process events asynchronously. This should be used when
  * event processing can block, or is computationally intensive.
  *
  * @param <T> the generic type
  */
+@Slf4j
 public abstract class AsyncEventListener<T extends LensEvent> implements LensEventListener<T> {
 
   /**
@@ -41,49 +47,57 @@ public abstract class AsyncEventListener<T extends LensEvent> implements LensEve
   protected final BlockingQueue<Runnable> eventQueue;
 
   /**
+   * Name of this Asynchronous Event Listener. Will be used for logging and to name the threads in thread pool that
+   * allow asynchronous handling of events. If required, Sub Classes can override <code>getName</code> method to
+   * provide more appropriate name.
+   *
+   * Default value is the class Name (Example QueryEndNotifier, ResultFormatter, etc)
+   */
+  @Getter(AccessLevel.PROTECTED)
+  private final String name = this.getClass().getSimpleName();
+
+  /**
    * Create a single threaded event listener with an unbounded queue, with daemon threads.
    */
   public AsyncEventListener() {
-    this(1);
+    this(1, 1);
   }
 
   /**
   * Create an event listener with poolSize threads with an unbounded queue and daemon threads.
    *
    * @param poolSize the pool size
+   * @param maxPoolSize the max pool size
    */
-  public AsyncEventListener(int poolSize) {
-    this(poolSize, -1, 10, true);
+  public AsyncEventListener(int poolSize, int maxPoolSize) {
+    this(poolSize, maxPoolSize, -1, 10, true);
   }
 
   /**
   * Create an asynchronous event listener which uses a thread pool to process events.
    *
    * @param poolSize       size of the event processing pool
+   * @param maxPoolSize    the max pool size
    * @param maxQueueSize   max size of the event queue, if this is non positive, then the queue is unbounded
    * @param timeOutSeconds time out in seconds when an idle thread is destroyed
    * @param isDaemon       if the threads used to process should be daemon threads,
    *                       if false, then implementation should call stop()
    *                       to stop the thread pool
    */
-  public AsyncEventListener(int poolSize, int maxQueueSize, long timeOutSeconds, final boolean isDaemon) {
+  public AsyncEventListener(int poolSize, int maxPoolSize, int maxQueueSize, long timeOutSeconds,
+      final boolean isDaemon) {
     if (maxQueueSize <= 0) {
       eventQueue = new LinkedBlockingQueue<Runnable>();
     } else {
       eventQueue = new ArrayBlockingQueue<Runnable>(maxQueueSize);
     }
 
-    processor = new ThreadPoolExecutor(poolSize, poolSize, timeOutSeconds, TimeUnit.SECONDS, eventQueue,
-      new ThreadFactory() {
-        @Override
-        public Thread newThread(Runnable runnable) {
-          Thread th = new Thread(runnable);
-          th.setName("event_processor_thread");
-          th.setDaemon(isDaemon);
-          return th;
-        }
-      });
-    processor.allowCoreThreadTimeOut(true);
+    ThreadFactory factory = new BasicThreadFactory.Builder()
+      .namingPattern(getName()+"_AsyncThread-%d")
+      .daemon(isDaemon)
+      .priority(Thread.NORM_PRIORITY)
+      .build();
+    processor = new ThreadPoolExecutor(poolSize, maxPoolSize, timeOutSeconds, TimeUnit.SECONDS, eventQueue, factory);
   }
 
   /**
@@ -98,7 +112,11 @@ public abstract class AsyncEventListener<T extends LensEvent> implements LensEve
       processor.execute(new Runnable() {
         @Override
         public void run() {
-          process(event);
+          try {
+            process(event);
+          } catch (Throwable e) {
+            log.error("{} Failed to process event {}", getName(), event, e);
+          }
         }
       });
     } catch (RejectedExecutionException rejected) {

http://git-wip-us.apache.org/repos/asf/lens/blob/73f92430/lens-server/src/main/java/org/apache/lens/server/EventServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/EventServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/EventServiceImpl.java
index a276828..369885d 100644
--- a/lens-server/src/main/java/org/apache/lens/server/EventServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/EventServiceImpl.java
@@ -21,6 +21,7 @@ package org.apache.lens.server;
 import java.util.*;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
+import java.util.concurrent.ThreadFactory;
 
 import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.error.LensException;
@@ -29,6 +30,7 @@ import org.apache.lens.server.api.events.LensEventListener;
 import org.apache.lens.server.api.events.LensEventService;
 import org.apache.lens.server.api.health.HealthStatus;
 
+import org.apache.commons.lang3.concurrent.BasicThreadFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hive.service.AbstractService;
 
@@ -64,8 +66,13 @@ public class EventServiceImpl extends AbstractService implements LensEventServic
   @Override
   public synchronized void init(HiveConf hiveConf) {
     int numProcs = Runtime.getRuntime().availableProcessors();
+    ThreadFactory factory = new BasicThreadFactory.Builder()
+      .namingPattern("Event_Service_Thread-%d")
+      .daemon(false)
+      .priority(Thread.NORM_PRIORITY)
+      .build();
     eventHandlerPool = Executors.newFixedThreadPool(hiveConf.getInt(LensConfConstants.EVENT_SERVICE_THREAD_POOL_SIZE,
-      numProcs));
+      numProcs), factory);
     super.init(hiveConf);
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/73f92430/lens-server/src/main/java/org/apache/lens/server/query/QueryEndNotifier.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryEndNotifier.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryEndNotifier.java
index 110624a..ca00b4d 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryEndNotifier.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryEndNotifier.java
@@ -77,12 +77,17 @@ public class QueryEndNotifier extends AsyncEventListener<QueryEnded> {
 
   private final LogSegregationContext logSegregationContext;
 
+  /** QueryEndNotifier core and max pool size */
+  private static final int CORE_POOL_SIZE = 2;
+  private static final int MAX_POOL_SIZE = 5;
+
   /** Instantiates a new query end notifier.
    *
    * @param queryService the query service
    * @param hiveConf     the hive conf */
   public QueryEndNotifier(QueryExecutionServiceImpl queryService, HiveConf hiveConf,
     @NonNull final LogSegregationContext logSegregationContext) {
+    super(CORE_POOL_SIZE, MAX_POOL_SIZE);
     this.queryService = queryService;
     HiveConf conf = hiveConf;
     from = conf.get(MAIL_FROM_ADDRESS);
@@ -113,23 +118,30 @@ public class QueryEndNotifier extends AsyncEventListener<QueryEnded> {
 
     boolean whetherMailNotify = Boolean.parseBoolean(queryContext.getConf().get(QUERY_MAIL_NOTIFY,
       WHETHER_MAIL_NOTIFY_DEFAULT));
-
     if (!whetherMailNotify) {
       return;
     }
 
-    String queryName = queryContext.getQueryName();
-    String mailSubject = "Query " + (StringUtils.isBlank(queryName) ? "" : (queryName + " "))
-      + queryContext.getStatus().getStatus() + ": " + event.getQueryHandle();
+    try {
+      //Create and Send EMAIL
+      String queryName = queryContext.getQueryName();
+      String mailSubject = "Query " + (StringUtils.isBlank(queryName) ? "" : (queryName + " "))
+        + queryContext.getStatus().getStatus() + ": " + event.getQueryHandle();
 
-    String mailMessage = createMailMessage(queryContext);
+      String mailMessage = createMailMessage(queryContext);
 
-    String to = queryContext.getSubmittedUser() + "@" + queryService.getServerDomain();
+      String to = queryContext.getSubmittedUser() + "@" + queryService.getServerDomain();
 
-    String cc = queryContext.getConf().get(QUERY_RESULT_EMAIL_CC, QUERY_RESULT_DEFAULT_EMAIL_CC);
+      String cc = queryContext.getConf().get(QUERY_RESULT_EMAIL_CC, QUERY_RESULT_DEFAULT_EMAIL_CC);
 
-    log.info("Sending completion email for query handle: {}", event.getQueryHandle());
-    sendMail(host, port, new Email(from, to, cc, mailSubject, mailMessage), mailSmtpTimeout, mailSmtpConnectionTimeout);
+      log.info("Sending completion email for query handle: {}", event.getQueryHandle());
+      sendMail(host, port, new Email(from, to, cc, mailSubject, mailMessage), mailSmtpTimeout,
+          mailSmtpConnectionTimeout);
+    } catch (Exception e) {
+      MetricsService metricsService = LensServices.get().getService(MetricsService.NAME);
+      metricsService.incrCounter(QueryEndNotifier.class, EMAIL_ERROR_COUNTER);
+      log.error("Error sending query end email", e);
+    }
   }
 
   /** Creates the mail message.
@@ -184,38 +196,32 @@ public class QueryEndNotifier extends AsyncEventListener<QueryEnded> {
    * @param mailSmtpTimeout           the mail smtp timeout
    * @param mailSmtpConnectionTimeout the mail smtp connection timeout */
   public static void sendMail(String host, String port,
-    Email email, int mailSmtpTimeout, int mailSmtpConnectionTimeout) {
+    Email email, int mailSmtpTimeout, int mailSmtpConnectionTimeout) throws Exception{
     Properties props = System.getProperties();
     props.put("mail.smtp.host", host);
     props.put("mail.smtp.port", port);
     props.put("mail.smtp.timeout", mailSmtpTimeout);
     props.put("mail.smtp.connectiontimeout", mailSmtpConnectionTimeout);
     Session session = Session.getDefaultInstance(props, null);
-    try {
-      MimeMessage message = new MimeMessage(session);
-      message.setFrom(new InternetAddress(email.getFrom()));
-      for (String recipient : email.getTo().trim().split("\\s*,\\s*")) {
-        message.addRecipients(Message.RecipientType.TO, InternetAddress.parse(recipient));
-      }
-      if (email.getCc() != null && email.getCc().length() > 0) {
-        for (String recipient : email.getCc().trim().split("\\s*,\\s*")) {
-          message.addRecipients(Message.RecipientType.CC, InternetAddress.parse(recipient));
-        }
+    MimeMessage message = new MimeMessage(session);
+    message.setFrom(new InternetAddress(email.getFrom()));
+    for (String recipient : email.getTo().trim().split("\\s*,\\s*")) {
+      message.addRecipients(Message.RecipientType.TO, InternetAddress.parse(recipient));
+    }
+    if (email.getCc() != null && email.getCc().length() > 0) {
+      for (String recipient : email.getCc().trim().split("\\s*,\\s*")) {
+        message.addRecipients(Message.RecipientType.CC, InternetAddress.parse(recipient));
       }
-      message.setSubject(email.getSubject());
-      message.setSentDate(new Date());
+    }
+    message.setSubject(email.getSubject());
+    message.setSentDate(new Date());
 
-      MimeBodyPart messagePart = new MimeBodyPart();
-      messagePart.setText(email.getMessage());
-      Multipart multipart = new MimeMultipart();
+    MimeBodyPart messagePart = new MimeBodyPart();
+    messagePart.setText(email.getMessage());
+    Multipart multipart = new MimeMultipart();
 
-      multipart.addBodyPart(messagePart);
-      message.setContent(multipart);
-      Transport.send(message);
-    } catch (Exception e) {
-      MetricsService metricsService = LensServices.get().getService(MetricsService.NAME);
-      metricsService.incrCounter(QueryEndNotifier.class, EMAIL_ERROR_COUNTER);
-      log.error("Error sending query end email", e);
-    }
+    multipart.addBodyPart(messagePart);
+    message.setContent(multipart);
+    Transport.send(message);
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/73f92430/lens-server/src/main/java/org/apache/lens/server/query/ResultFormatter.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/ResultFormatter.java b/lens-server/src/main/java/org/apache/lens/server/query/ResultFormatter.java
index f568b17..9955278 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/ResultFormatter.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/ResultFormatter.java
@@ -46,6 +46,10 @@ public class ResultFormatter extends AsyncEventListener<QueryExecuted> {
   /** The query service. */
   QueryExecutionServiceImpl queryService;
 
+  /** ResultFormatter core and max pool size */
+  private static final int CORE_POOL_SIZE = 5;
+  private static final int MAX_POOL_SIZE = 10;
+
   private final LogSegregationContext logSegregationContext;
 
   /**
@@ -54,6 +58,7 @@ public class ResultFormatter extends AsyncEventListener<QueryExecuted> {
    * @param queryService the query service
    */
   public ResultFormatter(QueryExecutionServiceImpl queryService, @NonNull LogSegregationContext logSegregationContext) {
+    super(CORE_POOL_SIZE, MAX_POOL_SIZE);
     this.queryService = queryService;
     this.logSegregationContext = logSegregationContext;
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/73f92430/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java
index 702a529..a2ca17f 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java
@@ -20,6 +20,9 @@ package org.apache.lens.server.query;
 
 import static org.testng.Assert.*;
 
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
 import java.util.UUID;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
@@ -503,4 +506,46 @@ public class TestEventService {
 
   }
 
+  @Test
+  public void testAysncEventListenerPoolThreads(){
+    AsyncEventListener<QuerySuccess> ayncListener = new DummyAsncEventListener();
+    for(int i=0; i<10; i++){
+      try {
+        //A pool thread is created each time an event is submitted until core pool size is reached which is 5
+        //for this test case.  @see org.apache.lens.server.api.events.AsyncEventListener.processor
+        ayncListener.onEvent(null);
+      } catch (LensException e) {
+        assert(false); //Not Expected
+      }
+    }
+
+    //Verify the core pool Threads after the events have been fired
+    ThreadGroup currentTG = Thread.currentThread().getThreadGroup();
+    int count = currentTG.activeCount();
+    Thread[] threads = new Thread[count];
+    currentTG.enumerate(threads);
+    Set<String> aysncThreadNames = new HashSet<String>();
+    for(Thread t : threads){
+      if (t.getName().contains("DummyAsncEventListener_AsyncThread")){
+        aysncThreadNames.add(t.getName());
+      }
+    }
+    assertTrue(aysncThreadNames.containsAll(Arrays.asList(
+      "DummyAsncEventListener_AsyncThread-1",
+      "DummyAsncEventListener_AsyncThread-2",
+      "DummyAsncEventListener_AsyncThread-3",
+      "DummyAsncEventListener_AsyncThread-4",
+      "DummyAsncEventListener_AsyncThread-5")));
+  }
+
+  private static class DummyAsncEventListener extends AsyncEventListener<QuerySuccess> {
+    public DummyAsncEventListener(){
+      super(5, 10); //core pool = 5 and max Pool size =10
+    }
+    @Override
+    public void process(QuerySuccess event) {
+      throw new RuntimeException("Simulated Exception");
+    }
+  }
+
 }


[40/51] [abbrv] lens git commit: LENS-937 : Add ReferencedDimAtrribute class for backward compatibility

Posted by de...@apache.org.
LENS-937 : Add ReferencedDimAtrribute class for backward compatibility


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/f1d6e3fa
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/f1d6e3fa
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/f1d6e3fa

Branch: refs/heads/current-release-line
Commit: f1d6e3fa0404f113f6f8842afeba7c984adb2b44
Parents: b3f993d
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Fri Jan 29 15:03:51 2016 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Fri Jan 29 15:03:51 2016 +0530

----------------------------------------------------------------------
 .../cube/metadata/ReferencedDimAtrribute.java   | 35 ++++++++++++++++++++
 1 file changed, 35 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/f1d6e3fa/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAtrribute.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAtrribute.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAtrribute.java
new file mode 100644
index 0000000..f0ff1ad
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/ReferencedDimAtrribute.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.metadata;
+
+import java.util.*;
+
+import org.apache.lens.server.api.error.LensException;
+
+import lombok.EqualsAndHashCode;
+import lombok.ToString;
+
+@EqualsAndHashCode(callSuper = true)
+@ToString(callSuper = true)
+public class ReferencedDimAtrribute extends ReferencedDimAttribute {
+
+  public ReferencedDimAtrribute(String name, Map<String, String> props) throws LensException {
+    super(name, props);
+  }
+}


[11/51] [abbrv] lens git commit: LENS-903 : No candidate dim available exception should contain only brief error - added missing file

Posted by de...@apache.org.
LENS-903 : No candidate dim available exception should contain only brief error - added missing file


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/d6aeecc7
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/d6aeecc7
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/d6aeecc7

Branch: refs/heads/current-release-line
Commit: d6aeecc7306d9cd37dacd53caa0dfd842ca48bdc
Parents: b84cb2c
Author: Sushil Mohanty <su...@apache.org>
Authored: Thu Dec 17 18:22:26 2015 +0530
Committer: Sushil Mohanty <su...@apache.org>
Committed: Thu Dec 17 18:22:26 2015 +0530

----------------------------------------------------------------------
 .../error/NoCandidateDimAvailableException.java | 47 ++++++++++++++++++++
 1 file changed, 47 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/d6aeecc7/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateDimAvailableException.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateDimAvailableException.java b/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateDimAvailableException.java
new file mode 100644
index 0000000..ef76dc6
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateDimAvailableException.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.error;
+
+import org.apache.lens.cube.metadata.CubeDimensionTable;
+import org.apache.lens.cube.parse.PruneCauses;
+import org.apache.lens.server.api.error.LensException;
+
+public class NoCandidateDimAvailableException extends LensException {
+
+  private final PruneCauses<CubeDimensionTable> briefAndDetailedError;
+
+  public NoCandidateDimAvailableException(PruneCauses<CubeDimensionTable> briefAndDetailedError) {
+    super(LensCubeErrorCode.NO_CANDIDATE_DIM_AVAILABLE.getLensErrorInfo(), briefAndDetailedError.getBriefCause());
+    this.briefAndDetailedError = briefAndDetailedError;
+  }
+
+  public PruneCauses.BriefAndDetailedError getJsonMessage() {
+    return briefAndDetailedError.toJsonObject();
+  }
+
+  @Override
+  public int compareTo(LensException e) {
+    //Compare the max CandidateTablePruneCode coming from different instances.
+    if (e instanceof NoCandidateDimAvailableException) {
+      return briefAndDetailedError.getMaxCause().compareTo(
+              ((NoCandidateDimAvailableException) e).briefAndDetailedError.getMaxCause());
+    }
+    return super.compareTo(e);
+  }
+}


[33/51] [abbrv] lens git commit: LENS-926 : Fixes Chain ref column from bridge table to be resolved correctly

Posted by de...@apache.org.
LENS-926 : Fixes Chain ref column from bridge table to be resolved correctly


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/91ccec77
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/91ccec77
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/91ccec77

Branch: refs/heads/current-release-line
Commit: 91ccec775bfff7732da41b6a6d13e01752750f7f
Parents: 5d2dccb
Author: Amareshwari Sriramadasu <am...@gmail.com>
Authored: Thu Jan 21 17:43:26 2016 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Thu Jan 21 17:43:26 2016 +0530

----------------------------------------------------------------------
 .../cube/parse/DenormalizationResolver.java     |  46 +--
 .../lens/cube/parse/join/AutoJoinContext.java   |   2 +-
 .../apache/lens/cube/parse/CubeTestSetup.java   |  20 +-
 .../lens/cube/parse/TestBridgeTableQueries.java | 293 +++++++++++++++++++
 .../lens/cube/parse/TestCubeRewriter.java       |   3 +-
 .../lens/cube/parse/TestJoinResolver.java       | 225 ++------------
 6 files changed, 354 insertions(+), 235 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/91ccec77/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
index c83b9ac..f2dc2e5 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hive.ql.parse.HiveParser;
 
 import org.antlr.runtime.CommonToken;
 
+import lombok.Getter;
 import lombok.ToString;
 import lombok.extern.slf4j.Slf4j;
 
@@ -67,14 +68,13 @@ public class DenormalizationResolver implements ContextRewriter {
 
   @ToString
   public static class PickedReference {
-    TableReference reference;
+    @Getter
     ChainRefCol chainRef;
     String srcAlias;
     String pickedFor;
 
-    PickedReference(TableReference reference, String srcAlias, String pickedFor) {
+    PickedReference(String srcAlias, String pickedFor) {
       this.srcAlias = srcAlias;
-      this.reference = reference;
       this.pickedFor = pickedFor;
     }
 
@@ -83,38 +83,22 @@ public class DenormalizationResolver implements ContextRewriter {
       this.chainRef = chainRef;
       this.pickedFor = pickedFor;
     }
-
-    String getDestTable() {
-      if (chainRef != null) {
-        return chainRef.getChainName();
-      }
-      return reference.getDestTable();
-    }
-
-    String getRefColumn() {
-      if (chainRef != null) {
-        return chainRef.getRefColumn();
-      }
-      return reference.getDestColumn();
-    }
   }
 
   public static class DenormalizationContext {
     // map of column name to all references
-    private Map<String, Set<ReferencedQueriedColumn>> referencedCols =
-      new HashMap<String, Set<ReferencedQueriedColumn>>();
+    private Map<String, Set<ReferencedQueriedColumn>> referencedCols = new HashMap<>();
 
     // candidate table name to all the references columns it needs
-    private Map<String, Set<ReferencedQueriedColumn>> tableToRefCols =
-      new HashMap<String, Set<ReferencedQueriedColumn>>();
+    private Map<String, Set<ReferencedQueriedColumn>> tableToRefCols = new HashMap<>();
 
     private CubeQueryContext cubeql;
 
     // set of all picked references once all candidate tables are picked
-    private Set<PickedReference> pickedRefs = new HashSet<PickedReference>();
+    private Set<PickedReference> pickedRefs = new HashSet<>();
     // index on column name for picked references with map from column name to
     // pickedrefs
-    private Map<String, Set<PickedReference>> pickedReferences = new HashMap<String, Set<PickedReference>>();
+    private Map<String, Set<PickedReference>> pickedReferences = new HashMap<>();
 
     DenormalizationContext(CubeQueryContext cubeql) {
       this.cubeql = cubeql;
@@ -123,7 +107,7 @@ public class DenormalizationResolver implements ContextRewriter {
     void addReferencedCol(String col, ReferencedQueriedColumn refer) {
       Set<ReferencedQueriedColumn> refCols = referencedCols.get(col);
       if (refCols == null) {
-        refCols = new HashSet<ReferencedQueriedColumn>();
+        refCols = new HashSet<>();
         referencedCols.put(col, refCols);
       }
       refCols.add(refer);
@@ -144,7 +128,7 @@ public class DenormalizationResolver implements ContextRewriter {
             log.info("Adding denormalized column for column:{} for table:{}", col, table);
             Set<ReferencedQueriedColumn> refCols = tableToRefCols.get(table.getName());
             if (refCols == null) {
-              refCols = new HashSet<ReferencedQueriedColumn>();
+              refCols = new HashSet<>();
               tableToRefCols.put(table.getName(), refCols);
             }
             refCols.add(refer);
@@ -167,7 +151,7 @@ public class DenormalizationResolver implements ContextRewriter {
     private void addPickedReference(String col, PickedReference refer) {
       Set<PickedReference> refCols = pickedReferences.get(col);
       if (refCols == null) {
-        refCols = new HashSet<PickedReference>();
+        refCols = new HashSet<>();
         pickedReferences.put(col, refCols);
       }
       refCols.add(refer);
@@ -187,7 +171,7 @@ public class DenormalizationResolver implements ContextRewriter {
 
     public Set<Dimension> rewriteDenormctx(CandidateFact cfact, Map<Dimension, CandidateDim> dimsToQuery,
       boolean replaceFact) throws LensException {
-      Set<Dimension> refTbls = new HashSet<Dimension>();
+      Set<Dimension> refTbls = new HashSet<>();
 
       if (!tableToRefCols.isEmpty()) {
         // pick referenced columns for fact
@@ -206,7 +190,8 @@ public class DenormalizationResolver implements ContextRewriter {
         // Add the picked references to dimsToQuery
         for (PickedReference picked : pickedRefs) {
           if (isPickedFor(picked, cfact, dimsToQuery)) {
-            refTbls.add((Dimension) cubeql.getCubeTableForAlias(picked.getDestTable()));
+            refTbls.add((Dimension) cubeql.getCubeTableForAlias(picked.getChainRef().getChainName()));
+            cubeql.addColumnsQueried(picked.getChainRef().getChainName(), picked.getChainRef().getRefColumn());
           }
         }
       }
@@ -286,10 +271,11 @@ public class DenormalizationResolver implements ContextRewriter {
           return;
         }
         ASTNode newTableNode =
-          new ASTNode(new CommonToken(HiveParser.Identifier, query.getAliasForTableName(refered.getDestTable())));
+          new ASTNode(new CommonToken(HiveParser.Identifier, refered.getChainRef().getChainName()));
         tableNode.setChild(0, newTableNode);
 
-        ASTNode newColumnNode = new ASTNode(new CommonToken(HiveParser.Identifier, refered.getRefColumn()));
+        ASTNode newColumnNode = new ASTNode(new CommonToken(HiveParser.Identifier,
+          refered.getChainRef().getRefColumn()));
         node.setChild(1, newColumnNode);
       } else {
         // recurse down

http://git-wip-us.apache.org/repos/asf/lens/blob/91ccec77/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
index 4c30d3f..f70cbe4 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
@@ -167,7 +167,7 @@ public class AutoJoinContext {
   public String getFromString(String fromTable, CandidateFact fact, Set<Dimension> qdims,
     Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext cubeql) throws LensException {
     String fromString = fromTable;
-    log.info("All paths dump:{}", cubeql.getAutoJoinCtx().getAllPaths());
+    log.info("All paths dump:{} Queried dims:{}", cubeql.getAutoJoinCtx().getAllPaths(), qdims);
     if (qdims == null || qdims.isEmpty()) {
       return fromString;
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/91ccec77/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index caea3af..100d7c9 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -945,6 +945,18 @@ public class CubeTestSetup {
     cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("userid", "int", "userid")));
     cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("xuserid", "int", "userid")));
     cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("yuserid", "int", "userid")));
+    cubeDimensions2.add(new ReferencedDimAttribute(new FieldSchema("xsports", "array<string>", ""),
+      "xuser sports", "xusersports", "name", null, null, null));
+    cubeDimensions2.add(new ReferencedDimAttribute(new FieldSchema("ysports", "array<string>", ""),
+      "yuser sports", "yusersports", "name", null, null, null));
+    cubeDimensions2.add(new ReferencedDimAttribute(new FieldSchema("sports", "array<string>", ""),
+      "user sports", "usersports", "name", null, null, null));
+    cubeDimensions2.add(new ReferencedDimAttribute(new FieldSchema("sportids", "array<int>", ""),
+      "user sports", "userInterestIds", "sport_id", null, null, null));
+    cubeDimensions2.add(new ReferencedDimAttribute(new FieldSchema("statecountry", "string", ""),
+      "state country", "cubestatecountry", "name", null, null, null));
+    cubeDimensions2.add(new ReferencedDimAttribute(new FieldSchema("citycountry", "string", ""),
+      "city country", "cubecitystatecountry", "name", null, null, null));
 
     Map<String, String> cubeProperties = new HashMap<>();
     cubeProperties.put(MetastoreUtil.getCubeTimedDimensionListKey(BASE_CUBE_NAME),
@@ -1104,9 +1116,13 @@ public class CubeTestSetup {
     cubeDimensions2.add(
       new ReferencedDimAttribute(new FieldSchema("cityStateCapital", "string", "State's capital thru city"),
         "State's capital thru city", "cityState", "capital", null, null, null));
-    client.createCube(BASE_CUBE_NAME, cubeMeasures2, cubeDimensions2, exprs, joinChains, cubeProperties);
+    Set<ExprColumn> baseExprs = new HashSet<>(exprs);
+    baseExprs.add(new ExprColumn(new FieldSchema("substrsprorts", "String", "substr of sports"), "substr sports",
+      "substr(sports, 10)"));
 
-    Map<String, String> derivedProperties = new HashMap<String, String>();
+    client.createCube(BASE_CUBE_NAME, cubeMeasures2, cubeDimensions2, baseExprs, joinChains, cubeProperties);
+
+    Map<String, String> derivedProperties = new HashMap<>();
     derivedProperties.put(MetastoreConstants.CUBE_ALL_FIELDS_QUERIABLE, "true");
     Set<String> measures = new HashSet<>();
     measures.add("msr1");

http://git-wip-us.apache.org/repos/asf/lens/blob/91ccec77/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBridgeTableQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBridgeTableQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBridgeTableQueries.java
new file mode 100644
index 0000000..e66c39b
--- /dev/null
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBridgeTableQueries.java
@@ -0,0 +1,293 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.lens.cube.parse;
+
+import static org.apache.lens.cube.metadata.DateFactory.TWO_DAYS_RANGE;
+import static org.apache.lens.cube.parse.CubeTestSetup.*;
+
+import static org.testng.Assert.assertTrue;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Test;
+
+public class TestBridgeTableQueries extends TestQueryRewrite {
+
+  // Shared rewriter configuration for every test in this class. Tests that need
+  // a different setting clone it via new Configuration(hConf) instead of mutating it.
+  private static HiveConf hConf = new HiveConf(TestBridgeTableQueries.class);
+
+  /**
+   * Configures the common rewrite settings once per TestNG run: restrict candidate
+   * storages to C1, enable automatic join resolution, group-by/select normalization
+   * and the aggregate resolver, and turn on bridge-table flattening — the feature
+   * under test in this class.
+   */
+  @BeforeTest
+  public void setupInstance() throws Exception {
+    hConf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
+    hConf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
+    hConf.setBoolean(CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT, true);
+    hConf.setBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, true);
+    hConf.setBoolean(CubeQueryConfUtil.DISABLE_AGGREGATE_RESOLVER, false);
+    hConf.setBoolean(CubeQueryConfUtil.ENABLE_FLATTENING_FOR_BRIDGETABLES, true);
+  }
+
+  /**
+   * Basic bridge-table flattening on unpartitioned dim tables: the many-to-many
+   * user_interests bridge is expected to be rewritten into a subquery that
+   * collect_set()s sports names per user_id. The same query is then issued via
+   * the chain ref column ("sports") and must produce the identical HQL.
+   */
+  @Test
+  public void testBridgeTablesWithoutDimtablePartitioning() throws Exception {
+    String query = "select usersports.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, hConf);
+    String expected = getExpectedQuery("basecube", "select usersports.name, sum(basecube.msr2) FROM ",
+      " join " + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
+        + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as name"
+        + " from " + getDbName() + "c1_user_interests_tbl user_interests"
+        + " join " + getDbName() + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
+        + " group by user_interests.user_id) usersports"
+        + " on userdim.id = usersports.user_id ",
+      null, "group by usersports.name", null,
+      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+    // run with chain ref column
+    query = "select sports, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    hqlQuery = rewrite(query, hConf);
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
+
+  /**
+   * Bridge-table flattening when the flattened column is wrapped in an expression:
+   * substr() must be applied outside the collect_set() subquery, and the group-by
+   * must be on the expression. Also run via the expression column "substrsprorts"
+   * (sic — the expression is registered under that misspelled name in CubeTestSetup).
+   */
+  @Test
+  public void testBridgeTablesForExprFieldWithoutDimtablePartitioning() throws Exception {
+    String query = "select substr(usersports.name, 10), sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, hConf);
+    String expected = getExpectedQuery("basecube", "select substr(usersports.name, 10), sum(basecube.msr2) FROM ",
+      " join " + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
+        + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as name"
+        + " from " + getDbName() + "c1_user_interests_tbl user_interests"
+        + " join " + getDbName() + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
+        + " group by user_interests.user_id) usersports"
+        + " on userdim.id = usersports.user_id ",
+      null, "group by substr(( usersports . name ),  10 )", null,
+      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+    // run with chain ref column
+    query = "select substrsprorts, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    hqlQuery = rewrite(query, hConf);
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
+
+  /**
+   * With flattening disabled, the bridge tables must be joined directly
+   * (plain join chain, no collect_set subquery). The per-row duplication
+   * this implies is the caller's problem when the flag is off.
+   */
+  @Test
+  public void testFlattenBridgeTablesOFF() throws Exception {
+    Configuration conf = new Configuration(hConf);
+    conf.setBoolean(CubeQueryConfUtil.ENABLE_FLATTENING_FOR_BRIDGETABLES, false);
+    String query = "select usersports.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, conf);
+    String expected = getExpectedQuery("basecube", "select usersports.name, sum(basecube.msr2) FROM ",
+      " join " + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
+        + " join " + getDbName() + "c1_user_interests_tbl user_interests on userdim.id = user_interests.user_id"
+        + " join " + getDbName() + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id",
+      null, "group by usersports.name", null,
+      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+    // run with chain ref column
+    query = "select sports, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    hqlQuery = rewrite(query, conf);
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
+
+  /**
+   * The aggregator used in the flattening subquery is configurable via
+   * BRIDGE_TABLE_FIELD_AGGREGATOR; here "custom_aggr" must replace the
+   * default collect_set in the generated subquery.
+   */
+  @Test
+  public void testFlattenBridgeTablesWithCustomAggregate() throws Exception {
+    Configuration conf = new Configuration(hConf);
+    conf.set(CubeQueryConfUtil.BRIDGE_TABLE_FIELD_AGGREGATOR, "custom_aggr");
+    String query = "select usersports.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, conf);
+    String expected = getExpectedQuery("basecube", "select usersports.name, sum(basecube.msr2) FROM ",
+      " join " + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
+        + " join (select user_interests.user_id as user_id,custom_aggr(usersports.name) as name"
+        + " from " + getDbName() + "c1_user_interests_tbl user_interests"
+        + " join " + getDbName() + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
+        + " group by user_interests.user_id) usersports"
+        + " on userdim.id = usersports.user_id ",
+      null, "group by usersports.name", null,
+      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+    // run with chain ref column
+    query = "select sports, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    hqlQuery = rewrite(query, conf);
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
+
+  /**
+   * Two chains sharing the same bridge table (userInterestIds and usersports)
+   * must each get their own flattening subquery over c1_user_interests_tbl,
+   * merged onto the shared userdim join. Also run via the chain ref columns
+   * ("sportids", "sports") and expect identical HQL.
+   */
+  @Test
+  public void testBridgeTablesWithMergingChains() throws Exception {
+    // renamed from testBridgeTablesWithMegringChains (typo); brand-new test in
+    // this commit, so no callers are affected.
+    String query = "select userInterestIds.sport_id, usersports.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, hConf);
+    String expected = getExpectedQuery("basecube", "select userInterestIds.sport_id, usersports.name,"
+      + " sum(basecube.msr2) FROM ",
+      " join " + getDbName() + "c1_usertable userdim on basecube.userid = userdim.id join (select userinterestids"
+        + ".user_id as user_id,collect_set(userinterestids.sport_id) as sport_id from " + getDbName()
+        + "c1_user_interests_tbl userinterestids group by userinterestids.user_id) userinterestids on userdim.id = "
+        + "userinterestids.user_id join (select userinterestids.user_id as user_id,collect_set(usersports.name) as name"
+        + " from " + getDbName() + "c1_user_interests_tbl userinterestids join "
+        + getDbName() + "c1_sports_tbl usersports on userinterestids.sport_id = usersports.id"
+        + " group by userinterestids.user_id) usersports on userdim.id = usersports.user_id",
+       null, "group by userInterestIds.sport_id, usersports.name", null,
+      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+    // run with chain ref column
+    query = "select sportids, sports, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    hqlQuery = rewrite(query, hConf);
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
+
+  /**
+   * Bridge-table flattening combined with a multi-fact query: msr2 and msr12
+   * live in different fact tables, so the rewriter must produce two sub-queries
+   * (mq1/mq2), each with its own flattening join, and stitch them with a full
+   * outer join on the flattened name using null-safe equality (<=>). The order
+   * in which mq1/mq2 pick up each fact is not deterministic, hence the two
+   * accepted prefixes. Re-run via the chain ref column "sports".
+   */
+  @Test
+  public void testBridgeTablesWithMultipleFacts() throws Exception {
+    String query = "select usersports.name, sum(msr2), sum(msr12) from basecube where " + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, hConf);
+    String expected1 = getExpectedQuery("basecube",
+        "select usersports.name as `name`, sum(basecube.msr2) as `msr2` FROM ", " join " + getDbName()
+            + "c1_usertable userdim ON basecube.userid = userdim.id "
+            + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as name" + " from "
+            + getDbName() + "c1_user_interests_tbl user_interests" + " join " + getDbName()
+            + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
+            + " group by user_interests.user_id) usersports" + " on userdim.id = usersports.user_id ", null,
+        "group by usersports.name", null, getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
+    String expected2 = getExpectedQuery("basecube",
+        "select usersports.name as `name`, sum(basecube.msr12) as `msr12` FROM ", " join " + getDbName()
+            + "c1_usertable userdim ON basecube.userid = userdim.id "
+            + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as name" + " from "
+            + getDbName() + "c1_user_interests_tbl user_interests" + " join " + getDbName()
+            + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
+            + " group by user_interests.user_id) usersports" + " on userdim.id = usersports.user_id ", null,
+        "group by usersports.name", null,
+      getWhereForDailyAndHourly2days("basecube", "c1_testfact2_base"));
+    TestCubeRewriter.compareContains(expected1, hqlQuery);
+    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    String lower = hqlQuery.toLowerCase();
+    // either fact may land in mq1 or mq2 — accept both orderings
+    assertTrue(
+      lower.startsWith("select coalesce(mq1.name, mq2.name) name, mq2.msr2 msr2, mq1.msr12 msr12 from ")
+      || lower.startsWith("select coalesce(mq1.name, mq2.name) name, mq1.msr2 msr2, mq2.msr12 msr12 from "), hqlQuery);
+
+    assertTrue(hqlQuery.contains("mq1 full outer join ") && hqlQuery.endsWith("mq2 on mq1.name <=> mq2.name"),
+      hqlQuery);
+
+    // run with chain ref column
+    query = "select sports, sum(msr2), sum(msr12) from basecube where " + TWO_DAYS_RANGE;
+    hqlQuery = rewrite(query, hConf);
+    expected1 = getExpectedQuery("basecube",
+      "select usersports.name as `sports`, sum(basecube.msr2) as `msr2` FROM ", " join " + getDbName()
+        + "c1_usertable userdim ON basecube.userid = userdim.id "
+        + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as name" + " from "
+        + getDbName() + "c1_user_interests_tbl user_interests" + " join " + getDbName()
+        + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
+        + " group by user_interests.user_id) usersports" + " on userdim.id = usersports.user_id ", null,
+      "group by usersports.name", null, getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
+    expected2 = getExpectedQuery("basecube",
+      "select usersports.name as `sports`, sum(basecube.msr12) as `msr12` FROM ", " join " + getDbName()
+        + "c1_usertable userdim ON basecube.userid = userdim.id "
+        + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as name" + " from "
+        + getDbName() + "c1_user_interests_tbl user_interests" + " join " + getDbName()
+        + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
+        + " group by user_interests.user_id) usersports" + " on userdim.id = usersports.user_id ", null,
+      "group by usersports.name", null,
+      getWhereForDailyAndHourly2days("basecube", "c1_testfact2_base"));
+    TestCubeRewriter.compareContains(expected1, hqlQuery);
+    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    lower = hqlQuery.toLowerCase();
+    assertTrue(
+      lower.startsWith("select coalesce(mq1.sports, mq2.sports) sports, mq2.msr2 msr2, mq1.msr12 msr12 from ")
+        || lower.startsWith("select coalesce(mq1.sports, mq2.sports) sports, mq1.msr2 msr2, mq2.msr12 msr12 from "),
+      hqlQuery);
+
+    assertTrue(hqlQuery.contains("mq1 full outer join ") && hqlQuery.endsWith("mq2 on mq1.sports <=> mq2.sports"),
+      hqlQuery);
+  }
+
+  /**
+   * Three independent chains to the same sports bridge (usersports via userid,
+   * xusersports via xuserid, yusersports via yuserid): each chain must get its
+   * own aliased copy of the user table (userdim, userdim_0, userdim_1) and its
+   * own flattening subquery. Re-run via the chain ref columns.
+   */
+  @Test
+  public void testBridgeTablesWithMultipleChains() throws Exception {
+    String query = "select usersports.name, xusersports.name, yusersports.name, sum(msr2) from basecube where "
+      + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, hConf);
+    String expected = getExpectedQuery("basecube", "select usersports.name, xusersports.name, yusersports.name,"
+      + " sum(basecube.msr2) FROM ",
+      " join " + getDbName() + "c1_usertable userdim_1 on basecube.userid = userdim_1.id "
+      + " join  (select user_interests_1.user_id as user_id, collect_set(usersports.name) as name from "
+      + getDbName() + "c1_user_interests_tbl user_interests_1 join " + getDbName() + "c1_sports_tbl usersports on "
+      + "user_interests_1.sport_id = usersports.id group by user_interests_1.user_id) "
+      + "usersports on userdim_1.id = usersports.user_id"
+      + " join " + getDbName() + "c1_usertable userdim_0 on basecube.yuserid = userdim_0.id "
+      + " join  (select user_interests_0.user_id as user_id,collect_set(yusersports.name) as name from "
+      + getDbName() + "c1_user_interests_tbl user_interests_0 join " + getDbName() + "c1_sports_tbl yusersports on "
+      + " user_interests_0.sport_id = yusersports.id group by user_interests_0.user_id) yusersports on userdim_0.id ="
+      + " yusersports.user_id join " + getDbName() + "c1_usertable userdim on basecube.xuserid = userdim.id"
+      + " join  (select user_interests.user_id as user_id,collect_set(xusersports.name) as name from "
+      + getDbName() + "c1_user_interests_tbl user_interests join " + getDbName() + "c1_sports_tbl xusersports"
+      + " on user_interests.sport_id = xusersports.id group by user_interests.user_id) xusersports on userdim.id = "
+      + " xusersports.user_id", null, "group by usersports.name, xusersports.name, yusersports.name", null,
+      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+    // run with chain ref column
+    query = "select sports, xsports, ysports, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    hqlQuery = rewrite(query, hConf);
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
+
+  /**
+   * Bridge-table flattening against storage C2, whose dim tables are partitioned:
+   * the generated joins must carry dt='latest' partition filters on the user,
+   * bridge, and sports tables. Re-run via the chain ref column "sports".
+   */
+  @Test
+  public void testBridgeTablesWithDimTablePartitioning() throws Exception {
+    Configuration conf = new Configuration(hConf);
+    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
+    String query = "select usersports.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, conf);
+    String expected = getExpectedQuery("basecube", "select usersports.name, sum(basecube.msr2) FROM ",
+      " join " + getDbName() + "c2_usertable userdim ON basecube.userid = userdim.id and userdim.dt='latest' "
+        + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as name"
+        + " from " + getDbName() + "c2_user_interests_tbl user_interests"
+        + " join " + getDbName() + "c2_sports_tbl usersports on user_interests.sport_id = usersports.id"
+        // fixed: expected SQL previously read "usersports.dt='latest and ..." with an
+        // unterminated quote; the partition filter must be usersports.dt='latest'.
+        + " and usersports.dt='latest' and user_interests.dt='latest'"
+        + " group by user_interests.user_id) usersports"
+        + " on userdim.id = usersports.user_id ",
+      null, "group by usersports.name", null,
+      getWhereForDailyAndHourly2days("basecube", "c2_testfact1_base"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+    // run with chain ref column
+    query = "select sports, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    hqlQuery = rewrite(query, conf);
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
+
+  /**
+   * Mixes a flattened bridge-table chain (usersports) with ordinary snowflake
+   * chains (cubestatecountry, cubecitystatecountry): only the bridge chain gets
+   * a collect_set subquery; the state/country chains remain plain joins with
+   * dt='latest' filters. Re-run via the chain ref columns.
+   */
+  @Test
+  public void testBridgeTablesWithNormalJoins() throws Exception {
+    String query = "select usersports.name, cubestatecountry.name, cubecitystatecountry.name,"
+      + " sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, hConf);
+    String expected = getExpectedQuery("basecube", "select usersports.name, cubestatecountry.name, "
+      + "cubecitystatecountry.name, sum(basecube.msr2) FROM ",
+      " join " + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
+        + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as name"
+        + " from " + getDbName() + "c1_user_interests_tbl user_interests"
+        + " join " + getDbName() + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
+        + " group by user_interests.user_id) usersports"
+        + " on userdim.id = usersports.user_id "
+        + " join " + getDbName() + "c1_citytable citydim on basecube.cityid = citydim.id and (citydim.dt = 'latest')"
+        + " join " + getDbName()
+        + "c1_statetable statedim_0 on citydim.stateid=statedim_0.id and statedim_0.dt='latest'"
+        + " join " + getDbName()
+        + "c1_countrytable cubecitystatecountry on statedim_0.countryid=cubecitystatecountry.id"
+        + " join " + getDbName() + "c1_statetable statedim on basecube.stateid=statedim.id and (statedim.dt = 'latest')"
+        + " join " + getDbName() + "c1_countrytable cubestatecountry on statedim.countryid=cubestatecountry.id ",
+      null, "group by usersports.name, cubestatecountry.name, cubecitystatecountry.name", null,
+      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+    // run with chain ref column
+    query = "select sports, statecountry, citycountry, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    hqlQuery = rewrite(query, hConf);
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/91ccec77/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 698f36c..e569fb1 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -306,7 +306,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
       System.err.println("__FAILED__ " + method + "\n\tExpected: " + expected + "\n\t---------\n\tActual: " + actual);
     }
-    assertTrue(actualTrimmed.toLowerCase().contains(expectedTrimmed.toLowerCase()));
+    assertTrue(actualTrimmed.toLowerCase().contains(expectedTrimmed.toLowerCase()), "Expected:" + expected
+      + "Actual:" + actual);
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/lens/blob/91ccec77/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
index 2cf92b9..6430ed1 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
@@ -86,7 +86,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     expectedClauses.add(getDbName()
       + "c1_testdim4tbl dim4chain on testdim3.testdim4id = dim4chain.id and (dim4chain.dt = 'latest')");
 
-    List<String> actualClauses = new ArrayList<String>();
+    List<String> actualClauses = new ArrayList<>();
     for (String clause : StringUtils.splitByWholeSeparator(getAutoResolvedFromString(rewrittenQuery), "join")) {
       if (StringUtils.isNotBlank(clause)) {
         actualClauses.add(clause.trim());
@@ -135,13 +135,13 @@ public class TestJoinResolver extends TestQueryRewrite {
     String hqlQuery = rewrite(query, hconf);
     String expected = getExpectedQuery("testcube", "select citydim.name, testDim4.name, sum(testcube.msr2) FROM ",
       " left outer JOIN " + getDbName() + "c1_citytable citydim on testcube.cityid = citydim.id +"
-      + " and (( citydim . name ) =  'FOOBAR' ) and (citydim.dt = 'latest')"
-      + " right outer join " + getDbName()
-      + "c1_testdim2tbl testdim2 on testcube.dim2 = testdim2.id and (testdim2.dt = 'latest')"
-      + " right outer join " + getDbName() + "c1_testdim3tbl testdim3 on testdim2.testdim3id = testdim3.id and "
-      + "(testdim3.dt = 'latest') "
-      + " right outer join " + getDbName() + "c1_testdim4tbl testdim4 on testdim3.testdim4id = testdim4.id and "
-      + "(( testdim4 . name ) =  'TESTDIM4NAME' ) and (testdim4.dt = 'latest')",
+        + " and (( citydim . name ) =  'FOOBAR' ) and (citydim.dt = 'latest')"
+        + " right outer join " + getDbName()
+        + "c1_testdim2tbl testdim2 on testcube.dim2 = testdim2.id and (testdim2.dt = 'latest')"
+        + " right outer join " + getDbName() + "c1_testdim3tbl testdim3 on testdim2.testdim3id = testdim3.id and "
+        + "(testdim3.dt = 'latest') "
+        + " right outer join " + getDbName() + "c1_testdim4tbl testdim4 on testdim3.testdim4id = testdim4.id and "
+        + "(( testdim4 . name ) =  'TESTDIM4NAME' ) and (testdim4.dt = 'latest')",
       null, "group by citydim.name, testdim4.name", null,
       getWhereForDailyAndHourly2days("testcube", "c1_summary3"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
@@ -177,7 +177,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     // Check that aliases are preserved in the join clause
     String expected = getExpectedQuery("testcube", "select cubecity.name, sum(testcube.msr2) FROM ",
       " left outer join " + getDbName()
-      + "c1_citytable cubecity ON testcube.cityid = cubecity.id and (cubecity.dt = 'latest')",
+        + "c1_citytable cubecity ON testcube.cityid = cubecity.id and (cubecity.dt = 'latest')",
       null, " group by cubecity.name", null, getWhereForHourly2days("testcube", "c1_testfact2"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
@@ -186,7 +186,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     // Check that aliases are preserved in the join clause
     expected = getExpectedQuery("testcube", "select cubecity.name, sum(testcube.msr2) FROM ",
       " full outer join " + getDbName()
-      + "c1_citytable cubecity ON testcube.cityid = cubecity.id and (cubecity.dt = 'latest')",
+        + "c1_citytable cubecity ON testcube.cityid = cubecity.id and (cubecity.dt = 'latest')",
       null, " group by cubecity.name", null, getWhereForHourly2days("testcube", "c1_testfact2"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
@@ -195,7 +195,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     // Check that aliases are preserved in the join clause
     expected = getExpectedQuery("testcube", "select cubecity.name, sum(testcube.msr2) FROM ",
       " right outer join " + getDbName()
-      + "c1_citytable cubecity ON testcube.cityid = cubecity.id",
+        + "c1_citytable cubecity ON testcube.cityid = cubecity.id",
       null, " and (cubecity.dt = 'latest') group by cubecity.name", null,
       getWhereForHourly2days("testcube", "c1_testfact2"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
@@ -224,7 +224,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     // Check that aliases are preserved in the join clause
     String expected = getExpectedQuery("t", "select cubecity.name, sum(t.msr2) FROM ",
       " left outer join " + getDbName()
-      + "c1_citytable cubecity ON t.cityid = cubecity.id and (cubecity.dt = 'latest')",
+        + "c1_citytable cubecity ON t.cityid = cubecity.id and (cubecity.dt = 'latest')",
       null, " group by cubecity.name", null, getWhereForHourly2days("t", "c1_testfact2"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
   }
@@ -237,7 +237,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     String hqlQuery = rewrite(query, tConf);
     String expected =
       getExpectedQuery("citydim", "select citydim.name, citystate.name from ", " inner join " + getDbName()
-        + "c1_statetable citystate on citydim.stateid = citystate.id and (citystate.dt = 'latest')",
+          + "c1_statetable citystate on citydim.stateid = citystate.id and (citystate.dt = 'latest')",
         null, " limit 10", "c1_citytable", true);
     compareQueries(hqlQuery, expected);
   }
@@ -252,7 +252,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     String hqlQuery = rewrite(queryWithJoin, tConf);
     String expected =
       getExpectedQuery("citydim", "select citydim.name, statedim.name from ", " inner join " + getDbName()
-        + "c1_statetable statedim on citydim.stateid = statedim.id and citydim.dt='latest' and statedim.dt='latest'",
+          + "c1_statetable statedim on citydim.stateid = statedim.id and citydim.dt='latest' and statedim.dt='latest'",
         null, null, "c1_citytable", false);
     compareQueries(hqlQuery, expected);
   }
@@ -264,7 +264,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     String hqlQuery = rewrite(q1, hconf);
     String expected =
       getExpectedQuery("citydim", "select citydim.name, statedim.name from ", " left outer join " + getDbName()
-        + "c1_statetable statedim on citydim.stateid = statedim.id and citydim.dt='latest' and statedim.dt='latest'",
+          + "c1_statetable statedim on citydim.stateid = statedim.id and citydim.dt='latest' and statedim.dt='latest'",
         null, null, "c1_citytable", false);
     compareQueries(hqlQuery, expected);
 
@@ -273,7 +273,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     hqlQuery = rewrite(q2, hconf);
     expected =
       getExpectedQuery("citydim", "select citydim.name, statedim.name from ", " right outer join " + getDbName()
-        + "c1_statetable statedim on citydim.stateid = statedim.id and citydim.dt='latest' and statedim.dt='latest'",
+          + "c1_statetable statedim on citydim.stateid = statedim.id and citydim.dt='latest' and statedim.dt='latest'",
         null, null, "c1_citytable", false);
     compareQueries(hqlQuery, expected);
 
@@ -282,8 +282,10 @@ public class TestJoinResolver extends TestQueryRewrite {
     hqlQuery = rewrite(q3, hconf);
     expected =
       getExpectedQuery("citydim", "select citydim.name, statedim.name from ", " full outer join " + getDbName()
-        + "c1_statetable statedim on citydim.stateid = statedim.id and citydim.dt='latest' and statedim.dt='latest'",
+          + "c1_statetable statedim on citydim.stateid = statedim.id and citydim.dt='latest' and statedim.dt='latest'",
         null, null, "c1_citytable", false);
+    compareQueries(hqlQuery, expected);
+
   }
 
   @Test
@@ -294,7 +296,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     String hqlQuery = rewrite(q, conf);
     String expected =
       getExpectedQuery("citydim", "select citydim.name, citystate.name from ", " left outer join " + getDbName()
-        + "c1_statetable citystate on citydim.stateid = citystate.id and (citystate.dt = 'latest')",
+          + "c1_statetable citystate on citydim.stateid = citystate.id and (citystate.dt = 'latest')",
         null, " limit 10", "c1_citytable", true);
     compareQueries(hqlQuery, expected);
 
@@ -302,7 +304,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     hqlQuery = rewrite(q, conf);
     expected =
       getExpectedQuery("citydim", "select citydim.name, citystate.name from ", " right outer join " + getDbName()
-        + "c1_statetable citystate on citydim.stateid = citystate.id and (citydim.dt = 'latest')",
+          + "c1_statetable citystate on citydim.stateid = citystate.id and (citydim.dt = 'latest')",
         " citystate.dt='latest' ", "limit 10", "c1_citytable", false);
     compareQueries(hqlQuery, expected);
 
@@ -451,8 +453,8 @@ public class TestJoinResolver extends TestQueryRewrite {
 
     // Test 4 Dim only query with join chains
 
-    List<String> expectedClauses = new ArrayList<String>();
-    List<String> actualClauses = new ArrayList<String>();
+    List<String> expectedClauses = new ArrayList<>();
+    List<String> actualClauses = new ArrayList<>();
     String dimOnlyQuery = "select testDim2.name, testDim2.cityStateCapital FROM testDim2 where " + TWO_DAYS_RANGE;
     CubeQueryRewriter driver = new CubeQueryRewriter(hconf, hconf);
     CubeQueryContext rewrittenQuery = driver.rewrite(dimOnlyQuery);
@@ -636,7 +638,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     CubeQueryContext rewrittenQuery = driver.rewrite(dimOnlyQuery);
     rewrittenQuery.toHQL();
     Dimension citydim = CubeMetastoreClient.getInstance(hconf).getDimension("citydim");
-    Set<String> cdimTables = new HashSet<String>();
+    Set<String> cdimTables = new HashSet<>();
     for (CandidateDim cdim : rewrittenQuery.getCandidateDims().get(citydim)) {
       cdimTables.add(cdim.getName());
     }
@@ -660,183 +662,4 @@ public class TestJoinResolver extends TestQueryRewrite {
     assertLensExceptionInRewrite("select unreachableDim_chain.name from testcube where " + TWO_DAYS_RANGE, hconf,
       LensCubeErrorCode.NO_FACT_HAS_COLUMN);
   }
-
-  @Test
-  public void testBridgeTablesWithoutDimtablePartitioning() throws Exception {
-    Configuration conf = new Configuration(hconf);
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
-    String query = "select usersports.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
-    String hqlQuery = rewrite(query, conf);
-    String expected = getExpectedQuery("basecube", "select usersports.name, sum(basecube.msr2) FROM ",
-      " join " + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
-        + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as name"
-        + " from " + getDbName() + "c1_user_interests_tbl user_interests"
-        + " join " + getDbName() + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
-        + " group by user_interests.user_id) usersports"
-        + " on userdim.id = usersports.user_id ",
-      null, "group by usersports.name", null,
-      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
-    TestCubeRewriter.compareQueries(hqlQuery, expected);
-  }
-
-  @Test
-  public void testFlattenBridgeTablesOFF() throws Exception {
-    Configuration conf = new Configuration(hconf);
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
-    conf.setBoolean(CubeQueryConfUtil.ENABLE_FLATTENING_FOR_BRIDGETABLES, false);
-    String query = "select usersports.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
-    String hqlQuery = rewrite(query, conf);
-    String expected = getExpectedQuery("basecube", "select usersports.name, sum(basecube.msr2) FROM ",
-      " join " + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
-        + " join " + getDbName() + "c1_user_interests_tbl user_interests on userdim.id = user_interests.user_id"
-        + " join " + getDbName() + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id",
-      null, "group by usersports.name", null,
-      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
-    TestCubeRewriter.compareQueries(hqlQuery, expected);
-  }
-
-  @Test
-  public void testFlattenBridgeTablesWithCustomAggregate() throws Exception {
-    Configuration conf = new Configuration(hconf);
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
-    conf.set(CubeQueryConfUtil.BRIDGE_TABLE_FIELD_AGGREGATOR, "custom_aggr");
-    String query = "select usersports.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
-    String hqlQuery = rewrite(query, conf);
-    String expected = getExpectedQuery("basecube", "select usersports.name, sum(basecube.msr2) FROM ",
-      " join " + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
-        + " join (select user_interests.user_id as user_id,custom_aggr(usersports.name) as name"
-        + " from " + getDbName() + "c1_user_interests_tbl user_interests"
-        + " join " + getDbName() + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
-        + " group by user_interests.user_id) usersports"
-        + " on userdim.id = usersports.user_id ",
-      null, "group by usersports.name", null,
-      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
-    TestCubeRewriter.compareQueries(hqlQuery, expected);
-  }
-
-  @Test
-  public void testBridgeTablesWithMegringChains() throws Exception {
-    Configuration conf = new Configuration(hconf);
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
-    String query = "select userInterestIds.sport_id, usersports.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
-    String hqlQuery = rewrite(query, conf);
-    String expected = getExpectedQuery("basecube", "select userInterestIds.sport_id, usersports.name,"
-      + " sum(basecube.msr2) FROM ",
-      " join " + getDbName() + "c1_usertable userdim on basecube.userid = userdim.id join (select userinterestids"
-        + ".user_id as user_id,collect_set(userinterestids.sport_id) as sport_id from " + getDbName()
-        + "c1_user_interests_tbl userinterestids group by userinterestids.user_id) userinterestids on userdim.id = "
-        + "userinterestids.user_id join (select userinterestids.user_id as user_id,collect_set(usersports.name) as name"
-        + " from " + getDbName() + "c1_user_interests_tbl userinterestids join "
-        + getDbName() + "c1_sports_tbl usersports on userinterestids.sport_id = usersports.id"
-        + " group by userinterestids.user_id) usersports on userdim.id = usersports.user_id",
-       null, "group by userInterestIds.sport_id, usersports.name", null,
-      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
-    TestCubeRewriter.compareQueries(hqlQuery, expected);
-  }
-
-  @Test
-  public void testBridgeTablesWithMultipleFacts() throws Exception {
-    Configuration conf = new Configuration(hconf);
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
-    String query = "select usersports.name, sum(msr2), sum(msr12) from basecube where " + TWO_DAYS_RANGE;
-    String hqlQuery = rewrite(query, conf);
-    String expected1 = getExpectedQuery("basecube",
-        "select usersports.name as `name`, sum(basecube.msr2) as `msr2` FROM ", " join " + getDbName()
-            + "c1_usertable userdim ON basecube.userid = userdim.id "
-            + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as name" + " from "
-            + getDbName() + "c1_user_interests_tbl user_interests" + " join " + getDbName()
-            + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
-            + " group by user_interests.user_id) usersports" + " on userdim.id = usersports.user_id ", null,
-        "group by usersports.name", null, getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
-    String expected2 = getExpectedQuery("basecube",
-        "select usersports.name as `name`, sum(basecube.msr12) as `msr12` FROM ", " join " + getDbName()
-            + "c1_usertable userdim ON basecube.userid = userdim.id "
-            + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as name" + " from "
-            + getDbName() + "c1_user_interests_tbl user_interests" + " join " + getDbName()
-            + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
-            + " group by user_interests.user_id) usersports" + " on userdim.id = usersports.user_id ", null,
-        "group by usersports.name", null,
-      getWhereForDailyAndHourly2days("basecube", "c1_testfact2_base"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
-    String lower = hqlQuery.toLowerCase();
-    assertTrue(
-      lower.startsWith("select coalesce(mq1.name, mq2.name) name, mq2.msr2 msr2, mq1.msr12 msr12 from ")
-      || lower.startsWith("select coalesce(mq1.name, mq2.name) name, mq1.msr2 msr2, mq2.msr12 msr12 from "), hqlQuery);
-
-    assertTrue(hqlQuery.contains("mq1 full outer join ") && hqlQuery.endsWith("mq2 on mq1.name <=> mq2.name"),
-      hqlQuery);
-  }
-
-  @Test
-  public void testBridgeTablesWithMultipleChains() throws Exception {
-    Configuration conf = new Configuration(hconf);
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
-    String query = "select usersports.name, xusersports.name, yusersports.name, sum(msr2) from basecube where "
-      + TWO_DAYS_RANGE;
-    String hqlQuery = rewrite(query, conf);
-    String expected = getExpectedQuery("basecube", "select usersports.name, xusersports.name, yusersports.name,"
-      + " sum(basecube.msr2) FROM ",
-      " join " + getDbName() + "c1_usertable userdim_1 on basecube.userid = userdim_1.id "
-      + " join  (select user_interests_1.user_id as user_id, collect_set(usersports.name) as name from "
-      + getDbName() + "c1_user_interests_tbl user_interests_1 join " + getDbName() + "c1_sports_tbl usersports on "
-      + "user_interests_1.sport_id = usersports.id group by user_interests_1.user_id) "
-      + "usersports on userdim_1.id = usersports.user_id"
-      + " join " + getDbName() + "c1_usertable userdim_0 on basecube.yuserid = userdim_0.id "
-      + " join  (select user_interests_0.user_id as user_id,collect_set(yusersports.name) as name from "
-      + getDbName() + "c1_user_interests_tbl user_interests_0 join " + getDbName() + "c1_sports_tbl yusersports on "
-      + " user_interests_0.sport_id = yusersports.id group by user_interests_0.user_id) yusersports on userdim_0.id ="
-      + " yusersports.user_id join " + getDbName() + "c1_usertable userdim on basecube.xuserid = userdim.id"
-      + " join  (select user_interests.user_id as user_id,collect_set(xusersports.name) as name from "
-      + getDbName() + "c1_user_interests_tbl user_interests join " + getDbName() + "c1_sports_tbl xusersports"
-      + " on user_interests.sport_id = xusersports.id group by user_interests.user_id) xusersports on userdim.id = "
-      + " xusersports.user_id", null, "group by usersports.name, xusersports.name, yusersports.name", null,
-      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
-    TestCubeRewriter.compareQueries(hqlQuery, expected);
-  }
-  @Test
-  public void testBridgeTablesWithDimTablePartitioning() throws Exception {
-    Configuration conf = new Configuration(hconf);
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
-    String query = "select usersports.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
-    String hqlQuery = rewrite(query, conf);
-    String expected = getExpectedQuery("basecube", "select usersports.name, sum(basecube.msr2) FROM ",
-      " join " + getDbName() + "c2_usertable userdim ON basecube.userid = userdim.id and userdim.dt='latest' "
-        + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as name"
-        + " from " + getDbName() + "c2_user_interests_tbl user_interests"
-        + " join " + getDbName() + "c2_sports_tbl usersports on user_interests.sport_id = usersports.id"
-        + " and usersports.dt='latest and user_interests.dt='latest'"
-        + " group by user_interests.user_id) usersports"
-        + " on userdim.id = usersports.user_id ",
-      null, "group by usersports.name", null,
-      getWhereForDailyAndHourly2days("basecube", "c2_testfact1_base"));
-    TestCubeRewriter.compareQueries(hqlQuery, expected);
-  }
-
-  @Test
-  public void testBridgeTablesWithNormalJoins() throws Exception {
-    Configuration conf = new Configuration(hconf);
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
-    String query = "select usersports.name, cubestatecountry.name, cubecitystatecountry.name,"
-      + " sum(msr2) from basecube where " + TWO_DAYS_RANGE;
-    String hqlQuery = rewrite(query, conf);
-    String expected = getExpectedQuery("basecube", "select usersports.name, cubestatecountry.name, "
-      + "cubecitystatecountry.name, sum(basecube.msr2) FROM ",
-      " join " + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
-        + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as name"
-        + " from " + getDbName() + "c1_user_interests_tbl user_interests"
-        + " join " + getDbName() + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
-        + " group by user_interests.user_id) usersports"
-        + " on userdim.id = usersports.user_id "
-        + " join " + getDbName() + "c1_citytable citydim on basecube.cityid = citydim.id and (citydim.dt = 'latest')"
-        + " join " + getDbName()
-        + "c1_statetable statedim_0 on citydim.stateid=statedim_0.id and statedim_0.dt='latest'"
-        + " join " + getDbName()
-        + "c1_countrytable cubecitystatecountry on statedim_0.countryid=cubecitystatecountry.id"
-        + " join " + getDbName() + "c1_statetable statedim on basecube.stateid=statedim.id and (statedim.dt = 'latest')"
-        + " join " + getDbName() + "c1_countrytable cubestatecountry on statedim.countryid=cubestatecountry.id ",
-      null, "group by usersports.name, cubestatecountry.name, cubecitystatecountry.name", null,
-      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
-    TestCubeRewriter.compareQueries(hqlQuery, expected);
-  }
 }


[17/51] [abbrv] lens git commit: LENS-908 : Add error codes for jdbc query execution exception

Posted by de...@apache.org.
LENS-908 : Add error codes for jdbc query execution exception


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/fdf04be0
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/fdf04be0
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/fdf04be0

Branch: refs/heads/current-release-line
Commit: fdf04be044e727e9d86708b7b0e24b0aec9ed6a3
Parents: 4d7c8e4
Author: Sushil Mohanty <su...@apache.org>
Authored: Wed Dec 30 18:30:46 2015 +0530
Committer: Sushil Mohanty <su...@apache.org>
Committed: Wed Dec 30 18:30:46 2015 +0530

----------------------------------------------------------------------
 lens-api/src/main/resources/lens-errors.conf    |  6 ++--
 .../org/apache/lens/driver/hive/HiveDriver.java |  4 +--
 .../lens/driver/hive/LensHiveErrorCode.java     | 36 --------------------
 .../apache/lens/driver/hive/TestHiveDriver.java |  4 +--
 .../org/apache/lens/driver/jdbc/JDBCDriver.java | 25 +++++++++++++-
 .../apache/lens/driver/jdbc/TestJdbcDriver.java | 14 ++++++++
 .../server/api/error/LensDriverErrorCode.java   | 36 ++++++++++++++++++++
 .../server/query/QueryAPIErrorResponseTest.java | 24 ++++++-------
 .../lens/server/query/TestQueryService.java     |  4 +--
 9 files changed, 95 insertions(+), 58 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/fdf04be0/lens-api/src/main/resources/lens-errors.conf
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/lens-errors.conf b/lens-api/src/main/resources/lens-errors.conf
index 06960a0..c7ccea1 100644
--- a/lens-api/src/main/resources/lens-errors.conf
+++ b/lens-api/src/main/resources/lens-errors.conf
@@ -317,7 +317,7 @@ lensCubeErrorsForMetastore = [
 
 ]
 
-lensHiveErrors = [
+lensDriverErrors = [
   {
     errorCode = 4001
     httpStatusCode = ${BAD_REQUEST}
@@ -327,7 +327,7 @@ lensHiveErrors = [
   {
     errorCode = 4002
     httpStatusCode = ${INTERNAL_SERVER_ERROR}
-    errorMsg = "Hive Error : %s"
+    errorMsg = "Driver Execution Error : %s"
   }
 ]
 
@@ -364,4 +364,4 @@ lensCubeErrors = ${lensCubeErrorsForQuery}${lensCubeErrorsForMetastore}
 # Lens server and Lens client are only aware of errors array. They are not aware of any other array defined in
 # error configuration files. Hence an errors array is prepared which is a concatenation of all other error arrays.
 
-errors = ${lensCommonErrors}${lensServerErrors}${lensCubeErrors}${lensHiveErrors}
+errors = ${lensCommonErrors}${lensServerErrors}${lensCubeErrors}${lensDriverErrors}

http://git-wip-us.apache.org/repos/asf/lens/blob/fdf04be0/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
index c7ef8f1..149c6ab 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
@@ -18,7 +18,7 @@
  */
 package org.apache.lens.driver.hive;
 
-import static org.apache.lens.driver.hive.LensHiveErrorCode.*;
+import static org.apache.lens.server.api.error.LensDriverErrorCode.*;
 import static org.apache.lens.server.api.util.LensUtil.getImplementations;
 
 import java.io.ByteArrayInputStream;
@@ -579,7 +579,7 @@ public class HiveDriver extends AbstractLensDriver {
     if (ex.getMessage().contains("SemanticException")) {
       throw new LensException(SEMANTIC_ERROR.getLensErrorInfo(), ex, ex.getMessage());
     }
-    throw new LensException(HIVE_ERROR.getLensErrorInfo(), ex, ex.getMessage());
+    throw new LensException(DRIVER_ERROR.getLensErrorInfo(), ex, ex.getMessage());
   }
 
   /*

http://git-wip-us.apache.org/repos/asf/lens/blob/fdf04be0/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/LensHiveErrorCode.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/LensHiveErrorCode.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/LensHiveErrorCode.java
deleted file mode 100644
index 3bac9e7..0000000
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/LensHiveErrorCode.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.driver.hive;
-
-import org.apache.lens.server.api.LensErrorInfo;
-
-public enum LensHiveErrorCode {
-
-  SEMANTIC_ERROR(4001, 10000), HIVE_ERROR(4002, 10000);
-
-  public LensErrorInfo getLensErrorInfo() {
-    return this.errorInfo;
-  }
-
-  LensHiveErrorCode(final int code, final int weight) {
-    this.errorInfo = new LensErrorInfo(code, weight, name());
-  }
-
-  private final LensErrorInfo errorInfo;
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/fdf04be0/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
index 06552ea..ba3f3d5 100644
--- a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
+++ b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
@@ -682,8 +682,8 @@ public class TestHiveDriver {
       driver.estimate(createExplainContext("SELECT ID FROM nonexist", conf));
       fail("Should not reach here");
     } catch (LensException e) {
-      assertEquals(LensUtil.getCauseMessage(e), "Error while"
-        + " compiling statement: FAILED: SemanticException [Error 10001]: Line 1:32 Table not found 'nonexist'");
+      assertTrue(LensUtil.getCauseMessage(e).contains("Error while"
+        + " compiling statement: FAILED: SemanticException [Error 10001]: Line 1:32 Table not found 'nonexist'"));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/fdf04be0/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
index d3fa904..82d7513 100644
--- a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
+++ b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
@@ -43,6 +43,7 @@ import org.apache.lens.api.query.QueryPrepareHandle;
 import org.apache.lens.cube.parse.HQLParser;
 import org.apache.lens.server.api.driver.*;
 import org.apache.lens.server.api.driver.DriverQueryStatus.DriverQueryState;
+import org.apache.lens.server.api.error.LensDriverErrorCode;
 import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.events.LensEventListener;
 import org.apache.lens.server.api.metrics.MethodMetricsContext;
@@ -56,6 +57,7 @@ import org.apache.lens.server.api.query.constraint.QueryLaunchingConstraint;
 import org.apache.lens.server.api.query.cost.FactPartitionBasedQueryCost;
 import org.apache.lens.server.api.query.cost.QueryCost;
 import org.apache.lens.server.api.query.rewrite.QueryRewriter;
+import org.apache.lens.server.api.util.LensUtil;
 import org.apache.lens.server.model.LogSegregationContext;
 import org.apache.lens.server.model.MappedDiagnosticLogSegregationContext;
 
@@ -795,7 +797,7 @@ public class JDBCDriver extends AbstractLensDriver {
         throw new LensException(stmt.getWarnings());
       }
     } catch (SQLException sql) {
-      throw new LensException(sql);
+      handleJDBCSQLException(sql);
     } finally {
       if (conn != null) {
         try {
@@ -810,6 +812,27 @@ public class JDBCDriver extends AbstractLensDriver {
     return stmt;
   }
 
+  /**
+   * Handle sql exception
+   *
+   * @param sqlex SQLException
+   * @throws LensException
+   */
+  private LensException handleJDBCSQLException(SQLException sqlex) throws LensException {
+    String cause = LensUtil.getCauseMessage(sqlex);
+    if (getSqlSynataxExceptions(sqlex).contains("SyntaxError")) {
+      throw new LensException(LensDriverErrorCode.SEMANTIC_ERROR.getLensErrorInfo(), sqlex, cause);
+    }
+    throw new LensException(LensDriverErrorCode.DRIVER_ERROR.getLensErrorInfo(), sqlex, cause);
+  }
+
+  private String getSqlSynataxExceptions(Throwable e) {
+    String exp = null;
+    if (e.getCause() != null) {
+      exp = e.getClass() + getSqlSynataxExceptions(e.getCause());
+    }
+    return exp;
+  }
 
   /**
    * Prepare the given query.

http://git-wip-us.apache.org/repos/asf/lens/blob/fdf04be0/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
index 2ad9fcb..b96cf88 100644
--- a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
+++ b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
@@ -411,6 +411,20 @@ public class TestJdbcDriver {
     }
   }
 
+  @Test
+  public void testJdbcSqlException() throws Exception {
+    final String query = "SELECT invalid_column FROM execute_test";
+    try {
+      PreparedQueryContext pContext = new PreparedQueryContext(query, "SA", baseConf, drivers);
+      driver.validate(pContext);
+      driver.prepare(pContext);
+    } catch (LensException e) {
+      assertEquals(e.getErrorInfo().getErrorCode(), 4001);
+      assertEquals(e.getErrorInfo().getErrorName(), "SEMANTIC_ERROR");
+      assertTrue(e.getMessage().contains("user lacks privilege or object not found: EXECUTE_TEST"));
+    }
+  }
+
   /**
    * Test type casting of char, varchar, nvarchar and decimal type
    *

http://git-wip-us.apache.org/repos/asf/lens/blob/fdf04be0/lens-server-api/src/main/java/org/apache/lens/server/api/error/LensDriverErrorCode.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/error/LensDriverErrorCode.java b/lens-server-api/src/main/java/org/apache/lens/server/api/error/LensDriverErrorCode.java
new file mode 100644
index 0000000..0c6257b
--- /dev/null
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/error/LensDriverErrorCode.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.server.api.error;
+
+import org.apache.lens.server.api.LensErrorInfo;
+
+public enum LensDriverErrorCode {
+
+  SEMANTIC_ERROR(4001, 10000), DRIVER_ERROR(4002, 10000);
+
+  public LensErrorInfo getLensErrorInfo() {
+    return this.errorInfo;
+  }
+
+  LensDriverErrorCode(final int code, final int weight) {
+    this.errorInfo = new LensErrorInfo(code, weight, name());
+  }
+
+  private final LensErrorInfo errorInfo;
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/fdf04be0/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java b/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java
index 69c3f46..6f8886c 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java
@@ -27,12 +27,11 @@ import static org.apache.lens.server.common.RestAPITestUtil.*;
 import static org.apache.lens.server.common.TestDataUtils.*;
 import static org.apache.lens.server.error.LensServerErrorCode.*;
 
-import java.util.Arrays;
+import static org.testng.Assert.assertTrue;
 
 import javax.ws.rs.client.WebTarget;
 import javax.ws.rs.core.Application;
 import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.Status;
 import javax.xml.datatype.DatatypeConfigurationException;
 
 import org.apache.lens.api.LensConf;
@@ -40,6 +39,7 @@ import org.apache.lens.api.LensSessionHandle;
 import org.apache.lens.api.jaxb.LensJAXBContextResolver;
 import org.apache.lens.api.metastore.*;
 import org.apache.lens.api.query.SupportedQuerySubmitOperations;
+import org.apache.lens.api.result.LensAPIResult;
 import org.apache.lens.api.result.LensErrorTO;
 import org.apache.lens.cube.error.ColUnAvailableInTimeRange;
 import org.apache.lens.server.LensJerseyTest;
@@ -160,21 +160,21 @@ public class QueryAPIErrorResponseTest extends LensJerseyTest {
     final String testQuery = "select * from non_existing_table";
     Response response = estimate(target(), Optional.of(sessionId), Optional.of(testQuery));
 
-    final String expectedErrMsg = "Semantic Error : Error while compiling statement: "
+    final String expectedErrMsg1 = "Semantic Error : Error while compiling statement: "
       + "FAILED: SemanticException [Error 10001]: Line 1:31 Table not found 'non_existing_table'";
 
-    LensErrorTO childError1 = LensErrorTO.composedOf(INTERNAL_SERVER_ERROR.getValue(),
-      expectedErrMsg, MOCK_STACK_TRACE);
-    LensErrorTO childError2 = LensErrorTO.composedOf(INTERNAL_SERVER_ERROR.getValue(),
-        expectedErrMsg, MOCK_STACK_TRACE);
+    final String expectedErrMsg2 = "Semantic Error : user lacks privilege or object not found: NON_EXISTING_TABLE";
 
-    LensErrorTO expectedLensErrorTO = LensErrorTO.composedOf(INTERNAL_SERVER_ERROR.getValue(),
-        expectedErrMsg, MOCK_STACK_TRACE, Arrays.asList(childError1, childError2));
+    LensErrorTO expectedLensErrorTO1 = LensErrorTO.composedOf(INTERNAL_SERVER_ERROR.getValue(),
+            expectedErrMsg1, MOCK_STACK_TRACE);
 
-    ErrorResponseExpectedData expectedData = new ErrorResponseExpectedData(Status.BAD_REQUEST,
-      expectedLensErrorTO);
+    LensErrorTO expectedLensErrorTO2 = LensErrorTO.composedOf(INTERNAL_SERVER_ERROR.getValue(),
+            expectedErrMsg2, MOCK_STACK_TRACE);
 
-    expectedData.verify(response);
+    LensErrorTO responseLensErrorTO = response.readEntity(LensAPIResult.class).getLensErrorTO();
+
+    assertTrue(expectedLensErrorTO1.getMessage().equals(responseLensErrorTO.getMessage())
+            || expectedLensErrorTO2.getMessage().equals(responseLensErrorTO.getMessage()));
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/lens/blob/fdf04be0/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index 82afcdc..c5d75de 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -49,12 +49,12 @@ import org.apache.lens.api.result.LensErrorTO;
 import org.apache.lens.api.result.QueryCostTO;
 import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.driver.hive.HiveDriver;
-import org.apache.lens.driver.hive.LensHiveErrorCode;
 import org.apache.lens.server.LensJerseyTest;
 import org.apache.lens.server.LensServerTestUtil;
 import org.apache.lens.server.LensServices;
 import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.driver.LensDriver;
+import org.apache.lens.server.api.error.LensDriverErrorCode;
 import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.metrics.LensMetricsRegistry;
 import org.apache.lens.server.api.metrics.MetricsService;
@@ -460,7 +460,7 @@ public class TestQueryService extends LensJerseyTest {
     List<LensErrorTO> childErrors = result.getLensErrorTO().getChildErrors();
     boolean hiveSemanticErrorExists=false;
     for (LensErrorTO error : childErrors) {
-      if (error.getCode() == LensHiveErrorCode.SEMANTIC_ERROR.getLensErrorInfo().getErrorCode()) {
+      if (error.getCode() == LensDriverErrorCode.SEMANTIC_ERROR.getLensErrorInfo().getErrorCode()) {
         hiveSemanticErrorExists = true;
         break;
       }


[50/51] [abbrv] lens git commit: LENS-920 : Fix issues in producing and consuming json for all api

Posted by de...@apache.org.
LENS-920 : Fix issues in producing and consuming json for all api


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/d559ef2e
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/d559ef2e
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/d559ef2e

Branch: refs/heads/current-release-line
Commit: d559ef2e524ee6c28d45b35cca827fd507b15665
Parents: a5091fe
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Tue Feb 2 18:50:28 2016 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Feb 2 18:50:28 2016 +0530

----------------------------------------------------------------------
 lens-api/pom.xml                                |   8 +
 .../java/org/apache/lens/api/APIResult.java     |  29 +-
 .../main/java/org/apache/lens/api/LensConf.java |   2 +-
 .../org/apache/lens/api/LensSessionHandle.java  |   3 +
 .../main/java/org/apache/lens/api/Priority.java |   5 +
 .../java/org/apache/lens/api/UUIDAdapter.java   |  50 ++
 .../apache/lens/api/query/QueryCostType.java    |   5 +
 .../org/apache/lens/api/query/QueryHandle.java  |   4 +
 .../lens/api/query/QueryPrepareHandle.java      |   4 +
 .../lens/api/query/QueryResultSetMetadata.java  |   2 +
 .../org/apache/lens/api/query/QueryStatus.java  |   4 +
 .../apache/lens/api/query/ResultColumnType.java |   2 +
 .../lens/api/query/SchedulerJobHandle.java      |   4 +
 .../api/query/SchedulerJobInstanceHandle.java   |   4 +
 .../org/apache/lens/api/result/QueryCostTO.java |   2 +-
 .../MoxyJsonConfigurationContextResolver.java   |  38 +
 lens-api/src/main/resources/cube-0.1.xsd        |  12 +-
 .../apache/lens/cli/LensCliApplicationTest.java |   2 +-
 .../cli/TestLensDimensionTableCommands.java     |   2 +-
 .../resources/cube_with_no_weight_facts.xml     |  16 +-
 .../resources/dim-local-storage-element.xml     |   2 +-
 lens-cli/src/test/resources/dim_table.xml       |  10 +-
 lens-cli/src/test/resources/dim_table2.xml      |   8 +-
 .../resources/fact-local-storage-element.xml    |   2 +-
 lens-cli/src/test/resources/fact1.xml           |  10 +-
 .../src/test/resources/fact_without_weight.xml  |  10 +-
 .../src/test/resources/lens-client-site.xml     |   4 +-
 lens-cli/src/test/resources/logback.xml         |  49 ++
 lens-cli/src/test/resources/sample-cube.xml     |  18 +-
 lens-cli/src/test/resources/test-detail.xml     |   4 +-
 lens-cli/src/test/resources/test-dimension.xml  |  16 +-
 .../org/apache/lens/client/LensConnection.java  |   5 +-
 .../apache/lens/client/LensMetadataClient.java  | 196 ++---
 .../org/apache/lens/client/LensStatement.java   |  34 +-
 .../apache/lens/examples/SampleMetastore.java   |   3 +
 lens-examples/src/main/resources/city.xml       |   8 +-
 .../src/main/resources/city_subset.xml          |   4 +-
 lens-examples/src/main/resources/city_table.xml |  10 +-
 lens-examples/src/main/resources/customer.xml   |  14 +-
 .../src/main/resources/customer_table.xml       |  14 +-
 lens-examples/src/main/resources/dim_table.xml  |  10 +-
 lens-examples/src/main/resources/dim_table2.xml |   8 +-
 lens-examples/src/main/resources/dim_table3.xml |   8 +-
 lens-examples/src/main/resources/dim_table4.xml |  10 +-
 lens-examples/src/main/resources/fact1.xml      |  10 +-
 lens-examples/src/main/resources/fact2.xml      |  12 +-
 lens-examples/src/main/resources/product.xml    |  16 +-
 .../src/main/resources/product_db_table.xml     |  14 +-
 .../src/main/resources/product_table.xml        |  16 +-
 lens-examples/src/main/resources/rawfact.xml    |  16 +-
 .../resources/sales-aggr-continuous-fact.xml    |  26 +-
 .../src/main/resources/sales-aggr-fact1.xml     |  38 +-
 .../src/main/resources/sales-aggr-fact2.xml     |  28 +-
 lens-examples/src/main/resources/sales-cube.xml |  54 +-
 .../src/main/resources/sales-raw-fact.xml       |  40 +-
 .../src/main/resources/sample-cube.xml          |  16 +-
 .../main/resources/sample-db-only-dimension.xml |   8 +-
 .../src/main/resources/sample-dimension.xml     |   8 +-
 .../src/main/resources/sample-dimension2.xml    |   6 +-
 .../java/org/apache/lens/ml/server/MLApp.java   |   5 +
 .../java/org/apache/lens/ml/TestMLRunner.java   |   8 -
 .../server/api/driver/InMemoryResultSet.java    |   2 +-
 lens-server/pom.xml                             |   9 -
 .../java/org/apache/lens/server/BaseApp.java    |  53 ++
 .../java/org/apache/lens/server/LensServer.java |   1 -
 .../apache/lens/server/ServerModeFilter.java    |  15 +-
 .../metastore/CubeMetastoreServiceImpl.java     |  24 +-
 .../apache/lens/server/metastore/JAXBUtils.java |  10 +-
 .../lens/server/metastore/MetastoreApp.java     |  29 +-
 .../server/metastore/MetastoreResource.java     |  20 +-
 .../org/apache/lens/server/query/QueryApp.java  |  24 +-
 .../lens/server/query/QueryServiceResource.java |   4 +-
 .../lens/server/query/save/SavedQueryApp.java   |  22 +-
 .../server/query/save/SavedQueryResource.java   |   4 +-
 .../lens/server/scheduler/SchedulerApp.java     |  22 +-
 .../apache/lens/server/session/SessionApp.java  |  23 +-
 .../lens/server/session/SessionResource.java    |   2 +-
 .../java/org/apache/lens/server/ui/UIApp.java   |  20 +-
 .../src/main/resources/lensserver-default.xml   |  12 +-
 .../server/LensAllApplicationJerseyTest.java    |  16 +-
 .../org/apache/lens/server/LensJerseyTest.java  |  37 +-
 .../apache/lens/server/LensServerTestUtil.java  |  49 +-
 .../apache/lens/server/TestLensApplication.java |  13 +-
 .../org/apache/lens/server/TestServerMode.java  |   3 +-
 .../apache/lens/server/TestServerRestart.java   |  71 +-
 .../server/common/FormDataMultiPartFactory.java |  55 +-
 .../lens/server/common/RestAPITestUtil.java     | 157 ++--
 .../server/healthcheck/TestHealthChecks.java    |  20 +-
 .../server/metastore/TestMetastoreService.java  | 799 ++++++++++---------
 .../metastore/TestMetastoreServiceJSON.java     |  28 -
 .../metrics/TestResourceMethodMetrics.java      |  18 +-
 .../server/query/QueryAPIErrorResponseTest.java |  96 +--
 .../apache/lens/server/query/TestLensDAO.java   |  15 +-
 .../lens/server/query/TestQueryConstraints.java |  35 +-
 .../server/query/TestQueryEndEmailNotifier.java |  65 +-
 .../lens/server/query/TestQueryService.java     | 493 ++++++------
 .../lens/server/query/TestResultFormatting.java |  62 +-
 .../query/save/TestSavedQueryService.java       |   8 -
 .../server/session/TestSessionResource.java     | 157 ++--
 .../lens/server/ui/TestSessionUIResource.java   |  12 -
 pom.xml                                         |  17 +-
 src/site/apt/admin/config.apt                   | 128 +--
 102 files changed, 1858 insertions(+), 1768 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-api/pom.xml
----------------------------------------------------------------------
diff --git a/lens-api/pom.xml b/lens-api/pom.xml
index 8aa2f67..1704b62 100644
--- a/lens-api/pom.xml
+++ b/lens-api/pom.xml
@@ -65,6 +65,14 @@
       <groupId>org.jvnet.jaxb2_commons</groupId>
       <artifactId>jaxb2-basics-runtime</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.media</groupId>
+      <artifactId>jersey-media-jaxb</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.media</groupId>
+      <artifactId>jersey-media-moxy</artifactId>
+    </dependency>
   </dependencies>
   <build>
     <plugins>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-api/src/main/java/org/apache/lens/api/APIResult.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/APIResult.java b/lens-api/src/main/java/org/apache/lens/api/APIResult.java
index 0cdee0d..f50d3ea 100644
--- a/lens-api/src/main/java/org/apache/lens/api/APIResult.java
+++ b/lens-api/src/main/java/org/apache/lens/api/APIResult.java
@@ -28,21 +28,24 @@ import javax.xml.bind.annotation.*;
 import org.apache.lens.api.jaxb.LensJAXBContext;
 
 import lombok.AccessLevel;
+import lombok.AllArgsConstructor;
 import lombok.Getter;
 import lombok.NoArgsConstructor;
 
 /**
  * APIResult is the output returned by all the APIs; status-SUCCEEDED or FAILED message- detailed message.
  */
-@XmlRootElement(name = "result")
+@XmlRootElement
 @XmlAccessorType(XmlAccessType.FIELD)
+/*
+ * Instantiates a new API result with values
+ */
+@AllArgsConstructor
 /**
  * Instantiates a new API result.
  */
 @NoArgsConstructor(access = AccessLevel.PROTECTED)
 public class APIResult {
-
-  private static final APIResult SUCCESS = new APIResult(Status.SUCCEEDED, "");
   /**
    * The status.
    */
@@ -73,6 +76,8 @@ public class APIResult {
   /**
    * API Result status.
    */
+  @XmlType
+  @XmlEnum
   public enum Status {
 
     /**
@@ -89,23 +94,6 @@ public class APIResult {
     FAILED
   }
 
-  /**
-   * Instantiates a new API result.
-   *
-   * @param status  the status
-   * @param message the message
-   */
-  public APIResult(Status status, String message) {
-    super();
-    this.status = status;
-    this.message = message;
-  }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see java.lang.Object#toString()
-   */
   @Override
   public String toString() {
     try {
@@ -117,6 +105,7 @@ public class APIResult {
       return e.getMessage();
     }
   }
+  private static final APIResult SUCCESS = new APIResult(Status.SUCCEEDED, "");
 
   public static APIResult partial(int actual, int expected) {
     return new APIResult(Status.PARTIAL, actual + " out of " + expected);

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-api/src/main/java/org/apache/lens/api/LensConf.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/LensConf.java b/lens-api/src/main/java/org/apache/lens/api/LensConf.java
index ff965d6..67d6461 100644
--- a/lens-api/src/main/java/org/apache/lens/api/LensConf.java
+++ b/lens-api/src/main/java/org/apache/lens/api/LensConf.java
@@ -51,7 +51,7 @@ public class LensConf implements Serializable {
    */
   @XmlElementWrapper
   @Getter
-  private final Map<String, String> properties = new HashMap<String, String>();
+  private final Map<String, String> properties = new HashMap<>();
 
   /**
    * Adds the property.

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-api/src/main/java/org/apache/lens/api/LensSessionHandle.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/LensSessionHandle.java b/lens-api/src/main/java/org/apache/lens/api/LensSessionHandle.java
index d4327a6..dc9142d 100644
--- a/lens-api/src/main/java/org/apache/lens/api/LensSessionHandle.java
+++ b/lens-api/src/main/java/org/apache/lens/api/LensSessionHandle.java
@@ -31,6 +31,7 @@ import javax.xml.bind.Marshaller;
 import javax.xml.bind.Unmarshaller;
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
 
 import org.apache.lens.api.jaxb.LensJAXBContext;
 
@@ -63,6 +64,7 @@ public class LensSessionHandle {
    */
   @XmlElement
   @Getter
+  @XmlJavaTypeAdapter(UUIDAdapter.class)
   private UUID publicId;
 
   /**
@@ -70,6 +72,7 @@ public class LensSessionHandle {
    */
   @XmlElement
   @Getter
+  @XmlJavaTypeAdapter(UUIDAdapter.class)
   private UUID secretId;
 
   /**

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-api/src/main/java/org/apache/lens/api/Priority.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/Priority.java b/lens-api/src/main/java/org/apache/lens/api/Priority.java
index 0e5a65d..364f892 100644
--- a/lens-api/src/main/java/org/apache/lens/api/Priority.java
+++ b/lens-api/src/main/java/org/apache/lens/api/Priority.java
@@ -21,9 +21,14 @@
  */
 package org.apache.lens.api;
 
+import javax.xml.bind.annotation.XmlEnum;
+import javax.xml.bind.annotation.XmlType;
+
 /**
  * The Enum Priority.
  */
+@XmlEnum
+@XmlType
 public enum Priority {
 
   /**

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-api/src/main/java/org/apache/lens/api/UUIDAdapter.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/UUIDAdapter.java b/lens-api/src/main/java/org/apache/lens/api/UUIDAdapter.java
new file mode 100644
index 0000000..9d6fa77
--- /dev/null
+++ b/lens-api/src/main/java/org/apache/lens/api/UUIDAdapter.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ *
+ */
+package org.apache.lens.api;
+
+import java.util.UUID;
+
+import javax.xml.bind.annotation.adapters.XmlAdapter;
+
+public class UUIDAdapter extends XmlAdapter<String, UUID> {
+
+  @Override
+  public UUID unmarshal(String s) {
+    if (null == s) {
+      return null;
+    }
+    try {
+      return UUID.fromString(s);
+    } catch (IllegalArgumentException e) {
+      return null;
+    }
+  }
+
+  @Override
+  public String marshal(UUID uuid) {
+    if (uuid == null) {
+      return null;
+    }
+    return uuid.toString();
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-api/src/main/java/org/apache/lens/api/query/QueryCostType.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/query/QueryCostType.java b/lens-api/src/main/java/org/apache/lens/api/query/QueryCostType.java
index 1485e1d..37eda42 100644
--- a/lens-api/src/main/java/org/apache/lens/api/query/QueryCostType.java
+++ b/lens-api/src/main/java/org/apache/lens/api/query/QueryCostType.java
@@ -21,6 +21,11 @@
  */
 package org.apache.lens.api.query;
 
+import javax.xml.bind.annotation.XmlEnum;
+import javax.xml.bind.annotation.XmlType;
+
+@XmlType
+@XmlEnum
 public enum QueryCostType {
   LOW, MEDIUM, HIGH
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-api/src/main/java/org/apache/lens/api/query/QueryHandle.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/query/QueryHandle.java b/lens-api/src/main/java/org/apache/lens/api/query/QueryHandle.java
index 9f20184..2d4c706 100644
--- a/lens-api/src/main/java/org/apache/lens/api/query/QueryHandle.java
+++ b/lens-api/src/main/java/org/apache/lens/api/query/QueryHandle.java
@@ -26,6 +26,9 @@ import java.util.UUID;
 
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
+
+import org.apache.lens.api.UUIDAdapter;
 
 import org.apache.commons.lang.StringUtils;
 
@@ -64,6 +67,7 @@ public class QueryHandle extends QuerySubmitResult implements Serializable {
    */
   @XmlElement
   @Getter
+  @XmlJavaTypeAdapter(UUIDAdapter.class)
   private UUID handleId;
 
   /**

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-api/src/main/java/org/apache/lens/api/query/QueryPrepareHandle.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/query/QueryPrepareHandle.java b/lens-api/src/main/java/org/apache/lens/api/query/QueryPrepareHandle.java
index 756287e..38e87b1 100644
--- a/lens-api/src/main/java/org/apache/lens/api/query/QueryPrepareHandle.java
+++ b/lens-api/src/main/java/org/apache/lens/api/query/QueryPrepareHandle.java
@@ -25,6 +25,9 @@ import java.util.UUID;
 
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
+
+import org.apache.lens.api.UUIDAdapter;
 
 import lombok.*;
 
@@ -56,6 +59,7 @@ public class QueryPrepareHandle extends QuerySubmitResult {
    */
   @XmlElement
   @Getter
+  @XmlJavaTypeAdapter(UUIDAdapter.class)
   private UUID prepareHandleId;
 
   /**

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-api/src/main/java/org/apache/lens/api/query/QueryResultSetMetadata.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/query/QueryResultSetMetadata.java b/lens-api/src/main/java/org/apache/lens/api/query/QueryResultSetMetadata.java
index 28d9710..22c61cf 100644
--- a/lens-api/src/main/java/org/apache/lens/api/query/QueryResultSetMetadata.java
+++ b/lens-api/src/main/java/org/apache/lens/api/query/QueryResultSetMetadata.java
@@ -23,6 +23,7 @@ package org.apache.lens.api.query;
 
 import java.util.List;
 
+import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlElementWrapper;
 import javax.xml.bind.annotation.XmlRootElement;
 
@@ -52,6 +53,7 @@ public class QueryResultSetMetadata {
    * The columns.
    */
   @XmlElementWrapper
+  @XmlElement
   @Getter
   private List<ResultColumn> columns;
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-api/src/main/java/org/apache/lens/api/query/QueryStatus.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/query/QueryStatus.java b/lens-api/src/main/java/org/apache/lens/api/query/QueryStatus.java
index 44fd97e..915dac7 100644
--- a/lens-api/src/main/java/org/apache/lens/api/query/QueryStatus.java
+++ b/lens-api/src/main/java/org/apache/lens/api/query/QueryStatus.java
@@ -24,7 +24,9 @@ package org.apache.lens.api.query;
 import java.io.Serializable;
 
 import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlEnum;
 import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlType;
 
 import org.apache.lens.api.result.LensErrorTO;
 
@@ -67,6 +69,8 @@ public class QueryStatus implements Serializable {
   /**
    * The Enum Status.
    */
+  @XmlType
+  @XmlEnum
   public enum Status {
 
     /**

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-api/src/main/java/org/apache/lens/api/query/ResultColumnType.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/query/ResultColumnType.java b/lens-api/src/main/java/org/apache/lens/api/query/ResultColumnType.java
index d0c21a7..c71d02e 100644
--- a/lens-api/src/main/java/org/apache/lens/api/query/ResultColumnType.java
+++ b/lens-api/src/main/java/org/apache/lens/api/query/ResultColumnType.java
@@ -21,12 +21,14 @@
  */
 package org.apache.lens.api.query;
 
+import javax.xml.bind.annotation.XmlEnum;
 import javax.xml.bind.annotation.XmlRootElement;
 
 /**
  * The Enum ResultColumnType.
  */
 @XmlRootElement
+@XmlEnum
 public enum ResultColumnType {
 
   /**

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobHandle.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobHandle.java b/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobHandle.java
index aa4dc13..4a2baba 100644
--- a/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobHandle.java
+++ b/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobHandle.java
@@ -23,6 +23,9 @@ import java.util.UUID;
 
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
+
+import org.apache.lens.api.UUIDAdapter;
 
 import org.apache.commons.lang.StringUtils;
 
@@ -51,6 +54,7 @@ public class SchedulerJobHandle implements Serializable {
    */
   @XmlElement
   @Getter
+  @XmlJavaTypeAdapter(UUIDAdapter.class)
   private UUID handleId;
 
   /**

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobInstanceHandle.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobInstanceHandle.java b/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobInstanceHandle.java
index c124a38..f19d3f0 100644
--- a/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobInstanceHandle.java
+++ b/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobInstanceHandle.java
@@ -23,6 +23,9 @@ import java.util.UUID;
 
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
+
+import org.apache.lens.api.UUIDAdapter;
 
 import org.apache.commons.lang.StringUtils;
 
@@ -51,6 +54,7 @@ public class SchedulerJobInstanceHandle implements Serializable {
    */
   @XmlElement
   @Getter
+  @XmlJavaTypeAdapter(UUIDAdapter.class)
   private UUID handleId;
 
   /**

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-api/src/main/java/org/apache/lens/api/result/QueryCostTO.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/result/QueryCostTO.java b/lens-api/src/main/java/org/apache/lens/api/result/QueryCostTO.java
index a247e3c..266a4b7 100644
--- a/lens-api/src/main/java/org/apache/lens/api/result/QueryCostTO.java
+++ b/lens-api/src/main/java/org/apache/lens/api/result/QueryCostTO.java
@@ -70,6 +70,6 @@ public class QueryCostTO extends QuerySubmitResult implements Serializable {
    */
   @XmlElement
   @Getter
-  private QueryCostType type;
+  private QueryCostType costType;
 
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-api/src/main/java/org/apache/lens/api/util/MoxyJsonConfigurationContextResolver.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/util/MoxyJsonConfigurationContextResolver.java b/lens-api/src/main/java/org/apache/lens/api/util/MoxyJsonConfigurationContextResolver.java
new file mode 100644
index 0000000..6e5b73e
--- /dev/null
+++ b/lens-api/src/main/java/org/apache/lens/api/util/MoxyJsonConfigurationContextResolver.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.api.util;
+
+import javax.ws.rs.ext.ContextResolver;
+
+import org.eclipse.persistence.jaxb.MarshallerProperties;
+import org.glassfish.jersey.moxy.json.MoxyJsonConfig;
+
+public final class MoxyJsonConfigurationContextResolver implements ContextResolver<MoxyJsonConfig> {
+
+  @Override
+  public MoxyJsonConfig getContext(final Class<?> type) {
+    final MoxyJsonConfig configuration = new MoxyJsonConfig();
+    configuration.setIncludeRoot(true);
+    configuration.setFormattedOutput(true);
+    configuration.setMarshalEmptyCollections(false);
+    configuration.marshallerProperty(MarshallerProperties.JSON_WRAPPER_AS_ARRAY_NAME, true);
+    configuration.unmarshallerProperty(MarshallerProperties.JSON_WRAPPER_AS_ARRAY_NAME, true);
+    return configuration;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-api/src/main/resources/cube-0.1.xsd
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/cube-0.1.xsd b/lens-api/src/main/resources/cube-0.1.xsd
index d195b08..0f4dbea 100644
--- a/lens-api/src/main/resources/cube-0.1.xsd
+++ b/lens-api/src/main/resources/cube-0.1.xsd
@@ -225,7 +225,7 @@
     </xs:annotation>
     <xs:complexContent>
       <xs:extension base="x_field">
-        <xs:attribute type="x_measure_type" name="type" use="required"/>
+        <xs:attribute type="x_measure_type" name="_type" use="required"/>
         <xs:attribute type="xs:string" name="default_aggr">
           <xs:annotation>
             <xs:documentation>
@@ -323,7 +323,7 @@
           </xs:annotation>
           <xs:element type="x_expr_spec" name="expr_spec" maxOccurs="unbounded" minOccurs="1"/>
         </xs:sequence>
-        <xs:attribute type="xs:string" name="type" use="required">
+        <xs:attribute type="xs:string" name="_type" use="required">
           <xs:annotation>
             <xs:documentation>
               The type indicating what the evaluation of expression will produce. Allowed types are BOOLEAN, TINYINT,
@@ -424,7 +424,7 @@
             </xs:annotation>
           </xs:element>
         </xs:sequence>
-        <xs:attribute type="xs:string" name="type">
+        <xs:attribute type="xs:string" name="_type">
           <xs:annotation>
             <xs:documentation>
               The type indicating what the evaluation of expression will produce. Allowed types are BOOLEAN,TINYINT,
@@ -689,7 +689,7 @@
       </xs:documentation>
     </xs:annotation>
     <xs:attribute name="name" type="xs:string" use="required"/>
-    <xs:attribute type="xs:string" name="type" use="required">
+    <xs:attribute type="xs:string" name="_type" use="required">
       <xs:annotation>
         <xs:documentation>
           The type indicating what the evaluation of expression will produce. Allowed types are BOOLEAN, TINYINT,
@@ -1006,7 +1006,7 @@
       </xs:documentation>
     </xs:annotation>
     <xs:sequence>
-      <xs:element name="storage_table" minOccurs="1" maxOccurs="unbounded" type="x_storage_table_element"/>
+      <xs:element name="storage_table" minOccurs="0" maxOccurs="unbounded" type="x_storage_table_element"/>
     </xs:sequence>
   </xs:complexType>
 
@@ -1264,7 +1264,7 @@
         </xs:documentation>
       </xs:annotation>
     </xs:attribute>
-    <xs:attribute name="owner" type="xs:string" use="required">
+    <xs:attribute name="owner" type="xs:string" use="optional">
       <xs:annotation>
         <xs:documentation>
           Owner of the table.

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-cli/src/test/java/org/apache/lens/cli/LensCliApplicationTest.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/LensCliApplicationTest.java b/lens-cli/src/test/java/org/apache/lens/cli/LensCliApplicationTest.java
index 8647ac3..8312335 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/LensCliApplicationTest.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/LensCliApplicationTest.java
@@ -32,7 +32,7 @@ import org.testng.annotations.BeforeTest;
  */
 public class LensCliApplicationTest extends LensAllApplicationJerseyTest {
   public static final String SUCCESS_MESSAGE = "<?xml version=\"1.0\" encoding=\"utf-8\" "
-    + "standalone=\"yes\"?><result><status>succeeded</status><message></message></result>";
+    + "standalone=\"yes\"?><apiresult><status>succeeded</status><message></message></apiresult>";
 
   @Override
   protected int getTestPort() {

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionTableCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionTableCommands.java b/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionTableCommands.java
index 30f4ec1..bf92044 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionTableCommands.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/TestLensDimensionTableCommands.java
@@ -214,7 +214,7 @@ public class TestLensDimensionTableCommands extends LensCliApplicationTest {
     URL resource = TestLensDimensionTableCommands.class.getClassLoader().getResource("dim-local-storage-element.xml");
     command.addNewDimStorage("dim_table2", new File(resource.toURI()));
     result = command.getDimStorages("dim_table2");
-    assertEquals(DIM_LOCAL, result);
+    assertEquals(result, DIM_LOCAL);
 
     result = command.getStorageFromDim("dim_table2", DIM_LOCAL);
     String partString = "DAILY";

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-cli/src/test/resources/cube_with_no_weight_facts.xml
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/resources/cube_with_no_weight_facts.xml b/lens-cli/src/test/resources/cube_with_no_weight_facts.xml
index 13736b2..4673ca4 100644
--- a/lens-cli/src/test/resources/cube_with_no_weight_facts.xml
+++ b/lens-cli/src/test/resources/cube_with_no_weight_facts.xml
@@ -22,18 +22,18 @@
 <x_base_cube name="cube_with_no_weight_facts" xmlns="uri:lens:cube:0.1"
  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
     <measures>
-        <measure name="measure1" type="BIGINT" />
-        <measure name="measure2" type="INT" default_aggr="SUM" />
-        <measure name="measure3" type="FLOAT" default_aggr="MAX" start_time='2013-12-12T00:00:00' />
-        <measure name="measure4" type="DOUBLE" default_aggr="MIN" />
+        <measure name="measure1" _type="BIGINT" />
+        <measure name="measure2" _type="INT" default_aggr="SUM" />
+        <measure name="measure3" _type="FLOAT" default_aggr="MAX" start_time='2013-12-12T00:00:00' />
+        <measure name="measure4" _type="DOUBLE" default_aggr="MIN" />
     </measures>
     <dim_attributes>
-        <dim_attribute name="dim1" type="INT" />
-        <dim_attribute name="dim2" type="INT" start_time='2013-12-01T00:00:00' />
-        <dim_attribute name="dim3" type="INT"/>
+        <dim_attribute name="dim1" _type="INT" />
+        <dim_attribute name="dim2" _type="INT" start_time='2013-12-01T00:00:00' />
+        <dim_attribute name="dim3" _type="INT"/>
     </dim_attributes>
     <expressions>
-        <expression name="expr_msr5" type="DOUBLE">
+        <expression name="expr_msr5" _type="DOUBLE">
             <expr_spec expr = "measure3 + measure4" end_time='2013-12-12T00:00:00'/>
             <expr_spec expr = "measure3 + measure4 + 0.01" start_time='2013-12-12T00:00:00'/>
         </expression>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-cli/src/test/resources/dim-local-storage-element.xml
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/resources/dim-local-storage-element.xml b/lens-cli/src/test/resources/dim-local-storage-element.xml
index 6503261..ad557f9 100644
--- a/lens-cli/src/test/resources/dim-local-storage-element.xml
+++ b/lens-cli/src/test/resources/dim-local-storage-element.xml
@@ -27,7 +27,7 @@
   <storage_name>dim_local</storage_name>
   <table_desc external="true" field_delimiter="," table_location="${project.build.directory}/metastore/examples/local">
     <part_cols>
-      <column comment="Time column" name="dt" type="STRING"/>
+      <column comment="Time column" name="dt" _type="STRING"/>
     </part_cols>
     <time_part_cols>dt</time_part_cols>
   </table_desc>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-cli/src/test/resources/dim_table.xml
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/resources/dim_table.xml b/lens-cli/src/test/resources/dim_table.xml
index 3bc9600..eb4ddfc 100644
--- a/lens-cli/src/test/resources/dim_table.xml
+++ b/lens-cli/src/test/resources/dim_table.xml
@@ -22,10 +22,10 @@
 <x_dimension_table dimension_name="test_dim" table_name="dim_table" weight="100.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="ID" name="id" type="INT"/>
-    <column comment="name" name="name" type="STRING"/>
-    <column comment="more details" name="detail" type="STRING"/>
-    <column comment="d2 ID" name="d2id" type="INT"/>
+    <column comment="ID" name="id" _type="INT"/>
+    <column comment="name" name="name" _type="STRING"/>
+    <column comment="more details" name="detail" _type="STRING"/>
+    <column comment="d2 ID" name="d2id" _type="INT"/>
   </columns>
   <properties>
     <property name="dim1.prop" value="d1"/>
@@ -39,7 +39,7 @@
       <table_desc external="true" field_delimiter=","
         table_location="${project.build.directory}/metastore/examples/local">
         <part_cols>
-          <column comment="Time column" name="dt" type="STRING"/>
+          <column comment="Time column" name="dt" _type="STRING"/>
         </part_cols>
         <time_part_cols>dt</time_part_cols>
       </table_desc>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-cli/src/test/resources/dim_table2.xml
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/resources/dim_table2.xml b/lens-cli/src/test/resources/dim_table2.xml
index 4c3f7d9..8440e28 100644
--- a/lens-cli/src/test/resources/dim_table2.xml
+++ b/lens-cli/src/test/resources/dim_table2.xml
@@ -22,9 +22,9 @@
 <x_dimension_table dimension_name="test_dim" table_name="dim_table2" weight="100.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="ID" name="id" type="INT"/>
-    <column comment="name" name="name" type="STRING"/>
-    <column comment="more details for dim2" name="detail2" type="STRING"/>
+    <column comment="ID" name="id" _type="INT"/>
+    <column comment="name" name="name" _type="STRING"/>
+    <column comment="more details for dim2" name="detail2" _type="STRING"/>
   </columns>
 
   <properties>
@@ -39,7 +39,7 @@
       <table_desc external="true" field_delimiter=","
         table_location="${project.build.directory}/metastore/examples/dim1">
         <part_cols>
-          <column comment="Time column" name="dt" type="STRING"/>
+          <column comment="Time column" name="dt" _type="STRING"/>
         </part_cols>
         <time_part_cols>dt</time_part_cols>
       </table_desc>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-cli/src/test/resources/fact-local-storage-element.xml
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/resources/fact-local-storage-element.xml b/lens-cli/src/test/resources/fact-local-storage-element.xml
index 6f30a1c..15a847b 100644
--- a/lens-cli/src/test/resources/fact-local-storage-element.xml
+++ b/lens-cli/src/test/resources/fact-local-storage-element.xml
@@ -28,7 +28,7 @@
   <storage_name>fact_local</storage_name>
   <table_desc external="true" field_delimiter="," table_location="${project.build.directory}/metastore/examples/local">
     <part_cols>
-      <column comment="Time column" name="dt" type="STRING"/>
+      <column comment="Time column" name="dt" _type="STRING"/>
     </part_cols>
     <time_part_cols>dt</time_part_cols>
   </table_desc>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-cli/src/test/resources/fact1.xml
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/resources/fact1.xml b/lens-cli/src/test/resources/fact1.xml
index f6d1f9c..b18a393 100644
--- a/lens-cli/src/test/resources/fact1.xml
+++ b/lens-cli/src/test/resources/fact1.xml
@@ -22,10 +22,10 @@
 <x_fact_table cube_name="sample_cube" name="fact1" weight="100.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="" name="dim1" type="INT"/>
-    <column comment="" name="measure1" type="BIGINT"/>
-    <column comment="" name="measure2" type="INT"/>
-    <column comment="" name="measure3" type="FLOAT"/>
+    <column comment="" name="dim1" _type="INT"/>
+    <column comment="" name="measure1" _type="BIGINT"/>
+    <column comment="" name="measure2" _type="INT"/>
+    <column comment="" name="measure3" _type="FLOAT"/>
   </columns>
   <properties>
     <property name="fact1.prop" value="f1"/>
@@ -42,7 +42,7 @@
       <table_desc external="true" field_delimiter=","
         table_location="${project.build.directory}/metastore/examples/fact1_local">
         <part_cols>
-          <column comment="Time column" name="dt" type="STRING"/>
+          <column comment="Time column" name="dt" _type="STRING"/>
         </part_cols>
         <time_part_cols>dt</time_part_cols>
       </table_desc>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-cli/src/test/resources/fact_without_weight.xml
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/resources/fact_without_weight.xml b/lens-cli/src/test/resources/fact_without_weight.xml
index 8371b0b..0c124b2 100644
--- a/lens-cli/src/test/resources/fact_without_weight.xml
+++ b/lens-cli/src/test/resources/fact_without_weight.xml
@@ -22,10 +22,10 @@
 <x_fact_table cube_name="cube_with_no_weight_facts" name="fact_without_wt" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="" name="dim1" type="INT"/>
-    <column comment="" name="measure1" type="BIGINT"/>
-    <column comment="" name="measure2" type="INT"/>
-    <column comment="" name="measure3" type="FLOAT"/>
+    <column comment="" name="dim1" _type="INT"/>
+    <column comment="" name="measure1" _type="BIGINT"/>
+    <column comment="" name="measure2" _type="INT"/>
+    <column comment="" name="measure3" _type="FLOAT"/>
   </columns>
   <properties>
     <property name="fact_without_wt.prop" value="f1"/>
@@ -42,7 +42,7 @@
       <table_desc external="true" field_delimiter=","
         table_location="${project.build.directory}/metastore/examples/fact_local_without_wt">
         <part_cols>
-          <column comment="Time column" name="dt" type="STRING"/>
+          <column comment="Time column" name="dt" _type="STRING"/>
         </part_cols>
         <time_part_cols>dt</time_part_cols>
       </table_desc>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-cli/src/test/resources/lens-client-site.xml
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/resources/lens-client-site.xml b/lens-cli/src/test/resources/lens-client-site.xml
index e500bda..bf1443d 100644
--- a/lens-cli/src/test/resources/lens-client-site.xml
+++ b/lens-cli/src/test/resources/lens-client-site.xml
@@ -1,4 +1,4 @@
-<?xml version="1.0"?>
+<?xml version="1.0" encoding="UTF-8"?><?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
 <!--
 
   Licensed to the Apache Software Foundation (ASF) under one
@@ -19,8 +19,6 @@
   under the License.
 
 -->
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
 <configuration>
 
   <property>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-cli/src/test/resources/logback.xml
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/resources/logback.xml b/lens-cli/src/test/resources/logback.xml
new file mode 100644
index 0000000..57cf4a4
--- /dev/null
+++ b/lens-cli/src/test/resources/logback.xml
@@ -0,0 +1,49 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<configuration>
+  <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+    <encoder>
+      <pattern>%d{dd MMM yyyy HH:mm:ss,SSS} [%X{logSegregationId}] [%t] %-5p %c - %m%n</pattern>
+    </encoder>
+    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+      <level>ERROR</level>
+    </filter>
+  </appender>
+  <appender name="TEST_LOG_FILE" class="ch.qos.logback.core.FileAppender">
+    <file>target/test.log</file>
+    <encoder>
+      <pattern>%d{dd MMM yyyy HH:mm:ss,SSS} %X{logSegregationId} [%t] %-5p %c - %m%n</pattern>
+    </encoder>
+    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+      <level>INFO</level>
+    </filter>
+  </appender>
+  <logger name="org.apache.lens.client" additivity="false" level="DEBUG">
+    <appender-ref ref="STDOUT"/>
+    <appender-ref ref="TEST_LOG_FILE"/>
+  </logger>
+  <root level="INFO">
+    <appender-ref ref="STDOUT"/>
+    <appender-ref ref="TEST_LOG_FILE"/>
+  </root>
+</configuration>
+

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-cli/src/test/resources/sample-cube.xml
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/resources/sample-cube.xml b/lens-cli/src/test/resources/sample-cube.xml
index e3b3284..9bcf177 100644
--- a/lens-cli/src/test/resources/sample-cube.xml
+++ b/lens-cli/src/test/resources/sample-cube.xml
@@ -26,23 +26,23 @@
     <property name="cube.sample_cube.timed.dimensions.list" value="dt" />
   </properties>
   <measures>
-    <measure name="measure1" type="BIGINT" />
-    <measure name="measure2" type="INT" default_aggr="SUM" />
-    <measure name="measure3" type="FLOAT" default_aggr="MAX" start_time='2013-12-12T00:00:00' />
-    <measure name="measure4" type="DOUBLE" default_aggr="MIN" />
+    <measure name="measure1" _type="BIGINT" />
+    <measure name="measure2" _type="INT" default_aggr="SUM" />
+    <measure name="measure3" _type="FLOAT" default_aggr="MAX" start_time='2013-12-12T00:00:00' />
+    <measure name="measure4" _type="DOUBLE" default_aggr="MIN" />
   </measures>
   <dim_attributes>
-    <dim_attribute name="dim1" type="INT" />
-    <dim_attribute name="dim2" type="INT" start_time='2013-12-01T00:00:00' />
-    <dim_attribute name="dim3" type="INT"/>
-    <dim_attribute name="dimDetail" type="string" description="City name to which the customer belongs"
+    <dim_attribute name="dim1" _type="INT" />
+    <dim_attribute name="dim2" _type="INT" start_time='2013-12-01T00:00:00' />
+    <dim_attribute name="dim3" _type="INT"/>
+    <dim_attribute name="dimDetail" _type="string" description="City name to which the customer belongs"
                    display_string="Customer City">
       <chain_ref_column chain_name="testdimchain" ref_col="detail" />
       <chain_ref_column chain_name="testdetailchain" ref_col="name" />
     </dim_attribute>
   </dim_attributes>
   <expressions>
-    <expression name="expr_msr5" type="DOUBLE">
+    <expression name="expr_msr5" _type="DOUBLE">
       <expr_spec expr = "measure3 + measure4" end_time='2013-12-12T00:00:00'/>
       <expr_spec expr = "measure3 + measure4 + 0.01" start_time='2013-12-12T00:00:00'/>
     </expression>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-cli/src/test/resources/test-detail.xml
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/resources/test-detail.xml b/lens-cli/src/test/resources/test-detail.xml
index bb54354..b51c188 100644
--- a/lens-cli/src/test/resources/test-detail.xml
+++ b/lens-cli/src/test/resources/test-detail.xml
@@ -22,8 +22,8 @@
 <x_dimension name="test_detail" xmlns="uri:lens:cube:0.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
   xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <attributes>
-    <dim_attribute name="id" type="INT" />
-    <dim_attribute name="name" type="STRING" />
+    <dim_attribute name="id" _type="INT" />
+    <dim_attribute name="name" _type="STRING" />
   </attributes>
 
   <properties>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-cli/src/test/resources/test-dimension.xml
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/resources/test-dimension.xml b/lens-cli/src/test/resources/test-dimension.xml
index 2fa47f1..01de8e6 100644
--- a/lens-cli/src/test/resources/test-dimension.xml
+++ b/lens-cli/src/test/resources/test-dimension.xml
@@ -22,20 +22,20 @@
 <x_dimension name="test_dim" xmlns="uri:lens:cube:0.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
   xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <attributes>
-    <dim_attribute name="id" type="INT" />
-    <dim_attribute name="name" type="STRING" />
-    <dim_attribute name="detail" type="STRING" start_time='2013-12-01T00:00:00' />
-    <dim_attribute name="d2id" type="INT" start_time='2013-12-01T00:00:00'/>
-    <dim_attribute name="inline" type="STRING" >
+    <dim_attribute name="id" _type="INT" />
+    <dim_attribute name="name" _type="STRING" />
+    <dim_attribute name="detail" _type="STRING" start_time='2013-12-01T00:00:00' />
+    <dim_attribute name="d2id" _type="INT" start_time='2013-12-01T00:00:00'/>
+    <dim_attribute name="inline" _type="STRING" >
       <values>A</values>
       <values>B</values>
       <values>C</values>
     </dim_attribute>
     <dim_attribute name="location">
       <hierarchy>
-        <dim_attribute name="zipcode" type="INT" />
-        <dim_attribute name="city" type="STRING" />
-        <dim_attribute name="state" type="STRING" />
+        <dim_attribute name="zipcode" _type="INT" />
+        <dim_attribute name="city" _type="STRING" />
+        <dim_attribute name="state" _type="STRING" />
       </hierarchy>
     </dim_attribute>
   </attributes>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-client/src/main/java/org/apache/lens/client/LensConnection.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/LensConnection.java b/lens-client/src/main/java/org/apache/lens/client/LensConnection.java
index 30a7e2c..eeb473a 100644
--- a/lens-client/src/main/java/org/apache/lens/client/LensConnection.java
+++ b/lens-client/src/main/java/org/apache/lens/client/LensConnection.java
@@ -34,12 +34,14 @@ import javax.ws.rs.core.Response;
 import org.apache.lens.api.APIResult;
 import org.apache.lens.api.LensSessionHandle;
 import org.apache.lens.api.StringList;
+import org.apache.lens.api.util.MoxyJsonConfigurationContextResolver;
 import org.apache.lens.client.exceptions.LensClientServerConnectionException;
 
 import org.glassfish.jersey.media.multipart.FormDataBodyPart;
 import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
 import org.glassfish.jersey.media.multipart.FormDataMultiPart;
 import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.glassfish.jersey.moxy.json.MoxyJsonFeature;
 
 import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
@@ -110,7 +112,8 @@ public class LensConnection {
   }
 
   public Client buildClient() {
-    ClientBuilder cb = ClientBuilder.newBuilder().register(MultiPartFeature.class);
+    ClientBuilder cb = ClientBuilder.newBuilder().register(MultiPartFeature.class).register(MoxyJsonFeature.class)
+      .register(MoxyJsonConfigurationContextResolver.class);
     Iterator<Class<?>> itr = params.getRequestFilters().iterator();
     while (itr.hasNext()) {
       cb.register(itr.next());

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java b/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
index 4dec1a2..07ce41e 100644
--- a/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
+++ b/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
@@ -25,6 +25,7 @@ import java.util.Date;
 import java.util.List;
 
 import javax.ws.rs.client.*;
+import javax.ws.rs.core.GenericEntity;
 import javax.ws.rs.core.GenericType;
 import javax.ws.rs.core.MediaType;
 import javax.xml.bind.*;
@@ -35,8 +36,6 @@ import org.apache.lens.api.StringList;
 import org.apache.lens.api.jaxb.LensJAXBContext;
 import org.apache.lens.api.metastore.*;
 
-import org.glassfish.jersey.media.multipart.*;
-
 import com.google.common.base.Joiner;
 import lombok.extern.slf4j.Slf4j;
 
@@ -87,30 +86,27 @@ public class LensMetadataClient {
 
   public String getCurrentDatabase() {
     WebTarget target = getMetastoreWebTarget();
-    String database = target.path("databases").path("current")
+    return target.path("databases").path("current")
       .queryParam("sessionid", connection.getSessionHandle())
       .request().get(String.class);
-    return database;
   }
 
 
   public APIResult setDatabase(String database) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("databases").path("current")
+    return target.path("databases").path("current")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML_TYPE)
       .put(Entity.xml(database), APIResult.class);
-    return result;
   }
 
   public APIResult createDatabase(String database, boolean ignoreIfExists) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("databases")
+    return target.path("databases")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .queryParam("ignoreIfExisting", ignoreIfExists)
       .request(MediaType.APPLICATION_XML)
       .post(Entity.xml(database), APIResult.class);
-    return result;
   }
 
   public APIResult createDatabase(String database) {
@@ -156,19 +152,17 @@ public class LensMetadataClient {
 
   public APIResult dropAllCubes() {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("cubes")
+    return target.path("cubes")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML).delete(APIResult.class);
-    return result;
   }
 
   public APIResult createCube(XCube cube) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("cubes")
+    return target.path("cubes")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
-      .post(Entity.xml(objFact.createXCube(cube)), APIResult.class);
-    return result;
+      .post(Entity.xml(new GenericEntity<JAXBElement<XCube>>(objFact.createXCube(cube)){}), APIResult.class);
   }
 
   private <T> T readFromXML(String filename) throws JAXBException, IOException {
@@ -194,11 +188,10 @@ public class LensMetadataClient {
 
   public APIResult updateCube(String cubeName, XCube cube) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("cubes").path(cubeName)
+    return target.path("cubes").path(cubeName)
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
-      .put(Entity.xml(objFact.createXCube(cube)), APIResult.class);
-    return result;
+      .put(Entity.xml(new GenericEntity<JAXBElement<XCube>>(objFact.createXCube(cube)){}), APIResult.class);
   }
 
   public APIResult updateCube(String cubeName, String cubeSpec) {
@@ -239,10 +232,9 @@ public class LensMetadataClient {
 
   public APIResult dropCube(String cubeName) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("cubes").path(cubeName)
+    return target.path("cubes").path(cubeName)
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML).delete(APIResult.class);
-    return result;
   }
 
   public List<String> getAllDimensions() {
@@ -255,19 +247,18 @@ public class LensMetadataClient {
 
   public APIResult dropAllDimensions() {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimensions")
+    return target.path("dimensions")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML).delete(APIResult.class);
-    return result;
   }
 
   public APIResult createDimension(XDimension dimension) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimensions")
+    return target.path("dimensions")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
-      .post(Entity.xml(objFact.createXDimension(dimension)), APIResult.class);
-    return result;
+      .post(Entity.xml(new GenericEntity<JAXBElement<XDimension>>(objFact.createXDimension(dimension)){}),
+        APIResult.class);
   }
 
   public APIResult createDimension(String dimSpec) {
@@ -280,11 +271,11 @@ public class LensMetadataClient {
 
   public APIResult updateDimension(String dimName, XDimension dimension) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimensions").path(dimName)
+    return target.path("dimensions").path(dimName)
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
-      .put(Entity.xml(objFact.createXDimension(dimension)), APIResult.class);
-    return result;
+      .put(Entity.xml(new GenericEntity<JAXBElement<XDimension>>(objFact.createXDimension(dimension)){}),
+        APIResult.class);
   }
 
   public APIResult updateDimension(String dimName, String dimSpec) {
@@ -306,10 +297,9 @@ public class LensMetadataClient {
 
   public APIResult dropDimension(String dimName) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimensions").path(dimName)
+    return target.path("dimensions").path(dimName)
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML).delete(APIResult.class);
-    return result;
   }
 
   public List<String> getAllStorages() {
@@ -323,11 +313,10 @@ public class LensMetadataClient {
 
   public APIResult createNewStorage(XStorage storage) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("storages")
+    return target.path("storages")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
-      .post(Entity.xml(objFact.createXStorage(storage)), APIResult.class);
-    return result;
+      .post(Entity.xml(new GenericEntity<JAXBElement<XStorage>>(objFact.createXStorage(storage)){}), APIResult.class);
   }
 
 
@@ -341,20 +330,18 @@ public class LensMetadataClient {
 
   public APIResult dropAllStorages() {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("storages")
+    return target.path("storages")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
       .delete(APIResult.class);
-    return result;
   }
 
   public APIResult updateStorage(String storageName, XStorage storage) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("storages").path(storageName)
+    return target.path("storages").path(storageName)
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
-      .put(Entity.xml(objFact.createXStorage(storage)), APIResult.class);
-    return result;
+      .put(Entity.xml(new GenericEntity<JAXBElement<XStorage>>(objFact.createXStorage(storage)){}), APIResult.class);
   }
 
   public APIResult updateStorage(String storageName, String storage) {
@@ -377,11 +364,10 @@ public class LensMetadataClient {
 
   public APIResult dropStorage(String storageName) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("storages").path(storageName)
+    return target.path("storages").path(storageName)
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
       .delete(APIResult.class);
-    return result;
   }
 
   public List<String> getAllFactTables(String cubeName) {
@@ -411,12 +397,11 @@ public class LensMetadataClient {
 
   public APIResult deleteAllFactTables(boolean cascade) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("facts")
+    return target.path("facts")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .queryParam("cascade", cascade)
       .request(MediaType.APPLICATION_XML)
       .delete(APIResult.class);
-    return result;
   }
 
 
@@ -432,17 +417,10 @@ public class LensMetadataClient {
 
   public APIResult createFactTable(XFactTable f) {
     WebTarget target = getMetastoreWebTarget();
-    FormDataMultiPart mp = new FormDataMultiPart();
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid")
-      .build(), this.connection.getSessionHandle(), MediaType.APPLICATION_XML_TYPE));
-    mp.bodyPart(new FormDataBodyPart(
-      FormDataContentDisposition.name("fact").fileName("fact").build(),
-      objFact.createXFactTable(f), MediaType.APPLICATION_XML_TYPE));
-    APIResult result = target.path("facts")
-      .request(MediaType.APPLICATION_XML_TYPE)
-      .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
-        APIResult.class);
-    return result;
+    return target.path("facts")
+      .queryParam("sessionid", this.connection.getSessionHandle())
+      .request(MediaType.APPLICATION_XML)
+      .post(Entity.xml(new GenericEntity<JAXBElement<XFactTable>>(objFact.createXFactTable(f)){}), APIResult.class);
   }
 
   public APIResult createFactTable(String factSpec) {
@@ -455,11 +433,10 @@ public class LensMetadataClient {
 
   public APIResult updateFactTable(String factName, XFactTable table) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("facts").path(factName)
+    return target.path("facts").path(factName)
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML_TYPE)
-      .put(Entity.xml(objFact.createXFactTable(table)), APIResult.class);
-    return result;
+      .put(Entity.xml(new GenericEntity<JAXBElement<XFactTable>>(objFact.createXFactTable(table)){}), APIResult.class);
   }
 
   public APIResult updateFactTable(String factName, String table) {
@@ -472,12 +449,11 @@ public class LensMetadataClient {
 
   public APIResult dropFactTable(String factName, boolean cascade) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("facts").path(factName)
+    return target.path("facts").path(factName)
       .queryParam("sessionid", this.connection.getSessionHandle())
       .queryParam("cascade", cascade)
       .request(MediaType.APPLICATION_XML)
       .delete(APIResult.class);
-    return result;
   }
 
   public APIResult dropFactTable(String factName) {
@@ -495,20 +471,19 @@ public class LensMetadataClient {
 
   public APIResult dropAllStoragesOfFactTable(String factName) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("facts").path(factName).path("storages")
+    return target.path("facts").path(factName).path("storages")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
       .delete(APIResult.class);
-    return result;
   }
 
   public APIResult addStorageToFactTable(String factname, XStorageTableElement storage) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("facts").path(factname).path("storages")
+    return target.path("facts").path(factname).path("storages")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
-      .post(Entity.xml(objFact.createXStorageTableElement(storage)), APIResult.class);
-    return result;
+      .post(Entity.xml(new GenericEntity<JAXBElement<XStorageTableElement>>(
+        objFact.createXStorageTableElement(storage)){}), APIResult.class);
   }
 
   public APIResult addStorageToFactTable(String factname, String storageSpec) {
@@ -521,11 +496,10 @@ public class LensMetadataClient {
 
   public APIResult dropStorageFromFactTable(String factName, String storageName) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("facts").path(factName).path("storages").path(storageName)
+    return target.path("facts").path(factName).path("storages").path(storageName)
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
       .delete(APIResult.class);
-    return result;
   }
 
   public XStorageTableElement getStorageOfFactTable(String factName, String storageName) {
@@ -557,13 +531,12 @@ public class LensMetadataClient {
 
   public APIResult dropPartitionsOfFactTable(String factName, String storage, String filter) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("facts").path(factName)
+    return target.path("facts").path(factName)
       .path("storages").path(storage).path("partitions")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .queryParam("filter", filter)
       .request(MediaType.APPLICATION_XML)
       .delete(APIResult.class);
-    return result;
   }
 
   public APIResult dropPartitionsOfFactTable(String factName, String storage) {
@@ -575,13 +548,12 @@ public class LensMetadataClient {
     String values = Joiner.on(",").skipNulls().join(partitions);
 
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("facts").path(factName)
+    return target.path("facts").path(factName)
       .path("storages").path(storage).path("partitions")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .queryParam("values", values)
       .request(MediaType.APPLICATION_XML)
       .delete(APIResult.class);
-    return result;
   }
 
 
@@ -610,18 +582,11 @@ public class LensMetadataClient {
 
   public APIResult createDimensionTable(XDimensionTable table) {
     WebTarget target = getMetastoreWebTarget();
-
-    FormDataMultiPart mp = new FormDataMultiPart();
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(),
-      this.connection.getSessionHandle(), MediaType.APPLICATION_XML_TYPE));
-    mp.bodyPart(new FormDataBodyPart(
-      FormDataContentDisposition.name("dimensionTable").fileName("dimtable").build(),
-      objFact.createXDimensionTable(table), MediaType.APPLICATION_XML_TYPE));
-
-    APIResult result = target.path("dimtables")
+    return target.path("dimtables")
+      .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
-      .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
-    return result;
+      .post(Entity.xml(new GenericEntity<JAXBElement<XDimensionTable>>(objFact.createXDimensionTable(table)){}),
+        APIResult.class);
   }
 
   public APIResult createDimensionTable(String tableXml) {
@@ -636,11 +601,11 @@ public class LensMetadataClient {
   public APIResult updateDimensionTable(XDimensionTable table) {
     String dimTableName = table.getTableName();
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimtables").path(dimTableName)
+    return target.path("dimtables").path(dimTableName)
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
-      .put(Entity.xml(objFact.createXDimensionTable(table)), APIResult.class);
-    return result;
+      .put(Entity.xml(new GenericEntity<JAXBElement<XDimensionTable>>(objFact.createXDimensionTable(table)){}),
+        APIResult.class);
   }
 
   public APIResult updateDimensionTable(String dimTblName, String dimSpec) {
@@ -655,12 +620,11 @@ public class LensMetadataClient {
 
   public APIResult dropDimensionTable(String table, boolean cascade) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimtables").path(table)
+    return target.path("dimtables").path(table)
       .queryParam("sessionid", this.connection.getSessionHandle())
       .queryParam("cascade", cascade)
       .request(MediaType.APPLICATION_XML)
       .delete(APIResult.class);
-    return result;
   }
 
   public APIResult dropDimensionTable(String table) {
@@ -689,11 +653,11 @@ public class LensMetadataClient {
 
   public APIResult addStorageToDimTable(String dimTblName, XStorageTableElement table) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimtables").path(dimTblName).path("storages")
+    return target.path("dimtables").path(dimTblName).path("storages")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
-      .post(Entity.xml(objFact.createXStorageTableElement(table)), APIResult.class);
-    return result;
+      .post(Entity.xml(new GenericEntity<JAXBElement<XStorageTableElement>>(
+        objFact.createXStorageTableElement(table)){}), APIResult.class);
   }
 
   public APIResult addStorageToDimTable(String dimTblName, String table) {
@@ -717,22 +681,20 @@ public class LensMetadataClient {
 
   public APIResult dropAllStoragesOfDimension(String dimTblName) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimtables").path(dimTblName).path("storages")
+    return target.path("dimtables").path(dimTblName).path("storages")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
       .delete(APIResult.class);
-    return result;
   }
 
 
   public APIResult dropStoragesOfDimensionTable(String dimTblName, String storage) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimtables").path(dimTblName)
+    return target.path("dimtables").path(dimTblName)
       .path("storages").path(storage)
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
       .delete(APIResult.class);
-    return result;
   }
 
   public List<XPartition> getAllPartitionsOfDimensionTable(String dimTblName, String storage,
@@ -755,13 +717,12 @@ public class LensMetadataClient {
   public APIResult dropAllPartitionsOfDimensionTable(String dimTblName, String storage,
     String filter) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimtables").path(dimTblName)
+    return target.path("dimtables").path(dimTblName)
       .path("storages").path(storage).path("partitions")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .queryParam("filter", filter)
       .request(MediaType.APPLICATION_XML)
       .delete(APIResult.class);
-    return result;
   }
 
   public APIResult dropAllPartitionsOfDimensionTable(String dimTblName, String storage) {
@@ -772,24 +733,23 @@ public class LensMetadataClient {
     List<String> vals) {
     String values = Joiner.on(",").skipNulls().join(vals);
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimtables").path(dimTblName)
+    return target.path("dimtables").path(dimTblName)
       .path("storages").path(storage).path("partitions")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .queryParam("values", values)
       .request(MediaType.APPLICATION_XML)
       .delete(APIResult.class);
-    return result;
   }
 
   public APIResult addPartitionToDimensionTable(String dimTblName, String storage,
     XPartition partition) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimtables").path(dimTblName)
+    return target.path("dimtables").path(dimTblName)
       .path("storages").path(storage).path("partition")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
-      .post(Entity.xml(objFact.createXPartition(partition)), APIResult.class);
-    return result;
+      .post(Entity.xml(new GenericEntity<JAXBElement<XPartition>>(objFact.createXPartition(partition)){}),
+        APIResult.class);
   }
 
   public APIResult addPartitionToDimensionTable(String dimTblName, String storage,
@@ -804,12 +764,12 @@ public class LensMetadataClient {
   public APIResult addPartitionsToDimensionTable(String dimTblName, String storage,
     XPartitionList partitions) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimtables").path(dimTblName)
+    return target.path("dimtables").path(dimTblName)
       .path("storages").path(storage).path("partitions")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
-      .post(Entity.xml(objFact.createXPartitionList(partitions)), APIResult.class);
-    return result;
+      .post(Entity.xml(new GenericEntity<JAXBElement<XPartitionList>>(objFact.createXPartitionList(partitions)){}),
+        APIResult.class);
   }
 
   public APIResult addPartitionsToDimensionTable(String dimTblName, String storage,
@@ -824,12 +784,12 @@ public class LensMetadataClient {
   public APIResult addPartitionToFactTable(String fact, String storage,
     XPartition partition) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("facts").path(fact)
+    return target.path("facts").path(fact)
       .path("storages").path(storage).path("partition")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
-      .post(Entity.xml(objFact.createXPartition(partition)), APIResult.class);
-    return result;
+      .post(Entity.xml(new GenericEntity<JAXBElement<XPartition>>(objFact.createXPartition(partition)){}),
+        APIResult.class);
   }
 
   public APIResult addPartitionToFactTable(String fact, String storage,
@@ -844,12 +804,12 @@ public class LensMetadataClient {
   public APIResult addPartitionsToFactTable(String fact, String storage,
     XPartitionList partitions) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("facts").path(fact)
+    return target.path("facts").path(fact)
       .path("storages").path(storage).path("partitions")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
-      .post(Entity.xml(objFact.createXPartitionList(partitions)), APIResult.class);
-    return result;
+      .post(Entity.xml(new GenericEntity<JAXBElement<XPartitionList>>(objFact.createXPartitionList(partitions)){}),
+        APIResult.class);
   }
 
   public APIResult addPartitionsToFactTable(String fact, String storage,
@@ -864,12 +824,12 @@ public class LensMetadataClient {
   public APIResult updatePartitionOfDimensionTable(String dimTblName, String storage,
     XPartition partition) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimtables").path(dimTblName)
+    return target.path("dimtables").path(dimTblName)
       .path("storages").path(storage).path("partition")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
-      .put(Entity.xml(objFact.createXPartition(partition)), APIResult.class);
-    return result;
+      .put(Entity.xml(new GenericEntity<JAXBElement<XPartition>>(objFact.createXPartition(partition)){}),
+        APIResult.class);
   }
 
   public APIResult updatePartitionOfDimensionTable(String dimTblName, String storage,
@@ -884,12 +844,12 @@ public class LensMetadataClient {
   public APIResult updatePartitionsOfDimensionTable(String dimTblName, String storage,
     XPartitionList partitions) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("dimtables").path(dimTblName)
+    return target.path("dimtables").path(dimTblName)
       .path("storages").path(storage).path("partitions")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
-      .put(Entity.xml(objFact.createXPartitionList(partitions)), APIResult.class);
-    return result;
+      .put(Entity.xml(new GenericEntity<JAXBElement<XPartitionList>>(objFact.createXPartitionList(partitions)){}),
+        APIResult.class);
   }
 
   public APIResult updatePartitionsOfDimensionTable(String dimTblName, String storage,
@@ -904,12 +864,12 @@ public class LensMetadataClient {
   public APIResult updatePartitionOfFactTable(String fact, String storage,
     XPartition partition) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("facts").path(fact)
+    return target.path("facts").path(fact)
       .path("storages").path(storage).path("partition")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
-      .put(Entity.xml(objFact.createXPartition(partition)), APIResult.class);
-    return result;
+      .put(Entity.xml(new GenericEntity<JAXBElement<XPartition>>(objFact.createXPartition(partition)){}),
+        APIResult.class);
   }
 
   public APIResult updatePartitionOfFactTable(String fact, String storage,
@@ -924,12 +884,12 @@ public class LensMetadataClient {
   public APIResult updatePartitionsOfFactTable(String fact, String storage,
     XPartitionList partitions) {
     WebTarget target = getMetastoreWebTarget();
-    APIResult result = target.path("facts").path(fact)
+    return target.path("facts").path(fact)
       .path("storages").path(storage).path("partitions")
       .queryParam("sessionid", this.connection.getSessionHandle())
       .request(MediaType.APPLICATION_XML)
-      .put(Entity.xml(objFact.createXPartitionList(partitions)), APIResult.class);
-    return result;
+      .put(Entity.xml(new GenericEntity<JAXBElement<XPartitionList>>(objFact.createXPartitionList(partitions)){}),
+        APIResult.class);
   }
 
   public APIResult updatePartitionsOfFactTable(String fact, String storage,

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/LensStatement.java b/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
index 8de7708..0009182 100644
--- a/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
+++ b/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
@@ -28,6 +28,7 @@ import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 
 import org.apache.lens.api.APIResult;
+import org.apache.lens.api.LensConf;
 import org.apache.lens.api.query.*;
 import org.apache.lens.api.query.QueryStatus.Status;
 
@@ -41,11 +42,13 @@ import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
 import org.glassfish.jersey.media.multipart.FormDataMultiPart;
 
 import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
 
 /**
  * Top level class which is used to execute lens queries.
  */
 @RequiredArgsConstructor
+@Slf4j
 public class LensStatement {
 
   /** The connection. */
@@ -125,7 +128,7 @@ public class LensStatement {
    */
   public LensAPIResult<QueryPrepareHandle> prepareQuery(String sql, String queryName) throws LensAPIException {
     if (!connection.isOpen()) {
-      throw new IllegalStateException("Lens Connection has to be " + "established before querying");
+      throw new IllegalStateException("Lens Connection has to be established before querying");
     }
 
     Client client = connection.buildClient();
@@ -151,7 +154,7 @@ public class LensStatement {
    */
   public LensAPIResult<QueryPlan> explainAndPrepare(String sql, String queryName) throws LensAPIException {
     if (!connection.isOpen()) {
-      throw new IllegalStateException("Lens Connection has to be " + "established before querying");
+      throw new IllegalStateException("Lens Connection has to be established before querying");
     }
 
     Client client = connection.buildClient();
@@ -186,6 +189,8 @@ public class LensStatement {
     if (!StringUtils.isBlank(queryName)) {
       mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("queryName").build(), queryName));
     }
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
+      MediaType.APPLICATION_XML_TYPE));
     return mp;
   }
 
@@ -260,6 +265,7 @@ public class LensStatement {
         .get(LensQuery.class);
       return query;
     } catch (Exception e) {
+      log.error("Failed to get query status, cause:", e);
       throw new IllegalStateException("Failed to get query status, cause:" + e.getMessage());
     }
   }
@@ -277,7 +283,8 @@ public class LensStatement {
       return target.path(handle.toString()).queryParam("sessionid", connection.getSessionHandle()).request()
         .get(LensPreparedQuery.class);
     } catch (Exception e) {
-      throw new IllegalStateException("Failed to get query status, cause:" + e.getMessage());
+      log.error("Failed to get prepared query, cause:", e);
+      throw new IllegalStateException("Failed to get prepared query, cause:" + e.getMessage());
     }
   }
 
@@ -290,7 +297,7 @@ public class LensStatement {
    */
   private LensAPIResult<QueryHandle> executeQuery(String sql, String queryName) throws LensAPIException {
     if (!connection.isOpen()) {
-      throw new IllegalStateException("Lens Connection has to be " + "established before querying");
+      throw new IllegalStateException("Lens Connection has to be established before querying");
     }
 
     Client client  = connection.buildClient();
@@ -301,7 +308,8 @@ public class LensStatement {
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("queryName").build(), queryName == null ? ""
       : queryName));
-
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
+      MediaType.APPLICATION_XML_TYPE));
     WebTarget target = getQueryWebTarget(client);
 
     Response response = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
@@ -333,7 +341,8 @@ public class LensStatement {
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("queryName").build(), queryName == null ? ""
       : queryName));
-
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
+      MediaType.APPLICATION_XML_TYPE));
     QueryHandle handle = target.request()
       .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), QueryHandle.class);
 
@@ -358,7 +367,8 @@ public class LensStatement {
       .getSessionHandle(), MediaType.APPLICATION_XML_TYPE));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), sql));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "explain"));
-
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
+      MediaType.APPLICATION_XML_TYPE));
     WebTarget target = getQueryWebTarget(client);
 
     Response response = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
@@ -432,6 +442,7 @@ public class LensStatement {
       return target.path(query.getQueryHandle().toString()).path("resultsetmetadata")
         .queryParam("sessionid", connection.getSessionHandle()).request().get(QueryResultSetMetadata.class);
     } catch (Exception e) {
+      log.error("Failed to get resultset metadata, cause:", e);
       throw new IllegalStateException("Failed to get resultset metadata, cause:" + e.getMessage());
     }
   }
@@ -452,15 +463,17 @@ public class LensStatement {
    */
   public QueryResult getResultSet(LensQuery query) {
     if (query.getStatus().getStatus() != QueryStatus.Status.SUCCESSFUL) {
-      throw new IllegalArgumentException("Result set metadata " + "can be only queries for successful queries");
+      throw new IllegalArgumentException("Result set metadata can be only queries for successful queries");
     }
     Client client = connection.buildClient();
 
     try {
       WebTarget target = getQueryWebTarget(client);
       return target.path(query.getQueryHandle().toString()).path("resultset")
-        .queryParam("sessionid", connection.getSessionHandle()).request().get(QueryResult.class);
+        .queryParam("sessionid", connection.getSessionHandle()).request(MediaType.APPLICATION_XML_TYPE).get(
+          QueryResult.class);
     } catch (Exception e) {
+      log.error("Failed to get resultset, cause:", e);
       throw new IllegalStateException("Failed to get resultset, cause:" + e.getMessage());
     }
   }
@@ -482,7 +495,8 @@ public class LensStatement {
       return target.path(query.getQueryHandle().toString()).path("httpresultset")
         .queryParam("sessionid", connection.getSessionHandle()).request().get();
     } catch (Exception e) {
-      throw new IllegalStateException("Failed to get resultset, cause:" + e.getMessage());
+      log.error("Failed to get http resultset, cause:", e);
+      throw new IllegalStateException("Failed to get http resultset, cause:" + e.getMessage());
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/java/org/apache/lens/examples/SampleMetastore.java
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/java/org/apache/lens/examples/SampleMetastore.java b/lens-examples/src/main/java/org/apache/lens/examples/SampleMetastore.java
index 1fbd358..b6385d4 100644
--- a/lens-examples/src/main/java/org/apache/lens/examples/SampleMetastore.java
+++ b/lens-examples/src/main/java/org/apache/lens/examples/SampleMetastore.java
@@ -185,6 +185,9 @@ public class SampleMetastore {
       if (metastore.retCode != 0) {
         System.exit(metastore.retCode);
       }
+    } catch (Throwable th) {
+      log.error("Error during creating sample metastore", th);
+      throw th;
     } finally {
       if (metastore != null) {
         metastore.close();

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/city.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/city.xml b/lens-examples/src/main/resources/city.xml
index 1a89a2a..fe89ca7 100644
--- a/lens-examples/src/main/resources/city.xml
+++ b/lens-examples/src/main/resources/city.xml
@@ -22,10 +22,10 @@
 <x_dimension name="city" xmlns="uri:lens:cube:0.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
   xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <attributes>
-    <dim_attribute name="id" type="INT"/>
-    <dim_attribute name="name" type="STRING"/>
-    <dim_attribute name="POI" type="ARRAY&lt;STRING&gt;" description="Point of interests"/>
-    <dim_attribute name="population" type="BIGINT" />
+    <dim_attribute name="id" _type="INT"/>
+    <dim_attribute name="name" _type="STRING"/>
+    <dim_attribute name="POI" _type="ARRAY&lt;STRING&gt;" description="Point of interests"/>
+    <dim_attribute name="population" _type="BIGINT" />
   </attributes>
   <properties>
     <property name="dimension.city.timed.dimension" value="dt"/>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/city_subset.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/city_subset.xml b/lens-examples/src/main/resources/city_subset.xml
index 18c7847..e39e5c3 100644
--- a/lens-examples/src/main/resources/city_subset.xml
+++ b/lens-examples/src/main/resources/city_subset.xml
@@ -22,8 +22,8 @@
 <x_dimension_table dimension_name="city" table_name="city_subset" weight="100.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="ID" name="id" type="INT"/>
-    <column comment="name" name="name" type="STRING"/>
+    <column comment="ID" name="id" _type="INT"/>
+    <column comment="name" name="name" _type="STRING"/>
   </columns>
   <storage_tables>
     <storage_table>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/city_table.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/city_table.xml b/lens-examples/src/main/resources/city_table.xml
index 27be305..1f9f152 100644
--- a/lens-examples/src/main/resources/city_table.xml
+++ b/lens-examples/src/main/resources/city_table.xml
@@ -22,10 +22,10 @@
 <x_dimension_table dimension_name="city" table_name="city_table" weight="10.0" xmlns="uri:lens:cube:0.1"
   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <columns>
-    <column comment="ID" name="id" type="INT"/>
-    <column comment="name" name="name" type="STRING"/>
-    <column comment="Point of interests" name="POI" type="ARRAY&lt;STRING&gt;"/>
-    <column comment="city population" name="population" type="BIGINT"/>
+    <column comment="ID" name="id" _type="INT"/>
+    <column comment="name" name="name" _type="STRING"/>
+    <column comment="Point of interests" name="POI" _type="ARRAY&lt;STRING&gt;"/>
+    <column comment="city population" name="population" _type="BIGINT"/>
   </columns>
   <properties>
     <property name="city.prop" value="d1"/>
@@ -38,7 +38,7 @@
       <storage_name>local</storage_name>
       <table_desc external="true" field_delimiter="," collection_delimiter=":" table_location="/tmp/examples/city">
         <part_cols>
-          <column comment="Time column" name="dt" type="STRING"/>
+          <column comment="Time column" name="dt" _type="STRING"/>
         </part_cols>
         <time_part_cols>dt</time_part_cols>
       </table_desc>

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-examples/src/main/resources/customer.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/customer.xml b/lens-examples/src/main/resources/customer.xml
index c6182a2..0bb66f6 100644
--- a/lens-examples/src/main/resources/customer.xml
+++ b/lens-examples/src/main/resources/customer.xml
@@ -22,19 +22,19 @@
 <x_dimension name="customer" xmlns="uri:lens:cube:0.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
   xsi:schemaLocation="uri:lens:cube:0.1 cube-0.1.xsd ">
   <attributes>
-    <dim_attribute name="id" type="INT"/>
-    <dim_attribute name="name" type="STRING"/>
-    <dim_attribute name="gender" type="STRING">
+    <dim_attribute name="id" _type="INT"/>
+    <dim_attribute name="name" _type="STRING"/>
+    <dim_attribute name="gender" _type="STRING">
       <values>M</values>
       <values>F</values>
     </dim_attribute>
-    <dim_attribute name="age" type="INT" />
-    <dim_attribute name="city_id" type="INT" />
-    <dim_attribute name="customer_city_name" type="string" description="City name to which the customer belongs"
+    <dim_attribute name="age" _type="INT" />
+    <dim_attribute name="city_id" _type="INT" />
+    <dim_attribute name="customer_city_name" _type="string" description="City name to which the customer belongs"
       display_string="Customer City">
       <chain_ref_column chain_name="customer_city" ref_col="name" />
     </dim_attribute>
-    <dim_attribute name="customer_credit_status" type="STRING" start_time='2015-03-01T00:00:00'/>
+    <dim_attribute name="customer_credit_status" _type="STRING" start_time='2015-03-01T00:00:00'/>
   </attributes>
   <join_chains>
     <join_chain name="customer_city">


[32/51] [abbrv] lens git commit: LENS-924 : Remove edits to LensSession's default config

Posted by de...@apache.org.
LENS-924 : Remove edits to LensSession's default config


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/5d2dccb0
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/5d2dccb0
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/5d2dccb0

Branch: refs/heads/current-release-line
Commit: 5d2dccb00606844a877b7ff379e80ae0a3b965a9
Parents: edcdd96
Author: Puneet Gupta <pu...@gmail.com>
Authored: Tue Jan 19 10:14:31 2016 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Jan 19 10:14:31 2016 +0530

----------------------------------------------------------------------
 .../apache/lens/server/session/LensSessionImpl.java |  5 +++--
 .../apache/lens/server/query/TestQueryService.java  | 16 ++++++++++++++++
 2 files changed, 19 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/5d2dccb0/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java b/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java
index cc62d92..895a819 100644
--- a/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java
@@ -66,7 +66,7 @@ public class LensSessionImpl extends HiveSessionImpl {
   private long sessionTimeout;
 
   /** The conf. */
-  private Configuration conf = new Configuration(createDefaultConf());
+  private Configuration conf = createDefaultConf();
 
   /**
    * Keep track of DB static resources which failed to be added to this session
@@ -123,7 +123,8 @@ public class LensSessionImpl extends HiveSessionImpl {
         }
       }
     }
-    return sessionDefaultConfig;
+    //Not exposing sessionDefaultConfig directly to insulate it from modifications
+    return new Configuration(sessionDefaultConfig);
   }
 
   /** The default hive session conf. */

http://git-wip-us.apache.org/repos/asf/lens/blob/5d2dccb0/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index c5d75de..494bce5 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -1254,6 +1254,22 @@ public class TestQueryService extends LensJerseyTest {
       // server configuration should not set
       assertNull(dconf.get("lens.server.persist.location"));
     }
+
+    checkDefaultConfigConsistency();
+  }
+
+  public void checkDefaultConfigConsistency() {
+    Configuration conf = LensSessionImpl.createDefaultConf();
+    assertNotNull(conf.get("lens.query.enable.persistent.resultset"));
+    boolean isDriverPersistent = conf.getBoolean("lens.query.enable.persistent.resultset", false);
+    conf.setBoolean("lens.query.enable.persistent.resultset", isDriverPersistent ? false : true);
+    conf.set("new_random_property", "new_random_property");
+
+    // Get the default conf again and verify its not modified by previous operations
+    conf = LensSessionImpl.createDefaultConf();
+    boolean isDriverPersistentNow = conf.getBoolean("lens.query.enable.persistent.resultset", false);
+    assertEquals(isDriverPersistentNow, isDriverPersistent);
+    assertNull(conf.get("new_random_property"));
   }
 
   /**


[18/51] [abbrv] lens git commit: LENS-896 : Fix setting of baseurl on jdbc client

Posted by de...@apache.org.
LENS-896 : Fix setting of baseurl on jdbc client


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/d5e923e2
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/d5e923e2
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/d5e923e2

Branch: refs/heads/current-release-line
Commit: d5e923e25b00c4f718d971706cac916dcafd5a72
Parents: fdf04be
Author: Piyush <pi...@gmail.com>
Authored: Thu Jan 7 11:53:52 2016 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Thu Jan 7 11:53:52 2016 +0530

----------------------------------------------------------------------
 .../java/org/apache/lens/client/jdbc/JDBCUtils.java | 16 +++++++++++-----
 .../org/apache/lens/jdbc/JDBCUrlParserTest.java     |  7 +++----
 2 files changed, 14 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/d5e923e2/lens-client/src/main/java/org/apache/lens/client/jdbc/JDBCUtils.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/jdbc/JDBCUtils.java b/lens-client/src/main/java/org/apache/lens/client/jdbc/JDBCUtils.java
index 34448d3..4c32610 100644
--- a/lens-client/src/main/java/org/apache/lens/client/jdbc/JDBCUtils.java
+++ b/lens-client/src/main/java/org/apache/lens/client/jdbc/JDBCUtils.java
@@ -32,6 +32,9 @@ import java.util.jar.Manifest;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import javax.ws.rs.core.UriBuilder;
+
+import org.apache.lens.client.LensClientConfig;
 import org.apache.lens.client.LensConnectionParams;
 
 /**
@@ -97,11 +100,6 @@ public final class JDBCUtils {
     }
 
     URI jdbcUri = URI.create(uri.substring(URI_JDBC_PREFIX.length()));
-
-    /*
-     * if (jdbcUri.getHost() != null) { params.setHost(jdbcUri.getHost()); } if (jdbcUri.getPort() > 0) {
-     * params.setPort(jdbcUri.getPort()); }
-     */
     Pattern pattern = Pattern.compile(KEY_VALUE_REGEX);
     // dbname and session settings
     String sessVars = jdbcUri.getPath();
@@ -145,6 +143,14 @@ public final class JDBCUtils {
         params.getLensVars().put(varMatcher.group(1), varMatcher.group(2));
       }
     }
+    UriBuilder baseUriBuilder = UriBuilder.fromUri(LensClientConfig.DEFAULT_SERVER_BASE_URL);
+    if (jdbcUri.getHost() != null) {
+      baseUriBuilder.host(jdbcUri.getHost());
+    }
+    if (jdbcUri.getPort() != -1) {
+      baseUriBuilder.port(jdbcUri.getPort());
+    }
+    params.setBaseUrl(baseUriBuilder.build().toString());
     return params;
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/d5e923e2/lens-client/src/test/java/org/apache/lens/jdbc/JDBCUrlParserTest.java
----------------------------------------------------------------------
diff --git a/lens-client/src/test/java/org/apache/lens/jdbc/JDBCUrlParserTest.java b/lens-client/src/test/java/org/apache/lens/jdbc/JDBCUrlParserTest.java
index 3c97878..c8b51b9 100644
--- a/lens-client/src/test/java/org/apache/lens/jdbc/JDBCUrlParserTest.java
+++ b/lens-client/src/test/java/org/apache/lens/jdbc/JDBCUrlParserTest.java
@@ -78,10 +78,9 @@ public class JDBCUrlParserTest {
   public void testJDBCWithCustomHostAndPortAndDB() {
     String uri = "jdbc:lens://myhost:9000/mydb";
     LensConnectionParams params = JDBCUtils.parseUrl(uri);
-    // Assert.assertEquals( "myhost",
-    // params.getHost(),"The host name should be myhost");
-    // Assert.assertEquals( 9000, params.getPort(),"The port should be 9000");
-    Assert.assertEquals("mydb", params.getDbName(), "The database should be mydb");
+    Assert.assertEquals(params.getBaseConnectionUrl(), "http://myhost:9000/lensapi",
+      "The base url  should be http://myhost:9000/lensapi");
+    Assert.assertEquals(params.getDbName(), "mydb", "The database should be mydb");
     Assert.assertTrue(params.getSessionVars().isEmpty(), "Session Variable list should be empty");
     Assert.assertTrue(params.getLensConfs().isEmpty(), "The conf list should be empty");
     Assert.assertTrue(params.getLensVars().isEmpty(), "The lens var list should be empty");


[10/51] [abbrv] lens git commit: LENS-903 : No candidate dim available exception should contain only brief error

Posted by de...@apache.org.
LENS-903 : No candidate dim available exception should contain only brief error


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/b84cb2cd
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/b84cb2cd
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/b84cb2cd

Branch: refs/heads/current-release-line
Commit: b84cb2cd32a53806278bbe5d45028dbaa760bdf5
Parents: 4d3d2f8
Author: Sushil Mohanty <su...@apache.org>
Authored: Thu Dec 17 17:31:04 2015 +0530
Committer: Sushil Mohanty <su...@apache.org>
Committed: Thu Dec 17 17:31:04 2015 +0530

----------------------------------------------------------------------
 .../lens/cube/parse/CubeQueryContext.java       |  7 ++++---
 .../lens/cube/parse/TestCubeRewriter.java       | 22 +++++++++++++++++++-
 2 files changed, 25 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/b84cb2cd/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index f75a6b9..4034a54 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -32,6 +32,7 @@ import java.io.IOException;
 import java.util.*;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.cube.error.NoCandidateDimAvailableException;
 import org.apache.lens.cube.error.NoCandidateFactAvailableException;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
@@ -810,12 +811,12 @@ public class CubeQueryContext implements TrackQueriedColumns {
               }
             }
           }
-          throw new LensException(LensCubeErrorCode.NO_CANDIDATE_DIM_AVAILABLE.getLensErrorInfo(),
-              dim.getName(), reason);
+          log.error("Query rewrite failed due to NO_CANDIDATE_DIM_AVAILABLE, Cause {}",
+                  dimPruningMsgs.get(dim).toJsonObject());
+          throw new NoCandidateDimAvailableException(dimPruningMsgs.get(dim));
         }
       }
     }
-
     return dimsToQuery;
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/b84cb2cd/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 802ff42..9a08735 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -32,6 +32,7 @@ import java.text.SimpleDateFormat;
 import java.util.*;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.cube.error.NoCandidateDimAvailableException;
 import org.apache.lens.cube.error.NoCandidateFactAvailableException;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCause;
@@ -46,6 +47,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.ParseException;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 
@@ -1078,6 +1080,23 @@ public class TestCubeRewriter extends TestQueryRewrite {
   }
 
   @Test
+  public void testNoCandidateDimAvailableExceptionCompare() throws Exception {
+
+    //Max cause COLUMN_NOT_FOUND, Ordinal 9
+    PruneCauses<CubeDimensionTable> pr1 = new PruneCauses<CubeDimensionTable>();
+    pr1.addPruningMsg(new CubeDimensionTable(new Table("test", "citydim")),
+            CandidateTablePruneCause.columnNotFound("test1", "test2", "test3"));
+    NoCandidateDimAvailableException ne1 = new NoCandidateDimAvailableException(pr1);
+
+    //Max cause EXPRESSION_NOT_EVALUABLE, Ordinal 6
+    PruneCauses<CubeDimensionTable> pr2 = new PruneCauses<CubeDimensionTable>();
+    pr2.addPruningMsg(new CubeDimensionTable(new Table("test", "citydim")),
+            CandidateTablePruneCause.expressionNotEvaluable("testexp1", "testexp2"));
+    NoCandidateDimAvailableException ne2 = new NoCandidateDimAvailableException(pr2);
+    assertEquals(ne1.compareTo(ne2), 3);
+  }
+
+  @Test
   public void testDimensionQueryWithMultipleStorages() throws Exception {
     String hqlQuery = rewrite("select name, stateid from" + " citydim", getConf());
     String expected =
@@ -1095,7 +1114,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     // state table is present on c1 with partition dumps and partitions added
     LensException e = getLensExceptionInRewrite("select name, capital from statedim ", conf);
     assertEquals(e.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_DIM_AVAILABLE.getLensErrorInfo().getErrorCode());
-    assertEquals(extractPruneCause(e), new PruneCauses.BriefAndDetailedError(
+    NoCandidateDimAvailableException ne = (NoCandidateDimAvailableException) e;
+    assertEquals(ne.getJsonMessage(), new PruneCauses.BriefAndDetailedError(
       NO_CANDIDATE_STORAGES.errorFormat,
       new HashMap<String, List<CandidateTablePruneCause>>() {
         {


[22/51] [abbrv] lens git commit: LENS-735 : Remove accepting TableReferences for ReferenceDimAttribute

Posted by de...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
index e0c0923..b0044da 100644
--- a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
@@ -309,8 +309,7 @@ public class TestMetastoreService extends LensJerseyTest {
     XChainColumn xcc = new XChainColumn();
     xcc.setChainName("chain1");
     xcc.setRefCol("col2");
-    xd3.setRefSpec(cubeObjectFactory.createXDimAttributeRefSpec());
-    xd3.getRefSpec().getChainRefColumn().add(xcc);
+    xd3.getChainRefColumn().add(xcc);
     xd3.setNumDistinctValues(1000L);
 
     // add attribute with complex type
@@ -602,7 +601,7 @@ public class TestMetastoreService extends LensJerseyTest {
       boolean chainValidated = false;
       for (XDimAttribute attr : actual.getDimAttributes().getDimAttribute()) {
         if (attr.getName().equalsIgnoreCase("testdim2col2")) {
-          assertEquals(attr.getRefSpec().getChainRefColumn().get(0).getDestTable(), "testdim");
+          assertEquals(attr.getChainRefColumn().get(0).getDestTable(), "testdim");
           chainValidated = true;
           break;
         }
@@ -619,7 +618,7 @@ public class TestMetastoreService extends LensJerseyTest {
       assertEquals(hcube.getDimAttributeByName("testdim2col2").getDescription(), "ref chained dimension");
       assertEquals(((BaseDimAttribute) hcube.getDimAttributeByName("dim4")).getType(),
         "struct<a:int,b:array<string>,c:map<int,array<struct<x:int,y:array<int>>>");
-      ReferencedDimAtrribute testdim2col2 = (ReferencedDimAtrribute) hcube.getDimAttributeByName("testdim2col2");
+      ReferencedDimAttribute testdim2col2 = (ReferencedDimAttribute) hcube.getDimAttributeByName("testdim2col2");
       assertEquals(testdim2col2.getType(), "string");
       assertEquals(testdim2col2.getChainRefColumns().get(0).getChainName(), "chain1");
       assertEquals(testdim2col2.getChainRefColumns().get(0).getRefColumn(), "col2");
@@ -1095,8 +1094,7 @@ public class TestMetastoreService extends LensJerseyTest {
     XChainColumn xcc = new XChainColumn();
     xcc.setChainName("chain1");
     xcc.setRefCol("col2");
-    hd3.setRefSpec(cubeObjectFactory.createXDimAttributeRefSpec());
-    hd3.getRefSpec().getChainRefColumn().add(xcc);
+    hd3.getChainRefColumn().add(xcc);
     hd3.setNumDistinctValues(1000L);
     hierarchy.getDimAttribute().add(hd3);
     xd4.setHierarchy(hierarchy);
@@ -1106,8 +1104,7 @@ public class TestMetastoreService extends LensJerseyTest {
     xd5.setType("INT");
     xd5.setDescription("ref column");
     xd5.setDisplayString("Column5");
-    xd5.setRefSpec(cubeObjectFactory.createXDimAttributeRefSpec());
-    xd5.getRefSpec().getChainRefColumn().add(xcc);
+    xd5.getChainRefColumn().add(xcc);
     xd5.getValues().add("1");
     xd5.getValues().add("2");
     xd5.getValues().add("3");
@@ -1225,7 +1222,7 @@ public class TestMetastoreService extends LensJerseyTest {
       assertEquals(col4h2.getType(), "string");
       assertEquals(col4h2.getDescription(), "base column");
       assertEquals(col4h2.getDisplayString(), "Column4-h2");
-      ReferencedDimAtrribute col4h3 = (ReferencedDimAtrribute) col4.getHierarchy().get(2);
+      ReferencedDimAttribute col4h3 = (ReferencedDimAttribute) col4.getHierarchy().get(2);
       assertEquals(col4h3.getName(), "col4-h3");
       assertEquals(col4h3.getDescription(), "ref column");
       assertEquals(col4h3.getDisplayString(), "Column4-h3");
@@ -1234,7 +1231,7 @@ public class TestMetastoreService extends LensJerseyTest {
       assertEquals(col4h3.getChainRefColumns().get(0).getRefColumn(), "col2");
       assertEquals(col4h3.getNumOfDistinctValues().get(), (Long) 1000L);
       assertNotNull(dim.getAttributeByName("col5"));
-      ReferencedDimAtrribute col5 = (ReferencedDimAtrribute) dim.getAttributeByName("col5");
+      ReferencedDimAttribute col5 = (ReferencedDimAttribute) dim.getAttributeByName("col5");
       assertEquals(col5.getDescription(), "ref column");
       assertEquals(col5.getDisplayString(), "Column5");
       assertEquals(col5.getType(), "int");

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-storage-db/src/test/java/org/apache/lens/storage/db/TestDBStorage.java
----------------------------------------------------------------------
diff --git a/lens-storage-db/src/test/java/org/apache/lens/storage/db/TestDBStorage.java b/lens-storage-db/src/test/java/org/apache/lens/storage/db/TestDBStorage.java
index 92a0027..55c32e8 100644
--- a/lens-storage-db/src/test/java/org/apache/lens/storage/db/TestDBStorage.java
+++ b/lens-storage-db/src/test/java/org/apache/lens/storage/db/TestDBStorage.java
@@ -63,13 +63,18 @@ public class TestDBStorage {
   /**
    * The db1.
    */
-  Storage db1 = new DBStorage(DB_STORAGE1, DB_STORAGE1, null);
+  Storage db1;
 
   /**
    * The db2.
    */
-  Storage db2 = new DBStorage(DB_STORAGE2, DB_STORAGE2, null);
+  Storage db2;
 
+  TestDBStorage() throws Exception {
+    db1 = new DBStorage(DB_STORAGE1, DB_STORAGE1, null);
+    db2 = new DBStorage(DB_STORAGE2, DB_STORAGE2, null);
+
+  }
   /**
    * Setup.
    *
@@ -105,7 +110,7 @@ public class TestDBStorage {
    * @throws HiveException the hive exception
    */
   @Test(groups = "first")
-  public void testDBStorage() throws HiveException {
+  public void testDBStorage() throws Exception {
     CubeMetastoreClient cc = CubeMetastoreClient.getInstance(conf);
     if (!cc.tableExists(DB_STORAGE1)) {
       cc.createStorage(db1);


[36/51] [abbrv] lens git commit: LENS-929: Fact skipping should be logged

Posted by de...@apache.org.
LENS-929: Fact skipping should be logged


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/b1f38d55
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/b1f38d55
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/b1f38d55

Branch: refs/heads/current-release-line
Commit: b1f38d55eddf261426aa47bfaa37c54a1eefa3d0
Parents: 7035de9
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Mon Jan 25 14:03:55 2016 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Mon Jan 25 14:03:55 2016 +0530

----------------------------------------------------------------------
 .../apache/lens/cube/parse/CubeQueryContext.java    |  1 +
 .../server/api/driver/MinQueryCostSelector.java     | 16 ++++++++++++----
 2 files changed, 13 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/b1f38d55/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index 3e930de..79dd88c 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -465,6 +465,7 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
   }
 
   public void addFactPruningMsgs(CubeFactTable fact, CandidateTablePruneCause factPruningMsg) {
+    log.info("Pruning fact {} with cause: {}", fact, factPruningMsg);
     factPruningMsgs.addPruningMsg(fact, factPruningMsg);
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/b1f38d55/lens-server-api/src/main/java/org/apache/lens/server/api/driver/MinQueryCostSelector.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/MinQueryCostSelector.java b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/MinQueryCostSelector.java
index 8fdde1d..6f17327 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/MinQueryCostSelector.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/MinQueryCostSelector.java
@@ -18,13 +18,18 @@
  */
 package org.apache.lens.server.api.driver;
 
-import java.util.*;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
 
 import org.apache.lens.server.api.query.AbstractQueryContext;
 import org.apache.lens.server.api.query.cost.QueryCost;
 
 import org.apache.hadoop.conf.Configuration;
 
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
 public class MinQueryCostSelector implements DriverSelector {
 
   /**
@@ -38,12 +43,15 @@ public class MinQueryCostSelector implements DriverSelector {
   public LensDriver select(final AbstractQueryContext ctx, final Configuration conf) {
 
     final Collection<LensDriver> drivers = ctx.getDriverContext().getDriversWithValidQueryCost();
-
+    log.info("Candidate drivers: {}", drivers);
+    for (LensDriver driver : drivers) {
+      log.debug("Cost on driver {}: {}", driver, ctx.getDriverQueryCost(driver));
+    }
     return Collections.min(drivers, new Comparator<LensDriver>() {
       @Override
       public int compare(LensDriver d1, LensDriver d2) {
-        final QueryCost c1 = ctx.getDriverContext().getDriverQueryCost(d1);
-        final QueryCost c2 = ctx.getDriverContext().getDriverQueryCost(d2);
+        final QueryCost c1 = ctx.getDriverQueryCost(d1);
+        final QueryCost c2 = ctx.getDriverQueryCost(d2);
         return c1.compareTo(c2);
       }
     });


[35/51] [abbrv] lens git commit: LENS-927 : Fix intermittent test failure due to alias look up in JoinTree

Posted by de...@apache.org.
LENS-927 : Fix intermittent test failure due to alias look up in JoinTree


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/7035de9d
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/7035de9d
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/7035de9d

Branch: refs/heads/current-release-line
Commit: 7035de9d73f2df1e72111d43875b0e21d82feaf5
Parents: 27a0cad
Author: Amareshwari Sriramadasu <am...@gmail.com>
Authored: Mon Jan 25 14:00:31 2016 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Mon Jan 25 14:00:31 2016 +0530

----------------------------------------------------------------------
 .../main/java/org/apache/lens/cube/parse/join/JoinClause.java | 7 ++-----
 .../main/java/org/apache/lens/cube/parse/join/JoinTree.java   | 6 ++----
 .../apache/lens/cube/parse/TestDenormalizationResolver.java   | 4 ++--
 3 files changed, 6 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/7035de9d/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinClause.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinClause.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinClause.java
index acc9d5c..4325252 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinClause.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinClause.java
@@ -122,13 +122,10 @@ public class JoinClause implements Comparable<JoinClause> {
       // Last element in this list is link from cube to first dimension
       for (int i = entry.getValue().size() - 1; i >= 0; i--) {
         // Adds a child if needed, or returns a child already existing corresponding to the given link.
-        current = current.addChild(entry.getValue().get(i), cubeql, aliasUsage);
+        current = current.addChild(entry.getValue().get(i), aliasUsage);
       }
       // This is a destination table. Decide alias separately. e.g. chainname
-      // nullcheck is necessary because dimensions can be destinations too. In that case getAlias() == null
-      if (entry.getKey().getAlias() != null) {
-        current.setAlias(entry.getKey().getAlias());
-      }
+      current.setAlias(entry.getKey().getAlias());
     }
     if (root.getSubtrees().size() > 0) {
       root.setAlias(cubeql.getAliasForTableName(

http://git-wip-us.apache.org/repos/asf/lens/blob/7035de9d/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinTree.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinTree.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinTree.java
index 197847c..bcbfed5 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinTree.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinTree.java
@@ -22,7 +22,6 @@ import java.util.*;
 
 import org.apache.lens.cube.metadata.AbstractCubeTable;
 import org.apache.lens.cube.metadata.join.TableRelationship;
-import org.apache.lens.cube.parse.CubeQueryContext;
 
 import org.apache.hadoop.hive.ql.parse.JoinType;
 
@@ -57,8 +56,7 @@ public class JoinTree {
     this.depthFromRoot = depthFromRoot;
   }
 
-  public JoinTree addChild(TableRelationship tableRelationship,
-                           CubeQueryContext query, Map<String, Integer> aliasUsage) {
+  public JoinTree addChild(TableRelationship tableRelationship, Map<String, Integer> aliasUsage) {
     if (getSubtrees().get(tableRelationship) == null) {
       JoinTree current = new JoinTree(this, tableRelationship,
         this.depthFromRoot + 1);
@@ -68,7 +66,7 @@ public class JoinTree {
       // And for destination tables, an alias will be decided from here but might be
       // overridden outside this function.
       AbstractCubeTable destTable = tableRelationship.getToTable();
-      current.setAlias(query.getAliasForTableName(destTable.getName()));
+      current.setAlias(destTable.getName());
       if (aliasUsage.get(current.getAlias()) == null) {
         aliasUsage.put(current.getAlias(), 0);
       } else {

http://git-wip-us.apache.org/repos/asf/lens/blob/7035de9d/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
index d7707a9..51ba636 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
@@ -89,8 +89,8 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
     String expected =
       getExpectedQuery(cubeName,
         "select dim3chain.name, testcube.dim2big1, max(testcube.msr3), sum(testcube.msr2) FROM ", " JOIN "
-          + getDbName() + "c2_testdim2tbl3 dim2chain " + "on testcube.dim2big1 = dim2chain.bigid1" + " join "
-          + getDbName() + "c2_testdim3tbl dim3chain on " + "dim2chain.testdim3id = dim3chain.id", null,
+          + getDbName() + "c2_testdim2tbl3 testdim2 " + "on testcube.dim2big1 = testdim2.bigid1" + " join "
+          + getDbName() + "c2_testdim3tbl dim3chain on " + "testdim2.testdim3id = dim3chain.id", null,
         " group by dim3chain.name, (testcube.dim2big1)", null,
         getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary4"),
         null);


[29/51] [abbrv] lens git commit: LENS-851 : Fix aliasing for non-aggregate functions in multi fact union query in where clause

Posted by de...@apache.org.
LENS-851 : Fix aliasing for non-aggregate functions in multi fact union query in where clause


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/9c03c76e
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/9c03c76e
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/9c03c76e

Branch: refs/heads/current-release-line
Commit: 9c03c76e6d79b1b45d79512b28bf021c52a007b3
Parents: 908530f
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Tue Jan 12 09:58:27 2016 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Jan 12 09:58:27 2016 +0530

----------------------------------------------------------------------
 .../lens/cube/parse/SingleFactMultiStorageHQLContext.java      | 2 +-
 .../test/java/org/apache/lens/cube/parse/TestCubeRewriter.java | 6 ++++--
 2 files changed, 5 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/9c03c76e/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
index ac56328..7e3a0bf 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
@@ -220,7 +220,7 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
     if (astNode == null) {
       return null;
     }
-    if (isAggregateAST(astNode) || isTableColumnAST(astNode)) {
+    if (isAggregateAST(astNode) || isTableColumnAST(astNode) || isNonAggregateFunctionAST(astNode)) {
       if (innerToOuterASTs.containsKey(new HashableASTNode(astNode))) {
         ASTNode ret = innerToOuterASTs.get(new HashableASTNode(astNode));
         // Set parent null for quicker GC

http://git-wip-us.apache.org/repos/asf/lens/blob/9c03c76e/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 4810559..698f36c 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -419,7 +419,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
       conf.setBoolean(CubeQueryConfUtil.ENABLE_STORAGES_UNION, true);
 
       hqlQuery = rewrite("select ascii(cityid) as `City ID`, msr8, msr7 as `Third measure` "
-        + "from testCube where cityid = 'a' and zipcode = 'b' and " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
+        + "from testCube where ascii(cityid) = 'c' and cityid = 'a' and zipcode = 'b' and "
+        + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
 
       expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
         "SELECT testcube.alias0 as `City ID`, sum(testcube.alias1) + max(testcube.alias2), "
@@ -429,7 +430,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
         "select ascii(testcube.cityid) as `alias0`, sum(testcube.msr2) as `alias1`, "
           + "max(testcube.msr3) as `alias2`, "
           + "sum(case when testcube.cityid = 'x' then testcube.msr21 else testcube.msr22 end) as `alias3`",
-        "testcube.cityid = 'a' and testcube.zipcode = 'b'", "group by ascii(testcube.cityid)");
+        "testcube.alias0 = 'c' and testcube.cityid = 'a' and testcube.zipcode = 'b'",
+        "group by ascii(testcube.cityid)");
 
       compareQueries(hqlQuery, expected);
 


[02/51] [abbrv] lens git commit: LENS-885: Cleanup of Cube test cases

Posted by de...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index fea70b7..3be9406 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -19,12 +19,11 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.metadata.UpdatePeriod.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.*;
-import static org.apache.lens.cube.parse.CubeQueryConfUtil.getValidStorageTablesKey;
-import static org.apache.lens.cube.parse.CubeQueryConfUtil.getValidUpdatePeriodsKey;
+import static org.apache.lens.cube.parse.CubeQueryConfUtil.*;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
-import static org.apache.lens.cube.parse.CubeTestSetup.getWhereForMonthlyDailyAndHourly2monthsUnionQuery;
 
 import static org.testng.Assert.*;
 
@@ -36,6 +35,7 @@ import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCause;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode;
+import org.apache.lens.server.api.LensServerAPITestUtil;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.time.DateUtils;
@@ -61,18 +61,16 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 public class TestCubeRewriter extends TestQueryRewrite {
 
-  private final String cubeName = CubeTestSetup.TEST_CUBE_NAME;
-
   private Configuration conf;
 
   @BeforeTest
   public void setupDriver() throws Exception {
-    conf = new Configuration();
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C0,C1,C2");
-    conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, true);
-    conf.setBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, true);
-    conf.setBoolean(CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT, true);
-    conf.setBoolean(CubeQueryConfUtil.DISABLE_AGGREGATE_RESOLVER, false);
+    conf = LensServerAPITestUtil.getConfiguration(
+      DRIVER_SUPPORTED_STORAGES, "C0,C1,C2",
+      DISABLE_AUTO_JOINS, true,
+      ENABLE_SELECT_TO_GROUPBY, true,
+      ENABLE_GROUP_BY_TO_SELECT, true,
+      DISABLE_AGGREGATE_RESOLVER, false);
   }
 
   @Override
@@ -83,7 +81,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
   @Test
   public void testQueryWithNow() throws Exception {
     LensException e = getLensExceptionInRewrite(
-      "select SUM(msr2) from testCube where" + " time_range_in(d_time, 'NOW - 2DAYS', 'NOW')", getConf());
+      "select SUM(msr2) from testCube where " + getTimeRangeString("NOW - 2DAYS", "NOW"), getConf());
     assertEquals(e.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
   }
 
@@ -93,19 +91,12 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.setClass(CubeQueryConfUtil.TIME_RANGE_WRITER_CLASS, BetweenTimeRangeWriter.class, TimeRangeWriter.class);
 
     DateFormat qFmt = new SimpleDateFormat("yyyy-MM-dd-HH:mm:ss");
-    Calendar qCal = Calendar.getInstance();
-    Date toDate = qCal.getTime();
-    String qTo = qFmt.format(toDate);
-    qCal.setTime(TWODAYS_BACK);
-    Date from2DaysBackDate = qCal.getTime();
-    String qFrom = qFmt.format(from2DaysBackDate);
-
-    CubeQueryContext rewrittenQuery = rewriteCtx("select SUM(msr15) from testCube where"
-      + " time_range_in(d_time, '" + qFrom + "', '" + qTo + "')", conf);
+    String timeRangeString;
+    timeRangeString = getTimeRangeString(DAILY, -2, 0, qFmt);
+    CubeQueryContext rewrittenQuery = rewriteCtx("select SUM(msr15) from testCube where " + timeRangeString, conf);
 
-    DateFormat fmt = UpdatePeriod.CONTINUOUS.format();
-    String to = fmt.format(toDate);
-    String from = fmt.format(from2DaysBackDate);
+    String to = getDateStringWithOffset(DAILY, 0, CONTINUOUS);
+    String from = getDateStringWithOffset(DAILY, -2, CONTINUOUS);
 
     String expected = "select SUM((testCube.msr15)) from TestQueryRewrite.c0_testFact_CONTINUOUS testcube"
       + " WHERE ((( testcube . dt ) between  '" + from + "'  and  '" + to + "' ))";
@@ -114,20 +105,17 @@ public class TestCubeRewriter extends TestQueryRewrite {
     compareQueries(rewrittenQuery.toHQL(), expected);
 
     //test with msr2 on different fact
-    rewrittenQuery = rewriteCtx("select SUM(msr2) from testCube where" + " time_range_in(d_time, '"
-      + qFrom + "', '" + qTo + "')", conf);
+    rewrittenQuery = rewriteCtx("select SUM(msr2) from testCube where " + timeRangeString, conf);
     expected = "select SUM((testCube.msr2)) from TestQueryRewrite.c0_testFact testcube"
       + " WHERE ((( testcube . dt ) between  '" + from + "'  and  '" + to + "' ))";
     System.out.println("rewrittenQuery.toHQL() " + rewrittenQuery.toHQL());
     System.out.println("expected " + expected);
     compareQueries(rewrittenQuery.toHQL(), expected);
 
-    //from date 4 days back
-    qCal.setTime(BEFORE_4_DAYS_START);
-    Date from4DaysBackDate = qCal.getTime();
-    String qFrom4DaysBackDate = qFmt.format(from4DaysBackDate);
-    LensException th = getLensExceptionInRewrite("select SUM(msr15) from testCube where"
-      + " time_range_in(d_time, '" + qFrom4DaysBackDate + "', '" + qTo + "')", getConf());
+    //from date 6 days back
+    timeRangeString = getTimeRangeString(DAILY, -6, 0, qFmt);
+    LensException th = getLensExceptionInRewrite("select SUM(msr15) from testCube where "
+      + timeRangeString, getConf());
     assertEquals(th.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
   }
 
@@ -148,8 +136,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     CubeQueryContext rewrittenQuery =
       rewriteCtx("cube select" + " SUM(msr2) from testCube where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
     String expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(rewrittenQuery.toHQL(), expected);
     System.out.println("Non existing parts:" + rewrittenQuery.getNonExistingParts());
     assertNotNull(rewrittenQuery.getNonExistingParts());
@@ -160,7 +148,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     Configuration conf = getConf();
     conf.setClass(CubeQueryConfUtil.TIME_RANGE_WRITER_CLASS, AbridgedTimeRangeWriter.class, TimeRangeWriter.class);
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, false);
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1,C2,C4");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C1,C2,C4");
     CubeQueryContext cubeQueryContext =
       rewriteCtx("cube select" + " SUM(msr2) from testCube where " + THIS_YEAR_RANGE, conf);
     PruneCauses<CubeFactTable> pruneCause = cubeQueryContext.getFactPruningMsgs();
@@ -181,15 +169,15 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String hqlQuery = rewrite("cube select" + " SUM(msr2) from testCube where " + TWO_DAYS_RANGE, getConfWithStorages(
       "C2"));
     String expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     Configuration conf = getConfWithStorages("C1");
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
         getWhereForHourly2days("c1_testfact2"));
     compareQueries(hqlQuery, expected);
 
@@ -214,8 +202,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     CubeQueryContext rewrittenQuery =
       rewriteCtx("cube select" + " SUM(msr2) from derivedCube where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
     String expected =
-      getExpectedQuery(CubeTestSetup.DERIVED_CUBE_NAME, "select sum(derivedCube.msr2) FROM ", null, null,
-        getWhereForDailyAndHourly2days(CubeTestSetup.DERIVED_CUBE_NAME, "C2_testfact"));
+      getExpectedQuery(DERIVED_CUBE_NAME, "select sum(derivedCube.msr2) FROM ", null, null,
+        getWhereForDailyAndHourly2days(DERIVED_CUBE_NAME, "C2_testfact"));
     compareQueries(rewrittenQuery.toHQL(), expected);
     System.out.println("Non existing parts:" + rewrittenQuery.getNonExistingParts());
     assertNotNull(rewrittenQuery.getNonExistingParts());
@@ -226,41 +214,41 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
     // test join
     Configuration conf = getConf();
-    conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
+    conf.setBoolean(DISABLE_AUTO_JOINS, false);
     String hqlQuery;
 
     hqlQuery = rewrite("cube select" + " testdim2.name, SUM(msr2) from derivedCube where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(CubeTestSetup.DERIVED_CUBE_NAME, "select testdim2.name, sum(derivedCube.msr2) FROM ", " JOIN "
+      getExpectedQuery(DERIVED_CUBE_NAME, "select testdim2.name, sum(derivedCube.msr2) FROM ", " JOIN "
           + getDbName() + "c1_testdim2tbl testdim2 ON derivedCube.dim2 = "
           + " testdim2.id and (testdim2.dt = 'latest') ", null, "group by (testdim2.name)", null,
-        getWhereForDailyAndHourly2days(CubeTestSetup.DERIVED_CUBE_NAME, "c1_summary2"));
+        getWhereForDailyAndHourly2days(DERIVED_CUBE_NAME, "c1_summary2"));
     compareQueries(hqlQuery, expected);
 
     // Test that explicit join query passes with join resolver disabled
-    conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, true);
+    conf.setBoolean(DISABLE_AUTO_JOINS, true);
     List<String> joinWhereConds = new ArrayList<String>();
     joinWhereConds.add(StorageUtil.getWherePartClause("dt", "testdim2", StorageConstants.getPartitionsForLatest()));
     hqlQuery =
       rewrite("cube select" + " testdim2.name, SUM(msr2) from derivedCube "
         + " inner join testdim2 on derivedCube.dim2 = testdim2.id " + "where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(CubeTestSetup.DERIVED_CUBE_NAME, "select testdim2.name, sum(derivedCube.msr2) FROM ",
+      getExpectedQuery(DERIVED_CUBE_NAME, "select testdim2.name, sum(derivedCube.msr2) FROM ",
         " inner JOIN " + getDbName() + "c1_testdim2tbl testdim2 ON derivedCube.dim2 = " + " testdim2.id ", null,
         "group by (testdim2.name)", joinWhereConds,
-        getWhereForDailyAndHourly2days(CubeTestSetup.DERIVED_CUBE_NAME, "c1_summary2"));
+        getWhereForDailyAndHourly2days(DERIVED_CUBE_NAME, "c1_summary2"));
     compareQueries(hqlQuery, expected);
   }
 
   @Test
   public void testCubeInsert() throws Exception {
     Configuration conf = getConf();
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
     String hqlQuery = rewrite("insert overwrite directory" + " 'target/test' select SUM(msr2) from testCube where "
       + TWO_DAYS_RANGE, conf);
-    Map<String, String> wh = getWhereForDailyAndHourly2days(cubeName, "C2_testfact");
+    Map<String, String> wh = getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact");
     String expected = "insert overwrite directory 'target/test' "
-      + getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, wh);
+      + getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null, wh);
     compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("insert overwrite directory" + " 'target/test' cube select SUM(msr2) from testCube where "
@@ -269,9 +257,9 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
     hqlQuery = rewrite("insert overwrite local directory" + " 'target/test' select SUM(msr2) from testCube where "
       + TWO_DAYS_RANGE, conf);
-    wh = getWhereForDailyAndHourly2days(cubeName, "C2_testfact");
+    wh = getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact");
     expected = "insert overwrite local directory 'target/test' "
-      + getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, wh);
+      + getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null, wh);
     compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("insert overwrite local directory" + " 'target/test' cube select SUM(msr2) from testCube where "
@@ -280,9 +268,9 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
     hqlQuery = rewrite("insert overwrite table temp" + " select SUM(msr2) from testCube where " + TWO_DAYS_RANGE,
       conf);
-    wh = getWhereForDailyAndHourly2days(cubeName, "C2_testfact");
+    wh = getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact");
     expected = "insert overwrite table temp "
-      + getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, wh);
+      + getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null, wh);
     compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("insert overwrite table temp" + " cube select SUM(msr2) from testCube where " + TWO_DAYS_RANGE,
@@ -323,8 +311,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String hqlQuery, expected;
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     // Test with partition existence
@@ -332,68 +320,68 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
         getWhereForHourly2days("c1_testfact2"));
     compareQueries(hqlQuery, expected);
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, false);
 
     // Tests for valid tables
-    conf.set(CubeQueryConfUtil.getValidFactTablesKey(cubeName), "testFact");
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
+    conf.set(CubeQueryConfUtil.getValidFactTablesKey(TEST_CUBE_NAME), "testFact");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C1");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-        getWhereForDailyAndHourly2days(cubeName, "C1_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C1_testfact"));
     compareQueries(hqlQuery, expected);
 
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
-    conf.set(CubeQueryConfUtil.getValidFactTablesKey(cubeName), "testFact");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
+    conf.set(CubeQueryConfUtil.getValidFactTablesKey(TEST_CUBE_NAME), "testFact");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
-    conf.set(CubeQueryConfUtil.getValidFactTablesKey(cubeName), "testFact2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C1");
+    conf.set(CubeQueryConfUtil.getValidFactTablesKey(TEST_CUBE_NAME), "testFact2");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
         getWhereForHourly2days("c1_testfact2"));
     compareQueries(hqlQuery, expected);
 
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
-    conf.set(CubeQueryConfUtil.getValidFactTablesKey(cubeName), "testFact2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C1");
+    conf.set(CubeQueryConfUtil.getValidFactTablesKey(TEST_CUBE_NAME), "testFact2");
     conf.set(getValidStorageTablesKey("testFact2"), "C1_testFact2");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
         getWhereForHourly2days("c1_testfact2"));
     compareQueries(hqlQuery, expected);
 
-    conf.set(CubeQueryConfUtil.getValidFactTablesKey(cubeName), "testFact");
+    conf.set(CubeQueryConfUtil.getValidFactTablesKey(TEST_CUBE_NAME), "testFact");
     conf.set(getValidStorageTablesKey("testfact"), "C1_testFact");
     conf.set(getValidUpdatePeriodsKey("testfact", "C1"), "HOURLY");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
-    expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, getWhereForHourly2days("c1_testfact"));
+    expected = getExpectedQuery(TEST_CUBE_NAME,
+      "select sum(testcube.msr2) FROM ", null, null, getWhereForHourly2days("c1_testfact"));
     compareQueries(hqlQuery, expected);
 
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
     conf.set(getValidStorageTablesKey("testfact"), "C2_testFact");
     conf.set(getValidUpdatePeriodsKey("testfact", "C2"), "HOURLY");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
-    expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, getWhereForHourly2days("c2_testfact"));
+    expected = getExpectedQuery(TEST_CUBE_NAME,
+      "select sum(testcube.msr2) FROM ", null, null, getWhereForHourly2days("c2_testfact"));
     compareQueries(hqlQuery, expected);
 
     // max interval test
     conf = new Configuration();
     conf.set(CubeQueryConfUtil.QUERY_MAX_INTERVAL, "HOURLY");
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1,C2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C1,C2");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
-    expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, getWhereForHourly2days("c1_testfact2"));
+    expected = getExpectedQuery(TEST_CUBE_NAME,
+      "select sum(testcube.msr2) FROM ", null, null, getWhereForHourly2days("c1_testfact2"));
     compareQueries(hqlQuery, expected);
   }
 
@@ -406,8 +394,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.set(getValidUpdatePeriodsKey("testfact", "C2"), "MONTHLY,DAILY");
     ArrayList<String> storages = Lists.newArrayList("c1_testfact", "c2_testfact");
     try {
-      CubeTestSetup.getStorageToUpdatePeriodMap().put("c1_testfact", Lists.newArrayList(HOURLY, DAILY));
-      CubeTestSetup.getStorageToUpdatePeriodMap().put("c2_testfact", Lists.newArrayList(MONTHLY));
+      getStorageToUpdatePeriodMap().put("c1_testfact", Lists.newArrayList(HOURLY, DAILY));
+      getStorageToUpdatePeriodMap().put("c2_testfact", Lists.newArrayList(MONTHLY));
 
       // Union query
       String hqlQuery;
@@ -430,7 +418,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
       hqlQuery = rewrite("select cityid as `City ID`, msr8, msr7 as `Third measure` "
         + "from testCube where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
 
-      expected = getExpectedUnionQuery(cubeName, storages, provider,
+      expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
         "SELECT testcube.alias0 as `City ID`, sum(testcube.alias1) + max(testcube.alias2), "
           + "case when sum(testcube.alias1) = 0 then 0 else sum(testcube.alias3)/sum(testcube.alias1) end "
           + "as `Third Measure`",
@@ -445,7 +433,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
       hqlQuery = rewrite("select cityid as `City ID`, msr3 as `Third measure` from testCube where "
         + TWO_MONTHS_RANGE_UPTO_HOURS + " having msr7 > 10", conf);
 
-      expected = getExpectedUnionQuery(cubeName, storages, provider,
+      expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
         "SELECT testcube.alias0 as `City ID`, max(testcube.alias1) as `Third measure`",
         null, "group by testcube.alias0 having "
           + "(case when sum(testcube.alias2)=0 then 0 else sum(testcube.alias3)/sum(testcube.alias2) end > 10 )",
@@ -458,7 +446,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
       hqlQuery = rewrite("select cityid as `City ID`, msr3 as `Third measure` from testCube where "
         + TWO_MONTHS_RANGE_UPTO_HOURS + " having msr8 > 10", conf);
 
-      expected = getExpectedUnionQuery(cubeName, storages, provider,
+      expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
         "SELECT testcube.alias0 as `City ID`, max(testcube.alias1) as `Third measure`",
         null, "GROUP BY testcube.alias0 "
           + "HAVING (sum(testcube.alias2) + max(testcube.alias1)) > 10 ",
@@ -469,7 +457,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
       hqlQuery = rewrite("select msr3 as `Measure 3` from testCube where "
         + TWO_MONTHS_RANGE_UPTO_HOURS + " having msr2 > 10 and msr2 < 100", conf);
 
-      expected = getExpectedUnionQuery(cubeName, storages, provider,
+      expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
         "SELECT max(testcube.alias0) as `Measure 3` ",
         null, " HAVING sum(testcube.alias1) > 10 and sum(testcube.alias1) < 100",
         "SELECT max(testcube.msr3) as `alias0`, sum(testcube.msr2) as `alias1`", null, null);
@@ -479,7 +467,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
         + "SUM(msr2) as `Measure 2` from testCube where "
         + TWO_MONTHS_RANGE_UPTO_HOURS + " having msr4 > 10 order by cityid desc limit 5", conf);
 
-      expected = getExpectedUnionQuery(cubeName, storages, provider,
+      expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
         "SELECT testcube.alias0, testcube.alias1 as `City ID`, max(testcube.alias2) as `Measure 3`, "
           + "count(testcube.alias3), sum(testcube.alias4) as `Measure 2`",
         null, "group by testcube.alias0, testcube.alias1 "
@@ -490,12 +478,12 @@ public class TestCubeRewriter extends TestQueryRewrite {
       compareQueries(hqlQuery, expected);
 
       conf.setBoolean(CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT, false);
-      conf.setBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, false);
+      conf.setBoolean(ENABLE_SELECT_TO_GROUPBY, false);
       hqlQuery = rewrite("select cityid as `City ID`, msr3 as `Measure 3`, "
         + "SUM(msr2) as `Measure 2` from testCube" + " where "
         + TWO_MONTHS_RANGE_UPTO_HOURS + " group by zipcode having msr4 > 10 order by cityid desc limit 5", conf);
 
-      expected = getExpectedUnionQuery(cubeName, storages, provider,
+      expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
         "SELECT testcube.alias0 as `City ID`,max(testcube.alias1) as `Measure 3`,sum(testcube.alias2) as `Measure 2` ",
         null, "group by testcube.alias3 having count(testcube.alias4) > 10 order by testcube.alias0 desc limit 5",
         "SELECT testcube.cityid as `alias0`, max(testcube.msr3) as `alias1`, "
@@ -503,7 +491,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
         null, "GROUP BY testcube.zipcode");
       compareQueries(hqlQuery, expected);
     } finally {
-      CubeTestSetup.getStorageToUpdatePeriodMap().clear();
+      getStorageToUpdatePeriodMap().clear();
     }
 
   }
@@ -517,12 +505,12 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.set(getValidUpdatePeriodsKey("testfact2", "C1"), "YEARLY");
     conf.set(getValidUpdatePeriodsKey("testfact", "C2"), "HOURLY");
 
-    CubeTestSetup.getStorageToUpdatePeriodMap().put("c1_testfact", Lists.newArrayList(DAILY));
-    CubeTestSetup.getStorageToUpdatePeriodMap().put("c2_testfact", Lists.newArrayList(HOURLY));
+    getStorageToUpdatePeriodMap().put("c1_testfact", Lists.newArrayList(DAILY));
+    getStorageToUpdatePeriodMap().put("c2_testfact", Lists.newArrayList(HOURLY));
     StoragePartitionProvider provider = new StoragePartitionProvider() {
       @Override
       public Map<String, String> providePartitionsForStorage(String storage) {
-        return getWhereForDailyAndHourly2days(cubeName, storage);
+        return getWhereForDailyAndHourly2days(TEST_CUBE_NAME, storage);
       }
     };
     try {
@@ -530,20 +518,21 @@ public class TestCubeRewriter extends TestQueryRewrite {
       String hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
       System.out.println("HQL:" + hqlQuery);
 
-      String expected = getExpectedUnionQuery(cubeName, Lists.newArrayList("c1_testfact", "c2_testfact"), provider,
+      String expected = getExpectedUnionQuery(TEST_CUBE_NAME,
+        Lists.newArrayList("c1_testfact", "c2_testfact"), provider,
         "select sum(testcube.alias0) ", null, null,
         "select sum(testcube.msr2) as `alias0` from ", null, null
       );
       compareQueries(hqlQuery, expected);
     } finally {
-      CubeTestSetup.getStorageToUpdatePeriodMap().clear();
+      getStorageToUpdatePeriodMap().clear();
     }
   }
 
   @Test
   public void testCubeWhereQueryWithMultipleTablesForMonth() throws Exception {
     Configuration conf = getConf();
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1,C2,C3");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C1,C2,C3");
     conf.setBoolean(CubeQueryConfUtil.ENABLE_STORAGES_UNION, true);
     conf.set(getValidStorageTablesKey("testfact"), "");
     conf.set(getValidUpdatePeriodsKey("testfact", "C1"), "HOURLY");
@@ -552,9 +541,9 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.set(getValidUpdatePeriodsKey("testfact", "C2"), "DAILY");
     conf.set(getValidUpdatePeriodsKey("testfact", "C3"), "MONTHLY");
 
-    CubeTestSetup.getStorageToUpdatePeriodMap().put("c1_testfact", Lists.newArrayList(HOURLY));
-    CubeTestSetup.getStorageToUpdatePeriodMap().put("c2_testfact", Lists.newArrayList(DAILY));
-    CubeTestSetup.getStorageToUpdatePeriodMap().put("c3_testfact", Lists.newArrayList(MONTHLY));
+    getStorageToUpdatePeriodMap().put("c1_testfact", Lists.newArrayList(HOURLY));
+    getStorageToUpdatePeriodMap().put("c2_testfact", Lists.newArrayList(DAILY));
+    getStorageToUpdatePeriodMap().put("c3_testfact", Lists.newArrayList(MONTHLY));
     StoragePartitionProvider provider = new StoragePartitionProvider() {
       @Override
       public Map<String, String> providePartitionsForStorage(String storage) {
@@ -566,13 +555,13 @@ public class TestCubeRewriter extends TestQueryRewrite {
       String hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
       System.out.println("HQL:" + hqlQuery);
       ArrayList<String> storages = Lists.newArrayList("c1_testfact", "c2_testfact", "c3_testfact");
-      String expected = getExpectedUnionQuery(cubeName, storages, provider,
+      String expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
         "select sum(testcube.alias0)", null, null,
         "select sum(testcube.msr2) as `alias0` from ", null, null
       );
       compareQueries(hqlQuery, expected);
     } finally {
-      CubeTestSetup.getStorageToUpdatePeriodMap().clear();
+      getStorageToUpdatePeriodMap().clear();
     }
   }
 
@@ -580,29 +569,29 @@ public class TestCubeRewriter extends TestQueryRewrite {
   public void testPartColAsQueryColumn() throws Exception {
     Configuration conf = getConf();
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, false);
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C3");
-    conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C3");
+    conf.setBoolean(DISABLE_AUTO_JOINS, false);
     String hql, expected;
     hql = rewrite(
       "select countrydim.name, msr2 from" + " testCube" + " where countrydim.region = 'asia' and "
         + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select countrydim.name, sum(testcube.msr2)" + " FROM ", " JOIN " + getDbName()
+      getExpectedQuery(TEST_CUBE_NAME, "select countrydim.name, sum(testcube.msr2)" + " FROM ", " JOIN " + getDbName()
           + "c3_countrytable_partitioned countrydim on testcube.countryid=countrydim.id and countrydim.dt='latest'",
         "countrydim.region='asia'",
         " group by countrydim.name ", null,
-        getWhereForHourly2days(cubeName, "C3_testfact2_raw"));
+        getWhereForHourly2days(TEST_CUBE_NAME, "C3_testfact2_raw"));
     compareQueries(hql, expected);
     hql = rewrite(
       "select statedim.name, statedim.countryid, msr2 from" + " testCube" + " where statedim.countryid = 5 and "
         + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select statedim.name, statedim.countryid, sum(testcube.msr2)" + " FROM ",
+      getExpectedQuery(TEST_CUBE_NAME, "select statedim.name, statedim.countryid, sum(testcube.msr2)" + " FROM ",
         " JOIN " + getDbName()
           + "c3_statetable_partitioned statedim ON" + " testCube.stateid = statedim.id and statedim.dt = 'latest'",
         "statedim.countryid=5",
         " group by statedim.name, statedim.countryid", null,
-        getWhereForHourly2days(cubeName, "C3_testfact2_raw"));
+        getWhereForHourly2days(TEST_CUBE_NAME, "C3_testfact2_raw"));
     compareQueries(hql, expected);
   }
 
@@ -610,7 +599,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
   public void testCubeJoinQuery() throws Exception {
     // q1
     Configuration conf = getConf();
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
     String hqlQuery =
       rewrite("select SUM(msr2) from testCube" + " join citydim on testCube.cityid = citydim.id" + " where "
         + TWO_DAYS_RANGE, conf);
@@ -618,9 +607,9 @@ public class TestCubeRewriter extends TestQueryRewrite {
     //    joinWhereConds.add(StorageUtil.getWherePartClause("dt", "citydim", StorageConstants.getPartitionsForLatest
     // ()));
     String expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2)" + " FROM ", " INNER JOIN " + getDbName()
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2)" + " FROM ", " INNER JOIN " + getDbName()
           + "c2_citytable citydim ON" + " testCube.cityid = citydim.id", null, null, joinWhereConds,
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
@@ -638,16 +627,17 @@ public class TestCubeRewriter extends TestQueryRewrite {
       rewrite("select statedim.name, SUM(msr2) from" + " testCube" + " join citydim on testCube.cityid = citydim.id"
         + " left outer join statedim on statedim.id = citydim.stateid"
         + " right outer join zipdim on citydim.zipcode = zipdim.code" + " where " + TWO_DAYS_RANGE, getConf());
-    joinWhereConds = new ArrayList<String>();
+    joinWhereConds = new ArrayList<>();
     joinWhereConds.add(StorageUtil.getWherePartClause("dt", "citydim", StorageConstants.getPartitionsForLatest()));
     joinWhereConds.add(StorageUtil.getWherePartClause("dt", "zipdim", StorageConstants.getPartitionsForLatest()));
     expected =
-      getExpectedQuery(cubeName, "select statedim.name," + " sum(testcube.msr2) FROM ", "INNER JOIN " + getDbName()
+      getExpectedQuery(TEST_CUBE_NAME,
+        "select statedim.name," + " sum(testcube.msr2) FROM ", "INNER JOIN " + getDbName()
           + "c1_citytable citydim ON" + " testCube.cityid = citydim.id LEFT OUTER JOIN " + getDbName()
           + "c1_statetable statedim" + " ON statedim.id = citydim.stateid AND "
           + "(statedim.dt = 'latest') RIGHT OUTER JOIN " + getDbName() + "c1_ziptable"
           + " zipdim ON citydim.zipcode = zipdim.code", null, " group by" + " statedim.name ", joinWhereConds,
-        getWhereForHourly2days(cubeName, "C1_testfact2"));
+        getWhereForHourly2days(TEST_CUBE_NAME, "C1_testfact2"));
     compareQueries(hqlQuery, expected);
 
     // q3
@@ -672,18 +662,18 @@ public class TestCubeRewriter extends TestQueryRewrite {
         + " left outer join citydim on testCube.cityid = citydim.id"
         + " left outer join zipdim on citydim.zipcode = zipdim.code" + " where " + TWO_DAYS_RANGE, getConf());
     expected =
-      getExpectedQuery(cubeName, "select citydim.name," + " sum(testcube.msr2) FROM ", " LEFT OUTER JOIN "
+      getExpectedQuery(TEST_CUBE_NAME, "select citydim.name," + " sum(testcube.msr2) FROM ", " LEFT OUTER JOIN "
           + getDbName() + "c1_citytable citydim ON" + " testCube.cityid = citydim.id and (citydim.dt = 'latest') "
           + " LEFT OUTER JOIN " + getDbName() + "c1_ziptable" + " zipdim ON citydim.zipcode = zipdim.code AND "
           + "(zipdim.dt = 'latest')", null, " group by" + " citydim.name ", null,
-        getWhereForHourly2days(cubeName, "C1_testfact2"));
+        getWhereForHourly2days(TEST_CUBE_NAME, "C1_testfact2"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite("select SUM(msr2) from testCube" + " join countrydim on testCube.countryid = countrydim.id" + " where "
         + TWO_MONTHS_RANGE_UPTO_MONTH, getConf());
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", " INNER JOIN " + getDbName()
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", " INNER JOIN " + getDbName()
           + "c1_countrytable countrydim ON testCube.countryid = " + " countrydim.id", null, null, null,
         getWhereForMonthly2months("c2_testfactmonthly"));
     compareQueries(hqlQuery, expected);
@@ -698,23 +688,23 @@ public class TestCubeRewriter extends TestQueryRewrite {
   public void testCubeGroupbyWithConstantProjected() throws Exception {
     // check constants
     Configuration conf = getConf();
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
     String hqlQuery1 = rewrite("select cityid, 99, \"placeHolder\", -1001, SUM(msr2) from testCube" + " where "
       + TWO_DAYS_RANGE, conf);
-    String expected1 = getExpectedQuery(cubeName, "select testcube.cityid, 99, \"placeHolder\", -1001,"
+    String expected1 = getExpectedQuery(TEST_CUBE_NAME, "select testcube.cityid, 99, \"placeHolder\", -1001,"
         + " sum(testcube.msr2) FROM ", null, " group by testcube.cityid ",
-      getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery1, expected1);
 
     // check constants with expression
     String hqlQuery2 = rewrite(
       "select cityid, case when stateid = 'za' then \"Not Available\" end, 99, \"placeHolder\", -1001, "
         + "SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
-    String expected2 = getExpectedQuery(cubeName,
+    String expected2 = getExpectedQuery(TEST_CUBE_NAME,
       "select testcube.cityid, case when testcube.stateid = 'za' then \"Not Available\" end, 99, \"placeHolder\","
         + " -1001, sum(testcube.msr2) FROM ", null,
       " group by testcube.cityid, case when testcube.stateid = 'za' then \"Not Available\" end ",
-      getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery2, expected2);
 
     // check expression with boolean and numeric constants
@@ -723,7 +713,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
         + "case when stateid='za' then 99 else -1001 end,  "
         + "SUM(msr2), SUM(msr2 + 39), SUM(msr2) + 567 from testCube" + " where " + TWO_DAYS_RANGE, conf);
     String expected3 = getExpectedQuery(
-      cubeName,
+      TEST_CUBE_NAME,
       "select testcube.cityid, testcube.stateid + 99, 44 + testcube.stateid, testcube.stateid - 33,"
         + " 999 - testcube.stateid, TRUE, FALSE, round(123.4567,2), "
         + "case when testcube.stateid='za' then 99 else -1001 end,"
@@ -732,14 +722,14 @@ public class TestCubeRewriter extends TestQueryRewrite {
       " group by testcube.cityid,testcube.stateid + 99, 44 + testcube.stateid, testcube.stateid - 33, "
         + "999 - testcube.stateid, "
         + " case when testcube.stateid='za' then 99 else -1001 end ",
-      getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery3, expected3);
   }
 
   @Test
   public void testCubeGroupbyQuery() throws Exception {
     Configuration conf = getConf();
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
     String hqlQuery =
       rewrite("select name, SUM(msr2) from" + " testCube join citydim on testCube.cityid = citydim.id where "
         + TWO_DAYS_RANGE, conf);
@@ -747,9 +737,9 @@ public class TestCubeRewriter extends TestQueryRewrite {
     //    joinWhereConds.add(StorageUtil.getWherePartClause("dt", "citydim", StorageConstants.getPartitionsForLatest
     // ()));
     String expected =
-      getExpectedQuery(cubeName, "select citydim.name," + " sum(testcube.msr2) FROM ", "INNER JOIN " + getDbName()
+      getExpectedQuery(TEST_CUBE_NAME, "select citydim.name," + " sum(testcube.msr2) FROM ", "INNER JOIN " + getDbName()
           + "c2_citytable citydim ON" + " testCube.cityid = citydim.id", null, " group by citydim.name ",
-        joinWhereConds, getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+        joinWhereConds, getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
@@ -759,70 +749,70 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
     hqlQuery = rewrite("select cityid, SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select testcube.cityid," + " sum(testcube.msr2) FROM ", null,
-        " group by testcube.cityid ", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.cityid," + " sum(testcube.msr2) FROM ", null,
+        " group by testcube.cityid ", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("select round(cityid), SUM(msr2) from" + " testCube where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select round(testcube.cityid)," + " sum(testcube.msr2) FROM ", null,
-        " group by round(testcube.cityid) ", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select round(testcube.cityid)," + " sum(testcube.msr2) FROM ", null,
+        " group by round(testcube.cityid) ", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite("select SUM(msr2) from testCube" + "  where " + TWO_DAYS_RANGE + "group by round(zipcode)", conf);
     expected =
-      getExpectedQuery(cubeName, "select round(testcube.zipcode)," + " sum(testcube.msr2) FROM ", null,
-        " group by round(testcube.zipcode) ", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select round(testcube.zipcode)," + " sum(testcube.msr2) FROM ", null,
+        " group by round(testcube.zipcode) ", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite("select round(cityid), SUM(msr2) from" + " testCube where " + TWO_DAYS_RANGE + " group by zipcode",
         conf);
     expected =
-      getExpectedQuery(cubeName, "select " + " round(testcube.cityid), sum(testcube.msr2) FROM ", null,
-        " group by testcube.zipcode", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select " + " round(testcube.cityid), sum(testcube.msr2) FROM ", null,
+        " group by testcube.zipcode", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("select round(cityid), SUM(msr2) from" + " testCube where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select " + " round(testcube.cityid), sum(testcube.msr2) FROM ", null,
-        " group by round(testcube.cityid)", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select " + " round(testcube.cityid), sum(testcube.msr2) FROM ", null,
+        " group by round(testcube.cityid)", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite("select cityid, SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE + " group by round(zipcode)",
         conf);
     expected =
-      getExpectedQuery(cubeName, "select " + " testcube.cityid, sum(testcube.msr2) FROM ", null,
-        " group by round(testcube.zipcode)", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select " + " testcube.cityid, sum(testcube.msr2) FROM ", null,
+        " group by round(testcube.zipcode)", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE + " group by round(zipcode)", conf);
     expected =
-      getExpectedQuery(cubeName, "select round(testcube.zipcode)," + " sum(testcube.msr2) FROM ", null,
-        " group by round(testcube.zipcode)", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select round(testcube.zipcode)," + " sum(testcube.msr2) FROM ", null,
+        " group by round(testcube.zipcode)", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite("select cityid, msr2 from testCube" + " where " + TWO_DAYS_RANGE + " group by round(zipcode)", conf);
     expected =
-      getExpectedQuery(cubeName, "select " + " testcube.cityid, sum(testcube.msr2) FROM ", null,
-        " group by round(testcube.zipcode)", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select " + " testcube.cityid, sum(testcube.msr2) FROM ", null,
+        " group by round(testcube.zipcode)", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite("select round(zipcode) rzc," + " msr2 from testCube where " + TWO_DAYS_RANGE + " group by zipcode"
         + " order by rzc", conf);
     expected =
-      getExpectedQuery(cubeName, "select round(testcube.zipcode) as `rzc`," + " sum(testcube.msr2) FROM ", null,
-        " group by testcube.zipcode  order by rzc asc", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select round(testcube.zipcode) as `rzc`," + " sum(testcube.msr2) FROM ", null,
+        " group by testcube.zipcode  order by rzc asc", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     // rewrite with expressions
-    conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1, C2");
+    conf.setBoolean(DISABLE_AUTO_JOINS, false);
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C1, C2");
     hqlQuery =
       rewrite("SELECT citydim.name AS g1," + " CASE  WHEN citydim.name=='NULL'  THEN 'NULL' "
         + " WHEN citydim.name=='X'  THEN 'X-NAME' " + " WHEN citydim.name=='Y'  THEN 'Y-NAME' "
@@ -854,7 +844,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
         + "";
     expected =
       getExpectedQuery(
-        cubeName,
+        TEST_CUBE_NAME,
         "SELECT ( citydim.name ) as `g1` ,"
           + "  case  when (( citydim.name ) ==  'NULL' ) then  'NULL'  when (( citydim.name ) ==  'X' )"
           + " then  'X-NAME'  when (( citydim.name ) ==  'Y' ) then  'Y-NAME'"
@@ -963,7 +953,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String expectedRewrittenQuery = "SELECT ( citydim . name ) as `Alias With Spaces` , sum(( testcube . msr2 )) "
       + "as `TestMeasure`  FROM TestQueryRewrite.c2_testfact testcube inner JOIN TestQueryRewrite.c2_citytable citydim "
       + "ON (( testcube . cityid ) = ( citydim . id )) WHERE (((( testcube . dt ) =  '"
-      + CubeTestSetup.getDateUptoHours(LAST_HOUR) + "' ))) GROUP BY ( citydim . name )";
+      + getDateUptoHours(getDateWithOffset(HOURLY, -1)) + "' ))) GROUP BY ( citydim . name )";
 
     String actualRewrittenQuery = rewrite(inputQuery, getConfWithStorages("C2"));
 
@@ -980,7 +970,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String expectedRewrittenQuery = "SELECT ( citydim . name ) as `Alias With Spaces` , sum(( testcube . msr2 )) "
       + "as `TestMeasure`  FROM TestQueryRewrite.c2_testfact testcube inner JOIN TestQueryRewrite.c2_citytable citydim "
       + "ON (( testcube . cityid ) = ( citydim . id )) WHERE (((( testcube . dt ) =  '"
-      + CubeTestSetup.getDateUptoHours(LAST_HOUR) + "' ))) GROUP BY ( citydim . name )";
+      + getDateUptoHours(getDateWithOffset(HOURLY, -1)) + "' ))) GROUP BY ( citydim . name )";
 
     String actualRewrittenQuery = rewrite(inputQuery, getConfWithStorages("C2"));
 
@@ -992,8 +982,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String hqlQuery = rewrite("select SUM(msr2) m2 from" + " testCube where " + TWO_DAYS_RANGE, getConfWithStorages(
       "C2"));
     String expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) as `m2` FROM ", null, null,
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) as `m2` FROM ", null, null,
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("select SUM(msr2) from testCube mycube" + " where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
@@ -1005,8 +995,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     hqlQuery =
       rewrite("select SUM(testCube.msr2) from testCube" + " where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("select mycube.msr2 m2 from testCube" + " mycube where " + TWO_DAYS_RANGE, getConfWithStorages(
@@ -1018,8 +1008,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
     hqlQuery = rewrite("select testCube.msr2 m2 from testCube" + " where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) as `m2` FROM ", null, null,
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) as `m2` FROM ", null, null,
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
   }
 
@@ -1028,7 +1018,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String hqlQuery =
       rewrite("select SUM(msr2) from testCube" + " where " + TWO_MONTHS_RANGE_UPTO_HOURS, getConfWithStorages("C2"));
     String expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
         getWhereForMonthlyDailyAndHourly2months("C2_testfact"));
     compareQueries(hqlQuery, expected);
   }
@@ -1079,7 +1069,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String hqlQuery = rewrite("select cityid, SUM(msr2) from testCube" + " where " + TWO_MONTHS_RANGE_UPTO_MONTH,
       getConfWithStorages("C2"));
     String expected =
-      getExpectedQuery(cubeName, "select testcube.cityid," + " sum(testcube.msr2) FROM ", null,
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.cityid," + " sum(testcube.msr2) FROM ", null,
         "group by testcube.cityid", getWhereForMonthly2months("c2_testfact"));
     compareQueries(hqlQuery, expected);
   }
@@ -1172,26 +1162,26 @@ public class TestCubeRewriter extends TestQueryRewrite {
         "c1_citytable", true);
     compareQueries(hqlQuery, expected);
 
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
     hqlQuery = rewrite("select name, stateid from citydim", conf);
     expected =
       getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null, "c2_citytable", false);
     compareQueries(hqlQuery, expected);
 
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C1");
     hqlQuery = rewrite("select name, stateid from citydim", conf);
     expected =
       getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null, "c1_citytable", true);
     compareQueries(hqlQuery, expected);
 
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "");
     conf.set(CubeQueryConfUtil.VALID_STORAGE_DIM_TABLES, "C1_citytable");
     hqlQuery = rewrite("select name, stateid from citydim", conf);
     expected =
       getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null, "c1_citytable", true);
     compareQueries(hqlQuery, expected);
 
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "");
     conf.set(CubeQueryConfUtil.VALID_STORAGE_DIM_TABLES, "C2_citytable");
     hqlQuery = rewrite("select name, stateid from citydim", conf);
     expected =
@@ -1221,13 +1211,13 @@ public class TestCubeRewriter extends TestQueryRewrite {
       getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", " limit 100", "c1_citytable",
         true);
     compareQueries(hqlQuery, expected);
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
     hqlQuery = rewrite("select name, stateid from citydim " + "limit 100", conf);
     expected =
       getExpectedQuery("citydim", "select citydim.name," + "citydim.stateid from ", " limit 100", "c2_citytable",
         false);
     compareQueries(hqlQuery, expected);
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C1");
     hqlQuery = rewrite("select name, stateid from citydim" + " limit 100", conf);
     expected =
       getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", " limit 100", "c1_citytable",
@@ -1263,16 +1253,16 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String[] expectedQueries = {
       getExpectedQuery("t", "SELECT t.cityid, sum(t.msr2) FROM ", null, " group by t.cityid",
         getWhereForDailyAndHourly2days("t", "C2_testfact")),
-      getExpectedQuery(cubeName, "SELECT testCube.cityid, sum(testCube.msr2)" + " FROM ",
+      getExpectedQuery(TEST_CUBE_NAME, "SELECT testCube.cityid, sum(testCube.msr2)" + " FROM ",
         " testcube.cityid > 100 ", " group by testcube.cityid having" + " sum(testCube.msr2) < 1000",
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact")),
-      getExpectedQuery(cubeName, "SELECT testCube.cityid, sum(testCube.msr2)" + " FROM ",
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact")),
+      getExpectedQuery(TEST_CUBE_NAME, "SELECT testCube.cityid, sum(testCube.msr2)" + " FROM ",
         " testcube.cityid > 100 ", " group by testcube.cityid having"
           + " sum(testCube.msr2) < 1000 order by testCube.cityid asc",
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact")),
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact")),
     };
     Configuration conf = getConf();
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
     for (int i = 0; i < queries.length; i++) {
       String hql = rewrite(queries[i], conf);
       compareQueries(hql, expectedQueries[i]);
@@ -1284,58 +1274,55 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String hqlQuery = rewrite("select dim1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE,
       getConfWithStorages("C1"));
     String expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
-        " group by testcube.dim1", getWhereForDailyAndHourly2days(cubeName, "C1_summary1"));
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
+        " group by testcube.dim1", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C1_summary1"));
     compareQueries(hqlQuery, expected);
     hqlQuery =
       rewrite("select dim1, dim2, COUNT(msr4)," + " SUM(msr2), msr3 from testCube" + " where " + TWO_DAYS_RANGE,
         getConfWithStorages("C1"));
     expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, testcube,dim2, count(testcube.msr4),"
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, testcube,dim2, count(testcube.msr4),"
           + " sum(testcube.msr2), max(testcube.msr3) FROM ", null, " group by testcube.dim1, testcube.dim2",
-        getWhereForDailyAndHourly2days(cubeName, "C1_summary2"));
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C1_summary2"));
     compareQueries(hqlQuery, expected);
     hqlQuery =
       rewrite("select dim1, dim2, cityid, msr4," + " SUM(msr2), msr3 from testCube" + " where " + TWO_DAYS_RANGE,
         getConfWithStorages("C1"));
     expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, testcube,dim2, testcube.cityid,"
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, testcube,dim2, testcube.cityid,"
           + " count(testcube.msr4), sum(testcube.msr2), max(testcube.msr3) FROM ", null,
         " group by testcube.dim1, testcube.dim2, testcube.cityid",
-        getWhereForDailyAndHourly2days(cubeName, "C1_summary3"));
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C1_summary3"));
     compareQueries(hqlQuery, expected);
   }
 
   @Test
   public void testFactsWithTimedDimension() throws Exception {
-    String twoDaysITRange =
-      "time_range_in(it, '" + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','"
-        + CubeTestSetup.getDateUptoHours(NOW) + "')";
 
     String hqlQuery =
-      rewrite("select dim1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange, getConf());
+      rewrite("select dim1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT, getConf());
     String expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
-        " group by testcube.dim1", getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary1"),
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
+        " group by testcube.dim1", getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "it", "C2_summary1"),
         null);
     compareQueries(hqlQuery, expected);
     hqlQuery =
-      rewrite("select dim1, dim2, COUNT(msr4)," + " SUM(msr2), msr3 from testCube" + " where " + twoDaysITRange,
+      rewrite("select dim1, dim2, COUNT(msr4)," + " SUM(msr2), msr3 from testCube" + " where " + TWO_DAYS_RANGE_IT,
         getConf());
     expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, testcube,dim2, count(testcube.msr4),"
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, testcube,dim2, count(testcube.msr4),"
           + " sum(testcube.msr2), max(testcube.msr3) FROM ", null, " group by testcube.dim1, testcube.dim2",
-        getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary2"),
+        getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "it", "C2_summary2"),
         null);
     compareQueries(hqlQuery, expected);
     hqlQuery =
       rewrite("select dim1, dim2, cityid, count(msr4)," + " SUM(msr2), msr3 from testCube" + " where "
-        + twoDaysITRange, getConf());
+        + TWO_DAYS_RANGE_IT, getConf());
     expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, testcube,dim2, testcube.cityid,"
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, testcube,dim2, testcube.cityid,"
           + " count(testcube.msr4), sum(testcube.msr2), max(testcube.msr3) FROM ", null,
         " group by testcube.dim1, testcube.dim2, testcube.cityid",
-        getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary3"),
+        getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "it", "C2_summary3"),
         null);
     compareQueries(hqlQuery, expected);
   }
@@ -1344,55 +1331,50 @@ public class TestCubeRewriter extends TestQueryRewrite {
   // now.
   // @Test
   public void testCubeQueryTimedDimensionFilter() throws Exception {
-    String twoDaysITRange =
-      "time_range_in(it, '" + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','"
-        + CubeTestSetup.getDateUptoHours(NOW) + "')";
-
     String hqlQuery =
-      rewrite("select dim1, max(msr3)," + " msr2 from testCube" + " where (" + twoDaysITRange
+      rewrite("select dim1, max(msr3)," + " msr2 from testCube" + " where (" + TWO_DAYS_RANGE_IT
         + " OR it == 'default') AND dim1 > 1000", getConf());
-    String expected = getExpectedQuery(cubeName, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ",
+    String expected = getExpectedQuery(TEST_CUBE_NAME,
+      "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ",
       null, "or (( testcube.it ) == 'default')) and ((testcube.dim1) > 1000)" + " group by testcube.dim1",
-      getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary1"),
+      getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "it", "C2_summary1"),
       null);
     compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE + " OR ("
-      + CubeTestSetup.TWO_DAYS_RANGE_BEFORE_4_DAYS + " AND dt='default')", getConf());
+      + TWO_DAYS_RANGE_BEFORE_4_DAYS + " AND dt='default')", getConf());
 
     String expecteddtRangeWhere1 =
-      getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "dt", TWODAYS_BACK, NOW)
+      getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "dt", TWODAYS_BACK, NOW)
         + " OR ("
-        + getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "dt", BEFORE_4_DAYS_START, BEFORE_4_DAYS_END) + ")";
+        + getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "dt", BEFORE_6_DAYS, BEFORE_4_DAYS) + ")";
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, " AND testcube.dt='default'",
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, " AND testcube.dt='default'",
         expecteddtRangeWhere1, "c2_testfact");
     compareQueries(hqlQuery, expected);
 
     String expecteddtRangeWhere2 =
       "("
-        + getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "dt", TWODAYS_BACK, NOW)
+        + getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "dt", TWODAYS_BACK, NOW)
         + " AND testcube.dt='dt1') OR "
-        + getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "dt", BEFORE_4_DAYS_START, BEFORE_4_DAYS_END);
+        + getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "dt", BEFORE_6_DAYS, BEFORE_4_DAYS);
     hqlQuery =
       rewrite("select SUM(msr2) from testCube" + " where (" + TWO_DAYS_RANGE + " AND dt='dt1') OR ("
-        + CubeTestSetup.TWO_DAYS_RANGE_BEFORE_4_DAYS + " AND dt='default')", getConf());
+        + TWO_DAYS_RANGE_BEFORE_4_DAYS + " AND dt='default')", getConf());
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, " AND testcube.dt='default'",
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, " AND testcube.dt='default'",
         expecteddtRangeWhere2, "c2_testfact");
     compareQueries(hqlQuery, expected);
 
-    String twoDaysPTRange =
-      "time_range_in(pt, '" + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','"
-        + CubeTestSetup.getDateUptoHours(NOW) + "')";
+    String twoDaysPTRange = getTimeRangeString("pt", DAILY, -2, 0, HOURLY);
     hqlQuery =
-      rewrite("select dim1, max(msr3)," + " msr2 from testCube where (" + twoDaysITRange + " OR (" + twoDaysPTRange
+      rewrite("select dim1, max(msr3)," + " msr2 from testCube where (" + TWO_DAYS_RANGE_IT + " OR (" + twoDaysPTRange
         + " and it == 'default')) AND dim1 > 1000", getConf());
     String expectedITPTrange =
-      getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", TWODAYS_BACK, NOW) + " OR ("
-        + getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "pt", TWODAYS_BACK, NOW) + ")";
+      getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "it", TWODAYS_BACK, NOW) + " OR ("
+        + getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "pt", TWODAYS_BACK, NOW) + ")";
     expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
         "AND testcube.it == 'default' and testcube.dim1 > 1000 group by testcube.dim1", expectedITPTrange,
         "C2_summary1");
     compareQueries(hqlQuery, expected);
@@ -1400,19 +1382,16 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
   @Test
   public void testLookAhead() throws Exception {
-    String twoDaysITRange =
-      "time_range_in(it, '" + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','"
-        + CubeTestSetup.getDateUptoHours(NOW) + "')";
 
     Configuration conf = getConf();
     conf.set(CubeQueryConfUtil.PROCESS_TIME_PART_COL, "pt");
     conf.setClass(CubeQueryConfUtil.TIME_RANGE_WRITER_CLASS, AbridgedTimeRangeWriter.class, TimeRangeWriter.class);
-    CubeQueryContext ctx = rewriteCtx("select dim1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange,
+    CubeQueryContext ctx = rewriteCtx("select dim1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT,
       conf);
     assertEquals(ctx.candidateFacts.size(), 1);
     CandidateFact candidateFact = ctx.candidateFacts.iterator().next();
     Set<FactPartition> partsQueried = new TreeSet<>(candidateFact.getPartsQueried());
-    Date ceilDay = DateUtil.getCeilDate(TWODAYS_BACK, DAILY);
+    Date ceilDay = DAILY.getCeilDate(getDateWithOffset(DAILY, -2));
     Date nextDay = DateUtils.addDays(ceilDay, 1);
     Date nextToNextDay = DateUtils.addDays(nextDay, 1);
     HashSet<String> storageTables = Sets.newHashSet();
@@ -1421,7 +1400,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     }
     TreeSet<FactPartition> expectedPartsQueried = Sets.newTreeSet();
     for (TimePartition p : Iterables.concat(
-      TimePartition.of(HOURLY, TWODAYS_BACK).rangeUpto(TimePartition.of(HOURLY, ceilDay)),
+      TimePartition.of(HOURLY, getDateWithOffset(DAILY, -2)).rangeUpto(TimePartition.of(HOURLY, ceilDay)),
       TimePartition.of(DAILY, ceilDay).rangeUpto(TimePartition.of(DAILY, nextDay)),
       TimePartition.of(HOURLY, nextDay).rangeUpto(TimePartition.of(HOURLY, NOW)))) {
       FactPartition fp = new FactPartition("it", p, null, storageTables);
@@ -1436,7 +1415,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     }
     assertEquals(partsQueried, expectedPartsQueried);
     conf.setInt(CubeQueryConfUtil.LOOK_AHEAD_PT_PARTS_PFX, 3);
-    ctx = rewriteCtx("select dim1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange,
+    ctx = rewriteCtx("select dim1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT,
       conf);
     partsQueried = new TreeSet<>(ctx.candidateFacts.iterator().next().getPartsQueried());
     // pt does not exist beyond 1 day. So in this test, max look ahead possible is 3
@@ -1447,35 +1426,35 @@ public class TestCubeRewriter extends TestQueryRewrite {
   public void testCubeQueryWithMultipleRanges() throws Exception {
     String hqlQuery =
       rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE + " OR "
-        + CubeTestSetup.TWO_DAYS_RANGE_BEFORE_4_DAYS, getConfWithStorages("C2"));
+        + TWO_DAYS_RANGE_BEFORE_4_DAYS, getConfWithStorages("C2"));
 
     String expectedRangeWhere =
-      getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "dt", TWODAYS_BACK, NOW)
+      getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "dt", TWODAYS_BACK, NOW)
         + " OR "
-        + getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "dt", BEFORE_4_DAYS_START, BEFORE_4_DAYS_END);
-    String expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, expectedRangeWhere, "c2_testfact");
+        + getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "dt", BEFORE_6_DAYS, BEFORE_4_DAYS);
+    String expected = getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ",
+      null, null, expectedRangeWhere, "c2_testfact");
     compareQueries(hqlQuery, expected);
     hqlQuery =
       rewrite("select dim1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE + " OR "
-        + CubeTestSetup.TWO_DAYS_RANGE_BEFORE_4_DAYS, getConfWithStorages("C1"));
+        + TWO_DAYS_RANGE_BEFORE_4_DAYS, getConfWithStorages("C1"));
     expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
         " group by testcube.dim1", expectedRangeWhere, "C1_summary1");
     compareQueries(hqlQuery, expected);
     hqlQuery =
       rewrite("select dim1, dim2, COUNT(msr4)," + " SUM(msr2), msr3 from testCube" + " where " + TWO_DAYS_RANGE
-        + " OR " + CubeTestSetup.TWO_DAYS_RANGE_BEFORE_4_DAYS, getConfWithStorages("C1"));
+        + " OR " + TWO_DAYS_RANGE_BEFORE_4_DAYS, getConfWithStorages("C1"));
     expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, testcube,dim2, count(testcube.msr4),"
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, testcube,dim2, count(testcube.msr4),"
           + " sum(testcube.msr2), max(testcube.msr3) FROM ", null, " group by testcube.dim1, testcube.dim2",
         expectedRangeWhere, "C1_summary2");
     compareQueries(hqlQuery, expected);
     hqlQuery =
       rewrite("select dim1, dim2, cityid, count(msr4)," + " SUM(msr2), msr3 from testCube" + " where " + TWO_DAYS_RANGE
-        + " OR " + CubeTestSetup.TWO_DAYS_RANGE_BEFORE_4_DAYS, getConfWithStorages("C1"));
+        + " OR " + TWO_DAYS_RANGE_BEFORE_4_DAYS, getConfWithStorages("C1"));
     expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, testcube,dim2, testcube.cityid,"
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, testcube,dim2, testcube.cityid,"
           + " count(testcube.msr4), sum(testcube.msr2), max(testcube.msr3) FROM ", null,
         " group by testcube.dim1, testcube.dim2, testcube.cityid", expectedRangeWhere, "C1_summary3");
     compareQueries(hqlQuery, expected);
@@ -1507,7 +1486,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
         + " left outer join statedim s1 on c1.stateid = s1.id"
         + " left outer join citydim c2 on s1.countryid = c2.id where " + TWO_DAYS_RANGE;
     Configuration conf = getConfWithStorages("C1");
-    conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, true);
+    conf.setBoolean(DISABLE_AUTO_JOINS, true);
     String hqlQuery = rewrite(cubeQl, conf);
     String db = getDbName();
     String expectedJoin =
@@ -1517,19 +1496,19 @@ public class TestCubeRewriter extends TestQueryRewrite {
         + db + "c1_citytable c2 ON (( s1 . countryid ) = ( c2 . id )) AND (c2.dt = 'latest')";
 
     String expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2)" + " FROM ", expectedJoin, null, null, null,
-        getWhereForHourly2days(cubeName, "C1_testfact2"));
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2)" + " FROM ", expectedJoin, null, null, null,
+        getWhereForHourly2days(TEST_CUBE_NAME, "C1_testfact2"));
     compareQueries(hqlQuery, expected);
   }
 
   @Test
   public void testJoinPathColumnLifeValidation() throws Exception {
     HiveConf testConf = new HiveConf(new HiveConf(getConf(), HiveConf.class));
-    testConf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
-    System.out.println("@@ Joins disabled? " + testConf.get(CubeQueryConfUtil.DISABLE_AUTO_JOINS));
+    testConf.setBoolean(DISABLE_AUTO_JOINS, false);
+    System.out.println("@@ Joins disabled? " + testConf.get(DISABLE_AUTO_JOINS));
     // Set column life of dim2 column in testCube
     CubeMetastoreClient client = CubeMetastoreClient.getInstance(testConf);
-    Cube cube = (Cube) client.getCube(cubeName);
+    Cube cube = (Cube) client.getCube(TEST_CUBE_NAME);
 
     ReferencedDimAtrribute col = (ReferencedDimAtrribute) cube.getColumnByName("cdim2");
     assertNotNull(col);
@@ -1544,7 +1523,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     } finally {
       // Add old column back
       cube.alterDimension(col);
-      client.alterCube(cubeName, cube);
+      client.alterCube(TEST_CUBE_NAME, cube);
     }
 
     // Assert same query succeeds with valid column
@@ -1556,7 +1535,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
         col.getReferences(), oneWeekBack, null,
         col.getCost());
     cube.alterDimension(newDim2);
-    client.alterCube(cubeName, cube);
+    client.alterCube(TEST_CUBE_NAME, cube);
     String hql = rewrite(query, testConf);
     assertNotNull(hql);
   }
@@ -1582,12 +1561,11 @@ public class TestCubeRewriter extends TestQueryRewrite {
     // Disabling conf should not replace the time dimension
 
     String query =
-      "SELECT test_time_dim, msr2 FROM testCube where " + "time_range_in(test_time_dim, '"
-        + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')";
+      "SELECT test_time_dim, msr2 FROM testCube where " + TWO_DAYS_RANGE_TTD;
 
     HiveConf hconf = new HiveConf(getConf(), TestCubeRewriter.class);
-    hconf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
-    hconf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1,C2,C3,C4");
+    hconf.setBoolean(DISABLE_AUTO_JOINS, false);
+    hconf.set(DRIVER_SUPPORTED_STORAGES, "C1,C2,C3,C4");
     hconf.setBoolean(CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, true);
 
     CubeQueryRewriter rewriter = new CubeQueryRewriter(hconf, hconf);
@@ -1637,7 +1615,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
   public void testSelectDimonlyJoinOnCube() throws Exception {
     String query = "SELECT count (distinct citydim.name) from testCube where " + TWO_DAYS_RANGE;
     Configuration conf = new Configuration(getConf());
-    conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
+    conf.setBoolean(DISABLE_AUTO_JOINS, false);
     String hql = rewrite(query, conf);
     System.out.println("@@ HQL = " + hql);
     assertNotNull(hql);
@@ -1658,10 +1636,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
 
     // Test 1 - check for contained part columns
-    String twoDaysITRange =
-      "time_range_in(it, '" + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','"
-        + CubeTestSetup.getDateUptoHours(NOW) + "')";
-    String query = "select dim1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange;
+    String query = "select dim1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT;
 
     HiveConf conf = new HiveConf(getConf(), TestCubeRewriter.class);
     conf.set(CubeQueryConfUtil.PROCESS_TIME_PART_COL, "pt");

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
deleted file mode 100644
index ff9a96d..0000000
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
+++ /dev/null
@@ -1,299 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import static java.util.Calendar.DAY_OF_MONTH;
-import static java.util.Calendar.MONTH;
-
-import static org.apache.lens.cube.metadata.UpdatePeriod.*;
-import static org.apache.lens.cube.parse.DateUtil.*;
-
-import static org.apache.commons.lang.time.DateUtils.addMilliseconds;
-
-import static org.testng.Assert.assertEquals;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.Set;
-
-import org.apache.lens.cube.parse.DateUtil.*;
-import org.apache.lens.server.api.error.LensException;
-
-import org.apache.commons.lang.time.DateUtils;
-
-import org.testng.annotations.BeforeTest;
-import org.testng.annotations.Test;
-
-import com.beust.jcommander.internal.Sets;
-import com.google.common.collect.Lists;
-
-import lombok.extern.slf4j.Slf4j;
-
-/**
- * Unit tests for cube DateUtil class TestDateUtil.
- */
-@Slf4j
-public class TestDateUtil {
-
-  public static final String[] TEST_PAIRS = {
-    "2013-Jan-01", "2013-Jan-31", "2013-Jan-01", "2013-May-31",
-    "2013-Jan-01", "2013-Dec-31", "2013-Feb-01", "2013-Apr-25",
-    "2012-Feb-01", "2013-Feb-01", "2011-Feb-01", "2013-Feb-01",
-    "2013-Jan-02", "2013-Feb-02", "2013-Jan-02", "2013-Mar-02",
-  };
-
-  public static final SimpleDateFormat DATE_FMT = new SimpleDateFormat("yyyy-MMM-dd");
-
-  private Date[] pairs;
-
-  @BeforeTest
-  public void setUp() {
-    pairs = new Date[TEST_PAIRS.length];
-    for (int i = 0; i < TEST_PAIRS.length; i++) {
-      try {
-        pairs[i] = DATE_FMT.parse(TEST_PAIRS[i]);
-      } catch (ParseException e) {
-        log.error("Parsing exception while setup.", e);
-      }
-    }
-  }
-
-
-  @Test
-  public void testMonthsBetween() throws Exception {
-    int i = 0;
-    assertEquals(getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
-      new CoveringInfo(1, true),
-      "2013-Jan-01 to 2013-Jan-31");
-
-    i += 2;
-    assertEquals(getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
-      new CoveringInfo(5, true),
-      "2013-Jan-01 to 2013-May-31");
-
-    i += 2;
-    assertEquals(getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
-      new CoveringInfo(12, true),
-      "2013-Jan-01 to 2013-Dec-31");
-
-    i += 2;
-    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(2, false),
-      "2013-Feb-01 to 2013-Apr-25");
-
-    i += 2;
-    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(12, true),
-      "2012-Feb-01 to 2013-Feb-01");
-
-    i += 2;
-    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(24, true),
-      "2011-Feb-01 to 2013-Feb-01");
-
-    i += 2;
-    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
-      "2013-Jan-02 to 2013-Feb-02");
-
-    i += 2;
-    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(1, false),
-      "2013-Jan-02 to 2013-Mar-02");
-  }
-
-  @Test
-  public void testQuartersBetween() throws Exception {
-    int i = 0;
-    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
-      "2013-Jan-01 to 2013-Jan-31");
-
-    i += 2;
-    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(1, false),
-      "2013-Jan-01 to 2013-May-31");
-
-    i += 2;
-    assertEquals(getQuarterlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
-      new CoveringInfo(4, true),
-      "2013-Jan-01 to 2013-Dec-31");
-
-    i += 2;
-    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
-      "2013-Feb-01 to 2013-Apr-25");
-
-    i += 2;
-    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(3, false),
-      "2012-Feb-01 to 2013-Feb-01");
-
-    i += 2;
-    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(7, false),
-      "2011-Feb-01 to 2013-Feb-01");
-  }
-
-  @Test
-  public void testYearsBetween() throws Exception {
-    int i = 0;
-    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
-      "" + pairs[i] + "->" + pairs[i + 1]);
-
-    i += 2;
-    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
-      "" + pairs[i] + "->" + pairs[i + 1]);
-
-    i += 2;
-    assertEquals(getYearlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
-      new CoveringInfo(1, true), ""
-        + pairs[i] + "->" + pairs[i + 1]);
-
-    i += 2;
-    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
-      "" + pairs[i] + "->" + pairs[i + 1]);
-
-    i += 2;
-    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
-      "" + pairs[i] + "->" + pairs[i + 1]);
-
-    i += 2;
-    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(1, false),
-      "" + pairs[i] + "->" + pairs[i + 1]);
-  }
-
-  @Test
-  public void testWeeksBetween() throws Exception {
-    CoveringInfo weeks;
-
-    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-26"), DATE_FMT.parse("2013-Jun-2"));
-    assertEquals(weeks, new CoveringInfo(1, true), "2013-May-26 to 2013-Jun-2");
-
-    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-27"), DATE_FMT.parse("2013-Jun-3"));
-    assertEquals(weeks, new CoveringInfo(0, false), "2013-May-26 to 2013-Jun-2");
-
-    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-27"), DATE_FMT.parse("2013-Jun-9"));
-    assertEquals(weeks, new CoveringInfo(1, false), "2013-May-26 to 2013-Jun-2");
-
-    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-27"), DATE_FMT.parse("2013-Jun-1"));
-    assertEquals(weeks, new CoveringInfo(0, false), "2013-May-27 to 2013-Jun-1");
-
-    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-25"), DATE_FMT.parse("2013-Jun-2"));
-    assertEquals(weeks, new CoveringInfo(1, false), "2013-May-25 to 2013-Jun-1");
-
-    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-26"), DATE_FMT.parse("2013-Jun-9"));
-    assertEquals(weeks, new CoveringInfo(2, true), "2013-May-26 to 2013-Jun-8");
-
-    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-26"), DATE_FMT.parse("2013-Jun-10"));
-    assertEquals(weeks, new CoveringInfo(2, false), "2013-May-26 to 2013-Jun-10");
-
-    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2015-Dec-27"), DATE_FMT.parse("2016-Jan-03"));
-    assertEquals(weeks, new CoveringInfo(1, true), "2015-Dec-27 to 2016-Jan-03");
-  }
-
-  @Test
-  public void testNowWithGranularity() throws Exception {
-    String dateFmt = "yyyy/MM/dd-HH.mm.ss.SSS";
-    // Tuesday Sept 23, 2014, 12.02.05.500 pm
-    String testDateStr = "2014/09/23-12.02.05.500";
-    final SimpleDateFormat sdf = new SimpleDateFormat(dateFmt);
-    final Date testDate = sdf.parse(testDateStr);
-
-    System.out.print("@@ testDateStr=" + testDateStr + " parsed date=" + testDate);
-
-    // Tests without a diff, just resolve now with different granularity
-    assertEquals(testDateStr, sdf.format(resolveDate("now", testDate)));
-    assertEquals("2014/01/01-00.00.00.000", sdf.format(resolveDate("now.year", testDate)));
-    assertEquals("2014/09/01-00.00.00.000", sdf.format(resolveDate("now.month", testDate)));
-    // Start of week resolves to Sunday
-    assertEquals("2014/09/21-00.00.00.000", sdf.format(resolveDate("now.week", testDate)));
-    assertEquals("2014/09/23-00.00.00.000", sdf.format(resolveDate("now.day", testDate)));
-    assertEquals("2014/09/23-12.00.00.000", sdf.format(resolveDate("now.hour", testDate)));
-    assertEquals("2014/09/23-12.02.00.000", sdf.format(resolveDate("now.minute", testDate)));
-    assertEquals("2014/09/23-12.02.05.000", sdf.format(resolveDate("now.second", testDate)));
-
-    // Tests with a diff
-    assertEquals("2014/09/22-00.00.00.000", sdf.format(resolveDate("now.day -1day", testDate)));
-    assertEquals("2014/09/23-10.00.00.000", sdf.format(resolveDate("now.hour -2hour", testDate)));
-    assertEquals("2014/09/24-12.00.00.000", sdf.format(resolveDate("now.hour +24hour", testDate)));
-    assertEquals("2015/01/01-00.00.00.000", sdf.format(resolveDate("now.year +1year", testDate)));
-    assertEquals("2014/02/01-00.00.00.000", sdf.format(resolveDate("now.year +1month", testDate)));
-  }
-
-  @Test
-  public void testFloorDate() throws ParseException {
-    Date date = ABSDATE_PARSER.get().parse("2015-01-01-00:00:00,000");
-    Date curDate = date;
-    for (int i = 0; i < 284; i++) {
-      assertEquals(getFloorDate(curDate, YEARLY), date);
-      curDate = addMilliseconds(curDate, 111111111);
-    }
-    assertEquals(getFloorDate(curDate, YEARLY), DateUtils.addYears(date, 1));
-    assertEquals(getFloorDate(date, WEEKLY), ABSDATE_PARSER.get().parse("2014-12-28-00:00:00,000"));
-  }
-
-  @Test
-  public void testCeilDate() throws ParseException {
-    Date date = ABSDATE_PARSER.get().parse("2015-12-26-06:30:15,040");
-    assertEquals(getCeilDate(date, YEARLY), ABSDATE_PARSER.get().parse("2016-01-01-00:00:00,000"));
-    assertEquals(getCeilDate(date, MONTHLY), ABSDATE_PARSER.get().parse("2016-01-01-00:00:00,000"));
-    assertEquals(getCeilDate(date, DAILY), ABSDATE_PARSER.get().parse("2015-12-27-00:00:00,000"));
-    assertEquals(getCeilDate(date, HOURLY), ABSDATE_PARSER.get().parse("2015-12-26-07:00:00,000"));
-    assertEquals(getCeilDate(date, MINUTELY), ABSDATE_PARSER.get().parse("2015-12-26-06:31:00,000"));
-    assertEquals(getCeilDate(date, SECONDLY), ABSDATE_PARSER.get().parse("2015-12-26-06:30:16,000"));
-    assertEquals(getCeilDate(date, WEEKLY), ABSDATE_PARSER.get().parse("2015-12-27-00:00:00,000"));
-  }
-
-  @Test
-  public void testTimeDiff() throws LensException {
-    ArrayList<String> minusFourDays =
-      Lists.newArrayList("-4 days", "-4days", "-4day", "-4 day", "- 4days", "- 4 day");
-    ArrayList<String> plusFourDays =
-      Lists.newArrayList("+4 days", "4 days", "+4days", "4day", "4 day", "+ 4days", "+ 4 day", "+4 day");
-    Set<TimeDiff> diffs = Sets.newHashSet();
-    for (String diffStr : minusFourDays) {
-      diffs.add(TimeDiff.parseFrom(diffStr));
-    }
-    assertEquals(diffs.size(), 1);
-    TimeDiff minusFourDaysDiff = diffs.iterator().next();
-    assertEquals(minusFourDaysDiff.quantity, -4);
-    assertEquals(minusFourDaysDiff.updatePeriod, DAILY);
-
-    diffs.clear();
-    for (String diffStr : plusFourDays) {
-      diffs.add(TimeDiff.parseFrom(diffStr));
-    }
-    assertEquals(diffs.size(), 1);
-    TimeDiff plusFourDaysDiff = diffs.iterator().next();
-    assertEquals(plusFourDaysDiff.quantity, 4);
-    assertEquals(plusFourDaysDiff.updatePeriod, DAILY);
-    Date now = new Date();
-    assertEquals(minusFourDaysDiff.offsetFrom(plusFourDaysDiff.offsetFrom(now)), now);
-    assertEquals(plusFourDaysDiff.offsetFrom(minusFourDaysDiff.offsetFrom(now)), now);
-    assertEquals(minusFourDaysDiff.negativeOffsetFrom(now), plusFourDaysDiff.offsetFrom(now));
-    assertEquals(minusFourDaysDiff.offsetFrom(now), plusFourDaysDiff.negativeOffsetFrom(now));
-  }
-
-  @Test
-  public void testRelativeToAbsolute() throws LensException {
-    Date now = new Date();
-    Date nowDay = DateUtils.truncate(now, DAY_OF_MONTH);
-    Date nowDayMinus2Days = DateUtils.add(nowDay, DAY_OF_MONTH, -2);
-    assertEquals(relativeToAbsolute("now", now), DateUtil.ABSDATE_PARSER.get().format(now));
-    assertEquals(relativeToAbsolute("now.day", now), DateUtil.ABSDATE_PARSER.get().format(nowDay));
-    assertEquals(relativeToAbsolute("now.day - 2 days", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
-    assertEquals(relativeToAbsolute("now.day - 2 day", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
-    assertEquals(relativeToAbsolute("now.day - 2day", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
-    assertEquals(relativeToAbsolute("now.day -2 day", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
-    assertEquals(relativeToAbsolute("now.day -2 days", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
-  }
-}


[41/51] [abbrv] lens git commit: LENS-921 : Clean unaccessed InMemoryResultSet instances from server

Posted by de...@apache.org.
LENS-921 : Clean unaccessed InMemoryResultSet instances from server


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/2539f338
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/2539f338
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/2539f338

Branch: refs/heads/current-release-line
Commit: 2539f338ad06e2c4a93ba6f816a837757aa5a9f1
Parents: f1d6e3f
Author: Puneet Gupta <pu...@gmail.com>
Authored: Mon Feb 1 14:14:03 2016 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Mon Feb 1 14:14:03 2016 +0530

----------------------------------------------------------------------
 .../lens/server/api/LensConfConstants.java      |  11 ++
 .../server/api/driver/InMemoryResultSet.java    |   8 +-
 .../server/query/QueryExecutionServiceImpl.java |  21 ++-
 .../src/main/resources/lensserver-default.xml   |  10 ++
 .../lens/server/query/TestQueryService.java     |  54 +++++++
 lens-server/src/test/resources/lens-site.xml    |   2 +-
 src/site/apt/admin/config.apt                   | 156 ++++++++++---------
 src/site/apt/admin/session-config.apt           |   2 +-
 8 files changed, 182 insertions(+), 82 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/2539f338/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java b/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
index a3dbfc0..8df389b 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
@@ -1006,4 +1006,15 @@ public final class LensConfConstants {
    * The Constant DEFAULT_EXCLUDE_CUBE_TABLES.
    */
   public static final boolean DEFAULT_EXCLUDE_CUBE_TABLES = true;
+
+  /**
+   * This property defines the TTL secs for all result sets of
+   * type {@link org.apache.lens.server.api.driver.InMemoryResultSet} beyond which they are eligible for purging
+   */
+  public static final String INMEMORY_RESULT_SET_TTL_SECS = SERVER_PFX + "inmemory.resultset.ttl.secs";
+
+  /**
+   * Default value of  INMEMORY_RESULT_SET_TTL_SECS is 300 secs (5 minutes)
+   */
+  public static final int DEFAULT_INMEMORY_RESULT_SET_TTL_SECS = 300;
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/2539f338/lens-server-api/src/main/java/org/apache/lens/server/api/driver/InMemoryResultSet.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/InMemoryResultSet.java b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/InMemoryResultSet.java
index c64a3dd..f6434da 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/InMemoryResultSet.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/InMemoryResultSet.java
@@ -25,6 +25,7 @@ import org.apache.lens.api.query.InMemoryQueryResult;
 import org.apache.lens.api.query.ResultRow;
 import org.apache.lens.server.api.error.LensException;
 
+import lombok.Getter;
 import lombok.Setter;
 
 /**
@@ -32,11 +33,14 @@ import lombok.Setter;
  */
 public abstract class InMemoryResultSet extends LensResultSet {
 
-  public abstract boolean seekToStart() throws LensException;
-
   @Setter
   private boolean fullyAccessed = false;
 
+  @Getter
+  private long creationTime = System.currentTimeMillis();;
+
+  public abstract boolean seekToStart() throws LensException;
+
   @Override
   public boolean canBePurged() {
     return fullyAccessed;

http://git-wip-us.apache.org/repos/asf/lens/blob/2539f338/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
index 672f2be..19077d2 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
@@ -273,6 +273,14 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
   private final ExecutorService waitingQueriesSelectionSvc = Executors.newSingleThreadExecutor();
 
   /**
+   * This is the TTL millis for all result sets of type {@link org.apache.lens.server.api.driver.InMemoryResultSet}
+   * Note : this field is non final and has a Getter and Setter for test cases
+   */
+  @Getter
+  @Setter
+  private long inMemoryResultsetTTLMillis;
+
+  /**
    * The driver event listener.
    */
   final LensEventListener<DriverEvent> driverEventListener = new LensEventListener<DriverEvent>() {
@@ -533,7 +541,14 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
         if (getCtx().getStatus().getStatus().equals(SUCCESSFUL)) {
           if (getCtx().getStatus().isResultSetAvailable()) {
             LensResultSet rs = getResultset();
-            log.info("Resultset for {} is {}", getQueryHandle(), rs);
+            log.info("Resultset for {} is {}", getQueryHandle(), rs.getClass().getSimpleName());
+            if (rs instanceof InMemoryResultSet
+                && System.currentTimeMillis()
+                > ((InMemoryResultSet) rs).getCreationTime() + inMemoryResultsetTTLMillis) {
+              log.info("InMemoryResultSet for query {} has exceeded its TTL and is eligible for purging now",
+                  getQueryHandle());
+              return true;
+            }
             return rs.canBePurged();
           }
         }
@@ -1108,6 +1123,10 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
     }
     purgeInterval = conf.getInt(PURGE_INTERVAL, DEFAULT_PURGE_INTERVAL);
     initalizeFinishedQueryStore(conf);
+
+    inMemoryResultsetTTLMillis = conf.getInt(
+        LensConfConstants.INMEMORY_RESULT_SET_TTL_SECS, LensConfConstants.DEFAULT_INMEMORY_RESULT_SET_TTL_SECS) * 1000;
+
     log.info("Query execution service initialized");
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/2539f338/lens-server/src/main/resources/lensserver-default.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/main/resources/lensserver-default.xml b/lens-server/src/main/resources/lensserver-default.xml
index cac641a..881c159 100644
--- a/lens-server/src/main/resources/lensserver-default.xml
+++ b/lens-server/src/main/resources/lensserver-default.xml
@@ -308,6 +308,16 @@
     <value>10000</value>
     <description>The interval(milliseconds) with which purger to run periodically. Default 10 sec. </description>
   </property>
+
+  <property>
+    <name>lens.server.inmemory.resultset.ttl.secs</name>
+    <value>300</value>
+    <description>This property defines the TTL(time to live) in seconds for all result sets of type InMemoryResultSet
+     beyond which they are eligible for purging irrespective of whether the result set has been read or not.
+     The default value is 300 seconds (5 minutes).
+     </description>
+  </property>
+
   <property>
     <name>lens.server.domain</name>
     <value>company.com</value>

http://git-wip-us.apache.org/repos/asf/lens/blob/2539f338/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index 3facded..5d949d2 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -53,6 +53,7 @@ import org.apache.lens.server.LensJerseyTest;
 import org.apache.lens.server.LensServerTestUtil;
 import org.apache.lens.server.LensServices;
 import org.apache.lens.server.api.LensConfConstants;
+import org.apache.lens.server.api.driver.InMemoryResultSet;
 import org.apache.lens.server.api.driver.LensDriver;
 import org.apache.lens.server.api.error.LensDriverErrorCode;
 import org.apache.lens.server.api.error.LensException;
@@ -89,6 +90,7 @@ import org.testng.annotations.Test;
 
 import com.codahale.metrics.MetricRegistry;
 import com.google.common.base.Optional;
+
 import lombok.extern.slf4j.Slf4j;
 
 /**
@@ -984,6 +986,58 @@ public class TestQueryService extends LensJerseyTest {
     validNotFoundForHttpResult(target(), lensSessionId, handle);
   }
 
+  @Test
+  public void testTTLForInMemoryResult() throws InterruptedException, IOException, LensException {
+    long inMemoryresultsetTTLMillisBackup = queryService.getInMemoryResultsetTTLMillis();
+    queryService.setInMemoryResultsetTTLMillis(5000); // 5 secs
+    try {
+      // test post execute op
+      final WebTarget target = target().path("queryapi/queries");
+
+      final FormDataMultiPart mp = new FormDataMultiPart();
+      LensConf conf = new LensConf();
+      conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
+      conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_SET, "false");
+      conf.addProperty(LensConfConstants.QUERY_MAIL_NOTIFY, "false");
+      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
+          MediaType.APPLICATION_XML_TYPE));
+      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "select ID, IDSTR from "
+          + TEST_TABLE));
+      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
+      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
+          MediaType.APPLICATION_XML_TYPE));
+
+      final QueryHandle handle =
+          target
+              .request()
+              .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+                  new GenericType<LensAPIResult<QueryHandle>>() {
+                  }).getData();
+      assertNotNull(handle);
+
+      waitForQueryToFinish(target(), lensSessionId, handle, Status.SUCCESSFUL);
+
+      // Check TTL
+      QueryContext ctx = queryService.getQueryContext(lensSessionId, handle);
+      long softExpiryTime = ctx.getDriverStatus().getDriverFinishTime()
+          + queryService.getInMemoryResultsetTTLMillis() - 1000; // Keeping a buffer of 1 sec
+      int checkCount = 0;
+      while (System.currentTimeMillis() < softExpiryTime) {
+        assertEquals(queryService.getFinishedQueriesCount(), 1);
+        assertEquals(queryService.finishedQueries.peek().canBePurged(), false);
+        assertEquals(((InMemoryResultSet) queryService.getResultset(handle)).canBePurged(), false);
+        checkCount++;
+        Thread.sleep(1000); // sleep for 1 sec and then check again
+      }
+      assertTrue(checkCount >= 2, "CheckCount = " + checkCount); // TTL check at least twice
+
+      Thread.sleep(3000); // should be past TTL after this sleep. Purge thread runs every 1 sec for tests
+      assertEquals(queryService.getFinishedQueriesCount(), 0);
+    } finally {
+      queryService.setInMemoryResultsetTTLMillis(inMemoryresultsetTTLMillisBackup);
+    }
+  }
+
   /**
    * Test execute async temp table.
    *

http://git-wip-us.apache.org/repos/asf/lens/blob/2539f338/lens-server/src/test/resources/lens-site.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/test/resources/lens-site.xml b/lens-server/src/test/resources/lens-site.xml
index 9cb4a6f..c3187a8 100644
--- a/lens-server/src/test/resources/lens-site.xml
+++ b/lens-server/src/test/resources/lens-site.xml
@@ -128,7 +128,7 @@
 
   <property>
     <!-- run every second -->
-    <name>lens.server.purge.interval</name>
+    <name>lens.server.querypurger.sleep.interval</name>
     <value>1000</value>
   </property>
 

http://git-wip-us.apache.org/repos/asf/lens/blob/2539f338/src/site/apt/admin/config.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/admin/config.apt b/src/site/apt/admin/config.apt
index 54f827e..703abb3 100644
--- a/src/site/apt/admin/config.apt
+++ b/src/site/apt/admin/config.apt
@@ -91,160 +91,162 @@ Lens server configuration
 *--+--+---+--+
 |32|lens.server.index.ws.resource.impl|org.apache.lens.server.IndexResource|Implementation class for Index Resource|
 *--+--+---+--+
-|33|lens.server.log.ws.resource.impl|org.apache.lens.server.LogResource|Implementation class for Log Resource|
+|33|lens.server.inmemory.resultset.ttl.secs|300|This property defines the TTL (time to live) in seconds for all result sets of type InMemoryResultSet beyond which they are eligible for purging irrespective of whether the result set has been read or not. The default value is 300 seconds (5 minutes).|
 *--+--+---+--+
-|34|lens.server.mail.from.address|blah@company.com|The from field in the notifier mail to the submitter.|
+|34|lens.server.log.ws.resource.impl|org.apache.lens.server.LogResource|Implementation class for Log Resource|
 *--+--+---+--+
-|35|lens.server.mail.host|mail-host.company.com|SMTP Host for sending mail|
+|35|lens.server.mail.from.address|blah@company.com|The from field in the notifier mail to the submitter.|
 *--+--+---+--+
-|36|lens.server.mail.port|25|SMTP Port|
+|36|lens.server.mail.host|mail-host.company.com|SMTP Host for sending mail|
 *--+--+---+--+
-|37|lens.server.mail.smtp.connectiontimeout|15000|Socket connection timeout value in milliseconds. This timeout is implemented by java.net.Socket. Default is 15 seconds.|
+|37|lens.server.mail.port|25|SMTP Port|
 *--+--+---+--+
-|38|lens.server.mail.smtp.timeout|30000|Socket read timeout value in milliseconds. This timeout is implemented by java.net.Socket. Default is 30 seconds.|
+|38|lens.server.mail.smtp.connectiontimeout|15000|Socket connection timeout value in milliseconds. This timeout is implemented by java.net.Socket. Default is 15 seconds.|
 *--+--+---+--+
-|39|lens.server.metastore.service.impl|org.apache.lens.server.metastore.CubeMetastoreServiceImpl|Implementation class for metastore service|
+|39|lens.server.mail.smtp.timeout|30000|Socket read timeout value in milliseconds. This timeout is implemented by java.net.Socket. Default is 30 seconds.|
 *--+--+---+--+
-|40|lens.server.metastore.ws.resource.impl|org.apache.lens.server.metastore.MetastoreResource|Implementation class for Metastore Resource|
+|40|lens.server.metastore.service.impl|org.apache.lens.server.metastore.CubeMetastoreServiceImpl|Implementation class for metastore service|
 *--+--+---+--+
-|41|lens.server.metrics.csv.directory.path|metrics/|Path of the directory in which to report metrics as separate csv files.|
+|41|lens.server.metastore.ws.resource.impl|org.apache.lens.server.metastore.MetastoreResource|Implementation class for Metastore Resource|
 *--+--+---+--+
-|42|lens.server.metrics.ganglia.host| |The ganglia host name|
+|42|lens.server.metrics.csv.directory.path|metrics/|Path of the directory in which to report metrics as separate csv files.|
 *--+--+---+--+
-|43|lens.server.metrics.ganglia.port| |The ganglia port|
+|43|lens.server.metrics.ganglia.host| |The ganglia host name|
 *--+--+---+--+
-|44|lens.server.metrics.graphite.host| |The graphite host name|
+|44|lens.server.metrics.ganglia.port| |The ganglia port|
 *--+--+---+--+
-|45|lens.server.metrics.graphite.port| |The graphite port|
+|45|lens.server.metrics.graphite.host| |The graphite host name|
 *--+--+---+--+
-|46|lens.server.metrics.reporting.period|10|The reporting period for metrics. The value is in seconds|
+|46|lens.server.metrics.graphite.port| |The graphite port|
 *--+--+---+--+
-|47|lens.server.mode|OPEN|The mode in which server should run. Allowed values are OPEN, READ_ONLY, METASTORE_READONLY, METASTORE_NODROP. OPEN mode will allow all requests. READ_ONLY mode will allow all requests on session resouce and only GET requests on all other resources. METASTORE_READONLY will allow GET on metastore and all other requests in other services. METASTORE_NODROP will not allow DELETE on metastore, will allow all other requests.|
+|47|lens.server.metrics.reporting.period|10|The reporting period for metrics. The value is in seconds|
 *--+--+---+--+
-|48|lens.server.multipart.ws.feature.impl|org.glassfish.jersey.media.multipart.MultiPartFeature|Implementation class for query scheduler resource|
+|48|lens.server.mode|OPEN|The mode in which server should run. Allowed values are OPEN, READ_ONLY, METASTORE_READONLY, METASTORE_NODROP. OPEN mode will allow all requests. READ_ONLY mode will allow all requests on session resouce and only GET requests on all other resources. METASTORE_READONLY will allow GET on metastore and all other requests in other services. METASTORE_NODROP will not allow DELETE on metastore, will allow all other requests.|
 *--+--+---+--+
-|49|lens.server.persist.location|file:///tmp/lensserver|The directory in which lens server will persist its state when it is going down. The location be on any Hadoop compatible file system. Server will read from the location when it is restarted and recovery is enabled. So, Server should have both read and write permissions to the location|
+|49|lens.server.multipart.ws.feature.impl|org.glassfish.jersey.media.multipart.MultiPartFeature|Implementation class for query scheduler resource|
 *--+--+---+--+
-|50|lens.server.query.acceptors| |Query Acceptors configured. Query acceptors are consulted first, before anything happens for the given query. They can either return null or return a messaging indicating why the given query shouldn't be accepted. These can be used to filter out queries at the earliest.|
+|50|lens.server.persist.location|file:///tmp/lensserver|The directory in which lens server will persist its state when it is going down. The location be on any Hadoop compatible file system. Server will read from the location when it is restarted and recovery is enabled. So, Server should have both read and write permissions to the location|
 *--+--+---+--+
-|51|lens.server.query.launching.constraint.factories|org.apache.lens.server.query.constraint.TotalQueryCostCeilingConstraintFactory|Factories used to instantiate constraints enforced on queries by lens. Every Factory should be an implementation of org.apache.lens.server.api.common.ConfigBasedObjectCreationFactory and create an implementation of org.apache.lens.server.api.query.constraint.QueryLaunchingConstraint. A query will be launched only if all constraints pass.|
+|51|lens.server.query.acceptors| |Query Acceptors configured. Query acceptors are consulted first, before anything happens for the given query. They can either return null or return a messaging indicating why the given query shouldn't be accepted. These can be used to filter out queries at the earliest.|
 *--+--+---+--+
-|52|lens.server.query.phase1.rewriters| |Query phase 1 rewriters. This is to convert user query to cube query. The resulting cube query will be passed for validation and rewriting to hql query.\ |
+|52|lens.server.query.launching.constraint.factories|org.apache.lens.server.query.constraint.TotalQueryCostCeilingConstraintFactory|Factories used to instantiate constraints enforced on queries by lens. Every Factory should be an implementation of org.apache.lens.server.api.common.ConfigBasedObjectCreationFactory and create an implementation of org.apache.lens.server.api.query.constraint.QueryLaunchingConstraint. A query will be launched only if all constraints pass.|
+*--+--+---+--+
+|53|lens.server.query.phase1.rewriters| |Query phase 1 rewriters. This is to convert user query to cube query. The resulting cube query will be passed for validation and rewriting to hql query.\ |
 |  |                                  | |Use cases will be to use extra intelligence to convert user query to optimized cube query.                                                              \ |
 |  |                                  | |Or define shortcuts for certain frequently used queries :)                                                                                                |
 *--+--+---+--+
-|53|lens.server.query.resultset.retention|1 day|Lens query resultset retention period. Default 1 day|
+|54|lens.server.query.resultset.retention|1 day|Lens query resultset retention period. Default 1 day|
 *--+--+---+--+
-|54|lens.server.query.service.impl|org.apache.lens.server.query.QueryExecutionServiceImpl|Implementation class for query execution service|
+|55|lens.server.query.service.impl|org.apache.lens.server.query.QueryExecutionServiceImpl|Implementation class for query execution service|
 *--+--+---+--+
-|55|lens.server.query.state.logger.enabled|true|Disable or enable the query state logger with this config. The location for the logger can be specified in logback xml for the class org.apache.lens.server.query.QueryExecutionServiceImpl.QueryStatusLogger|
+|56|lens.server.query.state.logger.enabled|true|Disable or enable the query state logger with this config. The location for the logger can be specified in logback xml for the class org.apache.lens.server.query.QueryExecutionServiceImpl.QueryStatusLogger|
 *--+--+---+--+
-|56|lens.server.query.ws.resource.impl|org.apache.lens.server.query.QueryServiceResource|Implementation class for Query Resource|
+|57|lens.server.query.ws.resource.impl|org.apache.lens.server.query.QueryServiceResource|Implementation class for Query Resource|
 *--+--+---+--+
-|57|lens.server.querypurger.sleep.interval|10000|The interval(milliseconds) with which purger to run periodically. Default 10 sec.|
+|58|lens.server.querypurger.sleep.interval|10000|The interval(milliseconds) with which purger to run periodically. Default 10 sec.|
 *--+--+---+--+
-|58|lens.server.quota.service.impl|org.apache.lens.server.quota.QuotaServiceImpl|Implementation class for quota service|
+|59|lens.server.quota.service.impl|org.apache.lens.server.quota.QuotaServiceImpl|Implementation class for quota service|
 *--+--+---+--+
-|59|lens.server.quota.ws.resource.impl|org.apache.lens.server.quota.QuotaResource|Implementation class for Quota Resource|
+|60|lens.server.quota.ws.resource.impl|org.apache.lens.server.quota.QuotaResource|Implementation class for Quota Resource|
 *--+--+---+--+
-|60|lens.server.recover.onrestart|true|If the flag is enabled, all the services will be started from last saved state, if disabled all the services will start afresh|
+|61|lens.server.recover.onrestart|true|If the flag is enabled, all the services will be started from last saved state, if disabled all the services will start afresh|
 *--+--+---+--+
-|61|lens.server.restart.enabled|true|If flag is enabled, all the services will be persisted to persistent location passed.|
+|62|lens.server.restart.enabled|true|If flag is enabled, all the services will be persisted to persistent location passed.|
 *--+--+---+--+
-|62|lens.server.resultset.purge.enabled|false|Whether to purge the query results|
+|63|lens.server.resultset.purge.enabled|false|Whether to purge the query results|
 *--+--+---+--+
-|63|lens.server.resultsetpurger.sleep.interval.secs|3600|Periodicity for Query result purger runs. Default 1 hour.|
+|64|lens.server.resultsetpurger.sleep.interval.secs|3600|Periodicity for Query result purger runs. Default 1 hour.|
 *--+--+---+--+
-|64|lens.server.savedquery.jdbc.dialectclass|org.apache.lens.server.query.save.SavedQueryDao$HSQLDialect|Dialect of the target DB, Default is HSQL. Override with the target DB used.|
+|65|lens.server.savedquery.jdbc.dialectclass|org.apache.lens.server.query.save.SavedQueryDao$HSQLDialect|Dialect of the target DB, Default is HSQL. Override with the target DB used.|
 *--+--+---+--+
-|65|lens.server.savedquery.list.default.count|20|Key denoting the default fetch value of saved query list api.|
+|66|lens.server.savedquery.list.default.count|20|Key denoting the default fetch value of saved query list api.|
 *--+--+---+--+
-|66|lens.server.savedquery.list.default.offset|0|Key denoting the default start value of saved query list api.|
+|67|lens.server.savedquery.list.default.offset|0|Key denoting the default start value of saved query list api.|
 *--+--+---+--+
-|67|lens.server.savedquery.service.impl|org.apache.lens.server.query.save.SavedQueryServiceImpl|Implementation class for saved query service|
+|68|lens.server.savedquery.service.impl|org.apache.lens.server.query.save.SavedQueryServiceImpl|Implementation class for saved query service|
 *--+--+---+--+
-|68|lens.server.savedquery.ws.resource.impl|org.apache.lens.server.query.save.SavedQueryResource|Implementation class for Saved query Resource|
+|69|lens.server.savedquery.ws.resource.impl|org.apache.lens.server.query.save.SavedQueryResource|Implementation class for Saved query Resource|
 *--+--+---+--+
-|69|lens.server.scheduler.service.impl|org.apache.lens.server.scheduler.QuerySchedulerService|Implementation class for query scheduler service|
+|70|lens.server.scheduler.service.impl|org.apache.lens.server.scheduler.SchedulerServiceImpl|Implementation class for query scheduler service|
 *--+--+---+--+
-|70|lens.server.scheduler.ws.resource.impl|org.apache.lens.server.scheduler.ScheduleResource|Implementation class for query scheduler resource|
+|71|lens.server.scheduler.ws.resource.impl|org.apache.lens.server.scheduler.ScheduleResource|Implementation class for query scheduler resource|
 *--+--+---+--+
-|71|lens.server.scheduling.queue.poll.interval.millisec|2000|The interval at which submission thread will poll scheduling queue to fetch the next query for submission. If value is less than equal to 0, then it would mean that thread will continuosly poll without sleeping. The interval has to be given in milliseconds.|
+|72|lens.server.scheduling.queue.poll.interval.millisec|2000|The interval at which submission thread will poll scheduling queue to fetch the next query for submission. If value is less than equal to 0, then it would mean that thread will continuosly poll without sleeping. The interval has to be given in milliseconds.|
 *--+--+---+--+
-|72|lens.server.serverMode.ws.filter.impl|org.apache.lens.server.ServerModeFilter|Implementation class for ServerMode Filter|
+|73|lens.server.serverMode.ws.filter.impl|org.apache.lens.server.ServerModeFilter|Implementation class for ServerMode Filter|
 *--+--+---+--+
-|73|lens.server.service.provider.factory|org.apache.lens.server.ServiceProviderFactoryImpl|Service provider factory implementation class. This parameter is used to lookup the factory implementation class name that would provide an instance of ServiceProvider. Users should instantiate the class to obtain its instance. Example -- Class spfClass = conf.getClass("lens.server.service.provider.factory", null, ServiceProviderFactory.class); ServiceProviderFactory spf = spfClass.newInstance(); ServiceProvider serviceProvider = spf.getServiceProvider(); -- This is not supposed to be overridden by users.|
+|74|lens.server.service.provider.factory|org.apache.lens.server.ServiceProviderFactoryImpl|Service provider factory implementation class. This parameter is used to lookup the factory implementation class name that would provide an instance of ServiceProvider. Users should instantiate the class to obtain its instance. Example -- Class spfClass = conf.getClass("lens.server.service.provider.factory", null, ServiceProviderFactory.class); ServiceProviderFactory spf = spfClass.newInstance(); ServiceProvider serviceProvider = spf.getServiceProvider(); -- This is not supposed to be overridden by users.|
 *--+--+---+--+
-|74|lens.server.servicenames|session,query,metastore,scheduler,quota|These services would be started in the specified order when lens-server starts up|
+|75|lens.server.servicenames|session,query,metastore,scheduler,quota|These services would be started in the specified order when lens-server starts up|
 *--+--+---+--+
-|75|lens.server.session.expiry.service.interval.secs|3600|Interval at which lens session expiry service runs|
+|76|lens.server.session.expiry.service.interval.secs|3600|Interval at which lens session expiry service runs|
 *--+--+---+--+
-|76|lens.server.session.service.impl|org.apache.lens.server.session.HiveSessionService|Implementation class for session service|
+|77|lens.server.session.service.impl|org.apache.lens.server.session.HiveSessionService|Implementation class for session service|
 *--+--+---+--+
-|77|lens.server.session.timeout.seconds|86400|Lens session timeout in seconds.If there is no activity on the session for this period then the session will be closed.Default timeout is one day.|
+|78|lens.server.session.timeout.seconds|86400|Lens session timeout in seconds.If there is no activity on the session for this period then the session will be closed.Default timeout is one day.|
 *--+--+---+--+
-|78|lens.server.session.ws.resource.impl|org.apache.lens.server.session.SessionResource|Implementation class for Session Resource|
+|79|lens.server.session.ws.resource.impl|org.apache.lens.server.session.SessionResource|Implementation class for Session Resource|
 *--+--+---+--+
-|79|lens.server.snapshot.interval|300000|Snapshot interval time in miliseconds for saving lens server state.|
+|80|lens.server.snapshot.interval|300000|Snapshot interval time in miliseconds for saving lens server state.|
 *--+--+---+--+
-|80|lens.server.state.persist.out.stream.buffer.size|1048576|Output Stream Buffer Size used in writing lens server state to file system. Size is in bytes.|
+|81|lens.server.state.persist.out.stream.buffer.size|1048576|Output Stream Buffer Size used in writing lens server state to file system. Size is in bytes.|
 *--+--+---+--+
-|81|lens.server.statistics.db|lensstats|Database to which statistics tables are created and partitions are added.|
+|82|lens.server.statistics.db|lensstats|Database to which statistics tables are created and partitions are added.|
 *--+--+---+--+
-|82|lens.server.statistics.log.rollover.interval|3600000|Default rate which log statistics store scans for rollups in milliseconds.|
+|83|lens.server.statistics.log.rollover.interval|3600000|Default rate which log statistics store scans for rollups in milliseconds.|
 *--+--+---+--+
-|83|lens.server.statistics.store.class|org.apache.lens.server.stats.store.log.LogStatisticsStore|Default implementation of class used to persist Lens Statistics.|
+|84|lens.server.statistics.store.class|org.apache.lens.server.stats.store.log.LogStatisticsStore|Default implementation of class used to persist Lens Statistics.|
 *--+--+---+--+
-|84|lens.server.statistics.warehouse.dir|file:///tmp/lens/statistics/warehouse|Default top level location where stats are moved by the log statistics store.|
+|85|lens.server.statistics.warehouse.dir|file:///tmp/lens/statistics/warehouse|Default top level location where stats are moved by the log statistics store.|
 *--+--+---+--+
-|85|lens.server.total.query.cost.ceiling.per.user|-1.0|A query submitted by user will be launched only if total query cost of all current launched queries of user is less than or equal to total query cost ceiling defined by this property. This configuration value is only useful when TotalQueryCostCeilingConstraint is enabled by using org.apache.lens.server.query.constraint.TotalQueryCostCeilingConstraintFactory as one of the factories in lens.server.query.constraint.factories property. Default is -1.0 which means that there is no limit on the total query cost of launched queries submitted by a user.|
+|86|lens.server.total.query.cost.ceiling.per.user|-1.0|A query submitted by user will be launched only if total query cost of all current launched queries of user is less than or equal to total query cost ceiling defined by this property. This configuration value is only useful when TotalQueryCostCeilingConstraint is enabled by using org.apache.lens.server.query.constraint.TotalQueryCostCeilingConstraintFactory as one of the factories in lens.server.query.constraint.factories property. Default is -1.0 which means that there is no limit on the total query cost of launched queries submitted by a user.|
 *--+--+---+--+
-|86|lens.server.ui.base.uri|http://0.0.0.0:19999/|The base url for the Lens UI Server|
+|87|lens.server.ui.base.uri|http://0.0.0.0:19999/|The base url for the Lens UI Server|
 *--+--+---+--+
-|87|lens.server.ui.enable|true|Bringing up the ui server is optional. By default it brings up UI server.|
+|88|lens.server.ui.enable|true|Bringing up the ui server is optional. By default it brings up UI server.|
 *--+--+---+--+
-|88|lens.server.ui.enable.caching|true|Set this to false to disable static file caching in the UI server|
+|89|lens.server.ui.enable.caching|true|Set this to false to disable static file caching in the UI server|
 *--+--+---+--+
-|89|lens.server.ui.static.dir|webapp/lens-server/static|The base directory to server UI static files from|
+|90|lens.server.ui.static.dir|webapp/lens-server/static|The base directory to server UI static files from|
 *--+--+---+--+
-|90|lens.server.user.resolver.custom.class|full.package.name.Classname|Required for CUSTOM user resolver. In case the provided implementations are not sufficient for user config resolver, a custom classname can be provided. Class should extend org.apache.lens.server.user.UserConfigLoader|
+|91|lens.server.user.resolver.custom.class|full.package.name.Classname|Required for CUSTOM user resolver. In case the provided implementations are not sufficient for user config resolver, a custom classname can be provided. Class should extend org.apache.lens.server.user.UserConfigLoader|
 *--+--+---+--+
-|91|lens.server.user.resolver.db.keys|lens.session.cluster.user,mapred.job.queue.name|Required for DATABASE and LDAP_BACKED_DATABASE user resolvers. For database based user config loaders, the conf keys that will be loaded from database.|
+|92|lens.server.user.resolver.db.keys|lens.session.cluster.user,mapred.job.queue.name|Required for DATABASE and LDAP_BACKED_DATABASE user resolvers. For database based user config loaders, the conf keys that will be loaded from database.|
 *--+--+---+--+
-|92|lens.server.user.resolver.db.query|select clusteruser,queue from user_config_table where username=?|Required for DATABASE and LDAP_BACKED_DATABASE user resolvers. For database based user config loader, this query will be run with single argument = logged in user and the result columns will be assigned to lens.server.user.resolver.db.keys in order. For ldap backed database resolver, the argument to this query will be the intermediate values obtained from ldap.|
+|93|lens.server.user.resolver.db.query|select clusteruser,queue from user_config_table where username=?|Required for DATABASE and LDAP_BACKED_DATABASE user resolvers. For database based user config loader, this query will be run with single argument = logged in user and the result columns will be assigned to lens.server.user.resolver.db.keys in order. For ldap backed database resolver, the argument to this query will be the intermediate values obtained from ldap.|
 *--+--+---+--+
-|93|lens.server.user.resolver.fixed.value| |Required for FIXED user resolver. when lens.server.user.resolver.type=FIXED, This will be the value cluster user will resolve to.|
+|94|lens.server.user.resolver.fixed.value| |Required for FIXED user resolver. when lens.server.user.resolver.type=FIXED, This will be the value cluster user will resolve to.|
 *--+--+---+--+
-|94|lens.server.user.resolver.ldap.bind.dn| |Required for LDAP_BACKED_DATABASE user resolvers. ldap dn for admin binding example: CN=company-it-admin,ou=service-account,ou=company-service-account,dc=dc1,dc=com...|
+|95|lens.server.user.resolver.ldap.bind.dn| |Required for LDAP_BACKED_DATABASE user resolvers. ldap dn for admin binding example: CN=company-it-admin,ou=service-account,ou=company-service-account,dc=dc1,dc=com...|
 *--+--+---+--+
-|95|lens.server.user.resolver.ldap.bind.password| |Required for LDAP_BACKED_DATABASE user resolvers. ldap password for admin binding above|
+|96|lens.server.user.resolver.ldap.bind.password| |Required for LDAP_BACKED_DATABASE user resolvers. ldap password for admin binding above|
 *--+--+---+--+
-|96|lens.server.user.resolver.ldap.fields|department|Required for LDAP_BACKED_DATABASE user resolvers. list of fields to be obtained from ldap. These will be cached by the intermediate db.|
+|97|lens.server.user.resolver.ldap.fields|department|Required for LDAP_BACKED_DATABASE user resolvers. list of fields to be obtained from ldap. These will be cached by the intermediate db.|
 *--+--+---+--+
-|97|lens.server.user.resolver.ldap.intermediate.db.delete.sql|delete from user_department where username=?|Required for LDAP_BACKED_DATABASE user resolvers. query to delete intermediate values from database backing ldap as cache. one argument: logged in user.|
+|98|lens.server.user.resolver.ldap.intermediate.db.delete.sql|delete from user_department where username=?|Required for LDAP_BACKED_DATABASE user resolvers. query to delete intermediate values from database backing ldap as cache. one argument: logged in user.|
 *--+--+---+--+
-|98|lens.server.user.resolver.ldap.intermediate.db.insert.sql|insert into user_department (username, department, expiry) values (?, ?, ?)|Required for LDAP_BACKED_DATABASE user resolvers. query to insert intermediate values from database backing ldap as cache. arguments: first logged in user, then all intermediate values, then current time + expiration time|
+|99|lens.server.user.resolver.ldap.intermediate.db.insert.sql|insert into user_department (username, department, expiry) values (?, ?, ?)|Required for LDAP_BACKED_DATABASE user resolvers. query to insert intermediate values from database backing ldap as cache. arguments: first logged in user, then all intermediate values, then current time + expiration time|
 *--+--+---+--+
-|99|lens.server.user.resolver.ldap.intermediate.db.query|select department from user_department where username=? and expiry>?|Required for LDAP_BACKED_DATABASE user resolvers. query to obtain intermediate values from database backing ldap as cache. two arguments: logged in user and current time.|
+|100|lens.server.user.resolver.ldap.intermediate.db.query|select department from user_department where username=? and expiry>?|Required for LDAP_BACKED_DATABASE user resolvers. query to obtain intermediate values from database backing ldap as cache. two arguments: logged in user and current time.|
 *--+--+---+--+
-|100|lens.server.user.resolver.ldap.search.base| |Required for LDAP_BACKED_DATABASE user resolvers. for searching intermediate values for a user, the search keys. example: cn=users,dc=dc1,dc=dc2...|
+|101|lens.server.user.resolver.ldap.search.base| |Required for LDAP_BACKED_DATABASE user resolvers. for searching intermediate values for a user, the search keys. example: cn=users,dc=dc1,dc=dc2...|
 *--+--+---+--+
-|101|lens.server.user.resolver.ldap.search.filter|(&(objectClass=user)(sAMAccountName=%s))|Required for LDAP_BACKED_DATABASE user resolvers. filter pattern for ldap search|
+|102|lens.server.user.resolver.ldap.search.filter|(&(objectClass=user)(sAMAccountName=%s))|Required for LDAP_BACKED_DATABASE user resolvers. filter pattern for ldap search|
 *--+--+---+--+
-|102|lens.server.user.resolver.ldap.url| |Required for LDAP_BACKED_DATABASE user resolvers. ldap url to connect to.|
+|103|lens.server.user.resolver.ldap.url| |Required for LDAP_BACKED_DATABASE user resolvers. ldap url to connect to.|
 *--+--+---+--+
-|103|lens.server.user.resolver.propertybased.filename|/path/to/propertyfile|Required for PROPERTYBASED user resolver. when lens.server.user.resolver.type is PROPERTYBASED, then this file will be read and parsed to determine cluster user. Each line should contain username followed by DOT followed by property full name followed by equal-to sign and followed by value. example schema of the file is: user1.lens.server.cluster.user=clusteruser1 user1.mapred.job.queue.name=queue1 *.lens.server.cluster.user=defaultclusteruser *.mapred.job.queue.name=default|
+|104|lens.server.user.resolver.propertybased.filename|/path/to/propertyfile|Required for PROPERTYBASED user resolver. when lens.server.user.resolver.type is PROPERTYBASED, then this file will be read and parsed to determine cluster user. Each line should contain username followed by DOT followed by property full name followed by equal-to sign and followed by value. example schema of the file is: user1.lens.server.cluster.user=clusteruser1 user1.mapred.job.queue.name=queue1 *.lens.server.cluster.user=defaultclusteruser *.mapred.job.queue.name=default|
 *--+--+---+--+
-|104|lens.server.user.resolver.type|FIXED|Type of user config resolver. allowed values are FIXED, PROPERTYBASED, DATABASE, LDAP_BACKED_DATABASE, CUSTOM.|
+|105|lens.server.user.resolver.type|FIXED|Type of user config resolver. allowed values are FIXED, PROPERTYBASED, DATABASE, LDAP_BACKED_DATABASE, CUSTOM.|
 *--+--+---+--+
-|105|lens.server.waiting.queries.selection.policy.factories|org.apache.lens.server.query.collect.UserSpecificWaitingQueriesSelectionPolicyFactory|Factories used to instantiate waiting queries selection policies. Every factory should be an implementation of org.apache.lens.server.api.common.ConfigBasedObjectCreationFactory and create an implementation of org.apache.lens.server.api.query.collect.WaitingQueriesSelectionPolicy.|
+|106|lens.server.waiting.queries.selection.policy.factories|org.apache.lens.server.query.collect.UserSpecificWaitingQueriesSelectionPolicyFactory|Factories used to instantiate waiting queries selection policies. Every factory should be an implementation of org.apache.lens.server.api.common.ConfigBasedObjectCreationFactory and create an implementation of org.apache.lens.server.api.query.collect.WaitingQueriesSelectionPolicy.|
 *--+--+---+--+
-|106|lens.server.ws.featurenames|multipart|These JAX-RS Feature(s) would be started in the specified order when lens-server starts up|
+|107|lens.server.ws.featurenames|multipart|These JAX-RS Feature(s) would be started in the specified order when lens-server starts up|
 *--+--+---+--+
-|107|lens.server.ws.filternames|authentication,consistentState,serverMode|These JAX-RS filters would be started in the specified order when lens-server starts up|
+|108|lens.server.ws.filternames|authentication,consistentState,serverMode|These JAX-RS filters would be started in the specified order when lens-server starts up|
 *--+--+---+--+
-|108|lens.server.ws.listenernames|appevent|These listeners would be called in the specified order when lens-server starts up|
+|109|lens.server.ws.listenernames|appevent|These listeners would be called in the specified order when lens-server starts up|
 *--+--+---+--+
-|109|lens.server.ws.resourcenames|session,metastore,query,quota,scheduler,index,log|These JAX-RS resources would be started in the specified order when lens-server starts up|
+|110|lens.server.ws.resourcenames|session,metastore,query,quota,scheduler,index,log|These JAX-RS resources would be started in the specified order when lens-server starts up|
 *--+--+---+--+
 The configuration parameters and their default values

http://git-wip-us.apache.org/repos/asf/lens/blob/2539f338/src/site/apt/admin/session-config.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/admin/session-config.apt b/src/site/apt/admin/session-config.apt
index 05a2c2c..e108a13 100644
--- a/src/site/apt/admin/session-config.apt
+++ b/src/site/apt/admin/session-config.apt
@@ -98,6 +98,6 @@ Lens session configuration
 *--+--+---+--+
 |37|lens.session.loggedin.user| |The username used to log in to lens. e.g. LDAP user|
 *--+--+---+--+
-|38|lens.session.metastore.exclude.cubetables.from.nativetables|true|Exclude cube related tables when fetching native tables.|
+|38|lens.session.metastore.exclude.cubetables.from.nativetables|true|Exclude cube related tables when fetching native tables|
 *--+--+---+--+
 The configuration parameters and their default values


[15/51] [abbrv] lens git commit: LENS-836: Query commands in CLI should take default value for query handle as the last executed query

Posted by de...@apache.org.
LENS-836: Query commands in CLI should take default value for query handle as the last executed query


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/7a89db13
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/7a89db13
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/7a89db13

Branch: refs/heads/current-release-line
Commit: 7a89db13e74984de81d840dc015c4ba59471d785
Parents: 04f5a82
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Thu Dec 24 13:19:12 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Thu Dec 24 13:19:12 2015 +0530

----------------------------------------------------------------------
 .../lens/cli/commands/LensQueryCommands.java    | 76 ++++++++++++--------
 .../apache/lens/cli/TestLensQueryCommands.java  |  7 +-
 .../java/org/apache/lens/client/LensClient.java |  9 +--
 .../org/apache/lens/client/LensStatement.java   | 12 +---
 src/site/apt/user/cli.apt                       | 10 +--
 5 files changed, 62 insertions(+), 52 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/7a89db13/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
index e3c08ff..a29600d 100644
--- a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
+++ b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
@@ -61,6 +61,8 @@ import com.google.common.base.Joiner;
     + "  <<<query execute cube select id,name from dim_table where name != \"\"first\"\">>>,\n"
     + "  will be parsed as <<<cube select id,name from dim_table where name != \"first\">>>")
 public class LensQueryCommands extends BaseLensCommand {
+  private static final String DEFAULT_QUERY_HANDLE_DESCRIPTION =
+    "If not provided, takes last query handle interacted with.";
 
   /**
    * Execute query.
@@ -110,7 +112,6 @@ public class LensQueryCommands extends BaseLensCommand {
    */
   private String formatResultSet(LensClient.LensClientResultSetWithStats rs) {
     StringBuilder b = new StringBuilder();
-    int numRows = 0;
     if (rs.getResultSet() != null) {
       QueryResultSetMetadata resultSetMetadata = rs.getResultSet().getResultSetMetadata();
       for (ResultColumn column : resultSetMetadata.getColumns()) {
@@ -125,7 +126,7 @@ public class LensQueryCommands extends BaseLensCommand {
         PersistentQueryResult temp = (PersistentQueryResult) r;
         b.append("Results of query stored at : ").append(temp.getPersistedURI()).append("  ");
         if (null != temp.getNumRows()) {
-          b.append(temp.getNumRows() + " rows ");
+          b.append(temp.getNumRows()).append(" rows ");
         }
       }
     }
@@ -139,20 +140,33 @@ public class LensQueryCommands extends BaseLensCommand {
     return b.toString();
   }
 
+  public String getOrDefaultQueryHandleString(String queryHandleString) {
+    if (queryHandleString != null) {
+      return queryHandleString;
+    }
+    if (getClient().getStatement().getQuery() != null) {
+      return getClient().getStatement().getQueryHandleString();
+    }
+    throw new IllegalArgumentException("Query handle not provided and no queries interacted with in the session.");
+  }
+
   /**
    * Gets the status.
    *
    * @param qh the qh
    * @return the status
    */
-  @CliCommand(value = "query status", help = "Fetch status of executed query having query handle <query_handle>")
+  @CliCommand(value = "query status",
+    help = "Fetch status of executed query having query handle <query_handle>. " + DEFAULT_QUERY_HANDLE_DESCRIPTION)
   public String getStatus(
-    @CliOption(key = {"", "query_handle"}, mandatory = true, help = "<query_handle>") String qh) {
-    QueryStatus status = getClient().getQueryStatus(new QueryHandle(UUID.fromString(qh)));
+    @CliOption(key = {"", "query_handle"}, mandatory = false, help = "<query_handle>") String qh) {
+    qh = getOrDefaultQueryHandleString(qh);
+    QueryHandle handle = QueryHandle.fromString(qh);
+    QueryStatus status = getClient().getQueryStatus(handle);
     if (status == null) {
-      return "Unable to find status for " + qh;
+      return "Unable to find status for " + handle;
     }
-    return status.toString();
+    return "Query Handle: " + qh + "\n" + status.toString();
   }
 
   /**
@@ -161,15 +175,15 @@ public class LensQueryCommands extends BaseLensCommand {
    * @param qh the qh
    * @return the query
    */
-  @CliCommand(value = "query details", help = "Get query details of query with handle <query_handle>")
+  @CliCommand(value = "query details",
+    help = "Get query details of query with handle <query_handle>." + DEFAULT_QUERY_HANDLE_DESCRIPTION)
   public String getDetails(
-    @CliOption(key = {"", "query_handle"}, mandatory = true, help
-      = "<query_handle>") String qh) {
+    @CliOption(key = {"", "query_handle"}, mandatory = false, help = "<query_handle>") String qh) {
+    qh = getOrDefaultQueryHandleString(qh);
     LensQuery query = getClient().getQueryDetails(qh);
     if (query == null) {
       return "Unable to find query for " + qh;
     }
-
     try {
       return formatJson(mapper.writer(pp).writeValueAsString(query));
     } catch (IOException e) {
@@ -185,10 +199,11 @@ public class LensQueryCommands extends BaseLensCommand {
    * @throws LensAPIException
    * @throws UnsupportedEncodingException the unsupported encoding exception
    */
-  @CliCommand(value = "query explain", help = "Explain execution plan of query <query-string>. "
+  @CliCommand(value = "query explain",
+    help = "Explain execution plan of query <query-string>. "
       + "Can optionally save the plan to a file by providing <save_location>")
-  public String explainQuery(@CliOption(key = { "", "query" }, mandatory = true, help = "<query-string>") String sql,
-      @CliOption(key = { "save_location" }, mandatory = false, help = "<save_location>") final File path)
+  public String explainQuery(@CliOption(key = {"", "query"}, mandatory = true, help = "<query-string>") String sql,
+    @CliOption(key = {"save_location"}, mandatory = false, help = "<save_location>") final File path)
     throws IOException, LensAPIException {
     PrettyPrintable cliOutput;
 
@@ -197,7 +212,7 @@ public class LensQueryCommands extends BaseLensCommand {
       if (path != null && StringUtils.isNotBlank(path.getPath())) {
         String validPath = getValidPath(path, false, false);
         try (OutputStreamWriter osw = new OutputStreamWriter(new FileOutputStream(validPath),
-            Charset.defaultCharset())) {
+          Charset.defaultCharset())) {
           osw.write(plan.getPlanString());
         }
         return "Saved to " + validPath;
@@ -250,9 +265,10 @@ public class LensQueryCommands extends BaseLensCommand {
    * @param qh the qh
    * @return the string
    */
-  @CliCommand(value = "query kill", help = "Kill query with handle <query_handle>")
-  public String killQuery(
-    @CliOption(key = {"", "query_handle"}, mandatory = true, help = "<query_handle>") String qh) {
+  @CliCommand(value = "query kill", help = "Kill query with handle <query_handle>." + DEFAULT_QUERY_HANDLE_DESCRIPTION)
+  public String killQuery(@CliOption(key = {"", "query_handle"}, mandatory = false, help = "<query_handle>") String
+    qh) {
+    qh = getOrDefaultQueryHandleString(qh);
     boolean status = getClient().killQuery(new QueryHandle(UUID.fromString(qh)));
     if (status) {
       return "Successfully killed " + qh;
@@ -268,19 +284,19 @@ public class LensQueryCommands extends BaseLensCommand {
    * @return the query results
    */
   @CliCommand(value = "query results",
-    help = "get results of query with query handle <query_handle>. If async is false "
-      + "then wait till the query execution is completed, it's by default true. "
+    help = "get results of query with query handle <query_handle>. " + DEFAULT_QUERY_HANDLE_DESCRIPTION
+      + "If async is false then wait till the query execution is completed, it's by default true. "
       + "Can optionally save the results to a file by providing <save_location>.")
   public String getQueryResults(
-    @CliOption(key = {"", "query_handle"}, mandatory = true, help = "<query_handle>") String qh,
+    @CliOption(key = {"", "query_handle"}, mandatory = false, help = "<query_handle>") String qh,
     @CliOption(key = {"save_location"}, mandatory = false, help = "<save_location>") final File path,
     @CliOption(key = {"async"}, mandatory = false, unspecifiedDefaultValue = "true",
       help = "<async>") boolean async) {
+    qh = getOrDefaultQueryHandleString(qh);
     QueryHandle queryHandle = new QueryHandle(UUID.fromString(qh));
     LensClient.LensClientResultSetWithStats results;
     String location = path != null ? path.getPath() : null;
     try {
-      String prefix = "";
       if (StringUtils.isNotBlank(location)) {
         location = getValidPath(path, true, true);
         Response response = getClient().getHttpResults(queryHandle);
@@ -289,7 +305,7 @@ public class LensQueryCommands extends BaseLensCommand {
           String fileName = disposition.split("=")[1].trim();
           location = getValidPath(new File(location + File.separator + fileName), false, false);
           try (InputStream stream = response.readEntity(InputStream.class);
-            FileOutputStream outStream = new FileOutputStream(new File(location))) {
+               FileOutputStream outStream = new FileOutputStream(new File(location))) {
             IOUtils.copy(stream, outStream);
           }
           return "Saved to " + location;
@@ -364,7 +380,7 @@ public class LensQueryCommands extends BaseLensCommand {
       StringBuilder sb = new StringBuilder()
         .append("User query:").append(prepared.getUserQuery()).append("\n")
         .append("Prepare handle:").append(prepared.getPrepareHandle()).append("\n")
-        .append("User:" + prepared.getPreparedUser()).append("\n")
+        .append("User:").append(prepared.getPreparedUser()).append("\n")
         .append("Prepared at:").append(prepared.getPreparedTime()).append("\n")
         .append("Selected driver :").append(prepared.getSelectedDriverName()).append("\n")
         .append("Driver query:").append(prepared.getDriverQuery()).append("\n");
@@ -454,19 +470,17 @@ public class LensQueryCommands extends BaseLensCommand {
    *           the unsupported encoding exception
    * @throws LensAPIException
    */
-  @CliCommand(value = "prepQuery explain", help = "Explain and prepare query <query-string>. "
-      + "Can optionally provide <query-name>")
+  @CliCommand(value = "prepQuery explain",
+    help = "Explain and prepare query <query-string>. Can optionally provide <query-name>")
   public String explainAndPrepare(
 
-  @CliOption(key = { "", "query" }, mandatory = true, help = "<query-string>") String sql,
-      @CliOption(key = { "name" }, mandatory = false, help = "<query-name>") String queryName)
+    @CliOption(key = {"", "query"}, mandatory = true, help = "<query-string>") String sql,
+    @CliOption(key = {"name"}, mandatory = false, help = "<query-name>") String queryName)
     throws UnsupportedEncodingException, LensAPIException {
     PrettyPrintable cliOutput;
     try {
       QueryPlan plan = getClient().explainAndPrepare(sql, queryName).getData();
-      StringBuilder planStr = new StringBuilder(plan.getPlanString());
-      planStr.append("\n").append("Prepare handle:").append(plan.getPrepareHandle());
-      return planStr.toString();
+      return plan.getPlanString() + "\n" + "Prepare handle:" + plan.getPrepareHandle();
     } catch (final LensAPIException e) {
       BriefError briefError = new BriefError(e.getLensAPIErrorCode(), e.getLensAPIErrorMessage());
       cliOutput = new IdBriefErrorTemplate(IdBriefErrorTemplateKey.REQUEST_ID, e.getLensAPIRequestId(), briefError);

http://git-wip-us.apache.org/repos/asf/lens/blob/7a89db13/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java b/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
index 6ebfff7..2de3cc1 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
@@ -102,6 +102,7 @@ public class TestLensQueryCommands extends LensCliApplicationTest {
     String sql = "cube select id,name from test_dim";
     String result = qCom.executeQuery(sql, false, "testQuery2");
     assertTrue(result.contains("1\tfirst"), result);
+
   }
 
   /**
@@ -237,11 +238,11 @@ public class TestLensQueryCommands extends LensCliApplicationTest {
     String[] resultSplits = result.split("\n");
     // assert on the number of queries
     assertEquals(String.valueOf(resultSplits.length - 1), resultSplits[resultSplits.length - 1].split(": ")[1]);
-
+    assertEquals(qCom.getOrDefaultQueryHandleString(null), qh);
     QueryStatus queryStatus = qCom.getClient().getQueryStatus(qh);
     while (!queryStatus.finished()) {
       if (queryStatus.launched()) {
-        String details = qCom.getDetails(qh);
+        String details = qCom.getDetails(null);
         assertTrue(details.contains("driverQuery"));
       }
       Thread.sleep(1000);
@@ -256,7 +257,7 @@ public class TestLensQueryCommands extends LensCliApplicationTest {
     String details = qCom.getDetails(qh);
     assertTrue(details.contains("driverQuery"));
 
-    result = qCom.getQueryResults(qh, null, true);
+    result = qCom.getQueryResults(null, null, true);
     assertTrue(result.contains("1\tfirst"));
 
     downloadResult(qCom, qh, result);

http://git-wip-us.apache.org/repos/asf/lens/blob/7a89db13/lens-client/src/main/java/org/apache/lens/client/LensClient.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/LensClient.java b/lens-client/src/main/java/org/apache/lens/client/LensClient.java
index 8f197e4..f7f99c7 100644
--- a/lens-client/src/main/java/org/apache/lens/client/LensClient.java
+++ b/lens-client/src/main/java/org/apache/lens/client/LensClient.java
@@ -57,6 +57,7 @@ public class LensClient {
   private LensConnection connection;
   private final HashMap<QueryHandle, LensStatement> statementMap =
     Maps.newHashMap();
+  @Getter
   private final LensStatement statement;
 
   @Getter
@@ -192,11 +193,11 @@ public class LensClient {
   }
 
   public QueryStatus getQueryStatus(QueryHandle query) {
-    return new LensStatement(connection).getQuery(query).getStatus();
+    return statement.getQuery(query).getStatus();
   }
 
   public LensQuery getQueryDetails(QueryHandle handle) {
-    return new LensStatement(connection).getQuery(handle);
+    return statement.getQuery(handle);
   }
 
   public QueryStatus getQueryStatus(String q) {
@@ -208,7 +209,7 @@ public class LensClient {
   }
 
   public LensAPIResult<QueryPlan> getQueryPlan(String q) throws LensAPIException {
-    return new LensStatement(connection).explainQuery(q);
+    return statement.explainQuery(q);
   }
 
   public boolean killQuery(QueryHandle q) {
@@ -228,7 +229,7 @@ public class LensClient {
 
   public List<QueryHandle> getQueries(String state, String queryName, String user, String driver, long fromDate,
     long toDate) {
-    return new LensStatement(connection).getAllQueries(state, queryName, user, driver, fromDate, toDate);
+    return statement.getAllQueries(state, queryName, user, driver, fromDate, toDate);
   }
 
   private void connectToLensServer() {

http://git-wip-us.apache.org/repos/asf/lens/blob/7a89db13/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/LensStatement.java b/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
index 71caa48..8de7708 100644
--- a/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
+++ b/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
@@ -40,9 +40,12 @@ import org.glassfish.jersey.media.multipart.FormDataBodyPart;
 import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
 import org.glassfish.jersey.media.multipart.FormDataMultiPart;
 
+import lombok.RequiredArgsConstructor;
+
 /**
  * Top level class which is used to execute lens queries.
  */
+@RequiredArgsConstructor
 public class LensStatement {
 
   /** The connection. */
@@ -52,15 +55,6 @@ public class LensStatement {
   private LensQuery query;
 
   /**
-   * Instantiates a new lens statement.
-   *
-   * @param connection the connection
-   */
-  public LensStatement(LensConnection connection) {
-    this.connection = connection;
-  }
-
-  /**
    * Execute.
    *
    * @param sql                    the sql

http://git-wip-us.apache.org/repos/asf/lens/blob/7a89db13/src/site/apt/user/cli.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/user/cli.apt b/src/site/apt/user/cli.apt
index 3db53c4..65380e6 100644
--- a/src/site/apt/user/cli.apt
+++ b/src/site/apt/user/cli.apt
@@ -347,19 +347,19 @@ User CLI Commands
 *--+--+
 |prepQuery prepare [--query] \<query-string\> [--name \<query-name\>]|Prepare query <<<query-string>>> and return prepare handle. Can optionally provide <<<query-name>>>|
 *--+--+
-|query details [--query_handle] \<query_handle\>|Get query details of query with handle <<<query_handle>>>|
+|query details [[--query_handle] \<query_handle\>]|Get query details of query with handle <<<query_handle>>>. If not provided, takes last query handle interacted with.|
 *--+--+
 |query execute [--query] \<query-string\> [--async \<async\>] [--name \<query-name\>]|Execute query <<<query-string>>>. If <<<async>>> is true, The query is launched in async manner and query handle is returned. It's by default false. <<<query name>>> can also be provided, though not required|
 *--+--+
 |query explain [--query] \<query-string\> [--save_location \<save_location\>]|Explain execution plan of query <<<query-string>>>. Can optionally save the plan to a file by providing <<<save_location>>>|
 *--+--+
-|query kill [--query_handle] \<query_handle\>|Kill query with handle <<<query_handle>>>|
+|query kill [[--query_handle] \<query_handle\>]|Kill query with handle <<<query_handle>>>. If not provided, takes last query handle interacted with.|
 *--+--+
-|query list [--state \<query-status\>] [--name \<query-name\>] [--user \<user-who-submitted-query\>] [--driver \<driver-where-query-was-executed\>] [--fromDate \<submission-time-is-after\>] [--toDate \<submission-time-is-before\>]|Get all queries. Various filter options can be provided(optionally),  as can be seen from the command syntax|
+|query list [--state \<query-status\>] [--name \<query-name\>] [--user \<user-who-submitted-query\>] [--driver \<driver-where-query-ran\>] [--fromDate \<submission-time-is-after\>] [--toDate \<submission-time-is-before\>]|Get all queries. Various filter options can be provided(optionally),  as can be seen from the command syntax|
 *--+--+
-|query results [--query_handle] \<query_handle\> [--save_location \<save_location\>] [--async \<async\>]|get results of query with query handle <<<query_handle>>>. If async is false then wait till the query execution is completed, it's by default true. Can optionally save the results to a file by providing <<<save_location>>>.|
+|query results [[--query_handle] \<query_handle\>] [--save_location \<save_location\>] [--async \<async\>]|get results of query with query handle <<<query_handle>>>. If not provided, takes last query handle interacted with. If async is false then wait till the query execution is completed, it's by default true. Can optionally save the results to a file by providing <<<save_location>>>.|
 *--+--+
-|query status [--query_handle] \<query_handle\>|Fetch status of executed query having query handle <<<query_handle>>>|
+|query status [[--query_handle] \<query_handle\>]|Fetch status of executed query having query handle <<<query_handle>>>. If not provided, takes last query handle interacted with.|
 *--+--+
   <<Lens Query Commands>>
 


[31/51] [abbrv] lens git commit: LENS-910 : Add session config to skip filtering cube related tables from all the tables in a database

Posted by de...@apache.org.
LENS-910 : Add session config to skip filtering cube related tables from all the tables in a database


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/edcdd968
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/edcdd968
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/edcdd968

Branch: refs/heads/current-release-line
Commit: edcdd968541b6de572b65abbe92b4f3601c2f7a3
Parents: c7451f8
Author: Deepak Barr <de...@apache.org>
Authored: Tue Jan 12 12:24:29 2016 +0530
Committer: Deepak Kumar Barr <de...@apache.org>
Committed: Tue Jan 12 12:24:29 2016 +0530

----------------------------------------------------------------------
 .../lens/cli/TestLensNativeTableCommands.java   |  4 ++-
 .../lens/server/api/LensConfConstants.java      | 10 ++++++++
 .../metastore/CubeMetastoreServiceImpl.java     |  6 +++++
 .../src/main/resources/lenssession-default.xml  |  6 +++++
 .../apache/lens/server/LensServerTestUtil.java  |  5 +++-
 .../server/metastore/TestMetastoreService.java  | 26 +++++++++++++++++++-
 src/site/apt/admin/session-config.apt           |  2 ++
 7 files changed, 56 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/edcdd968/lens-cli/src/test/java/org/apache/lens/cli/TestLensNativeTableCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/TestLensNativeTableCommands.java b/lens-cli/src/test/java/org/apache/lens/cli/TestLensNativeTableCommands.java
index d453803..e5f11f2 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/TestLensNativeTableCommands.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/TestLensNativeTableCommands.java
@@ -18,6 +18,8 @@
  */
 package org.apache.lens.cli;
 
+import java.util.HashMap;
+
 import org.apache.lens.cli.commands.LensNativeTableCommands;
 import org.apache.lens.client.LensClient;
 import org.apache.lens.server.LensServerTestUtil;
@@ -50,7 +52,7 @@ public class TestLensNativeTableCommands extends LensCliApplicationTest {
       LOG.debug("Starting to test nativetable commands");
       String tblList = command.showNativeTables();
       Assert.assertFalse(tblList.contains("test_native_table_command"));
-      LensServerTestUtil.createHiveTable("test_native_table_command");
+      LensServerTestUtil.createHiveTable("test_native_table_command", new HashMap<String, String>());
       tblList = command.showNativeTables();
       Assert.assertTrue(tblList.contains("test_native_table_command"));
 

http://git-wip-us.apache.org/repos/asf/lens/blob/edcdd968/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java b/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
index 88e5a01..a3dbfc0 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
@@ -996,4 +996,14 @@ public final class LensConfConstants {
    * The Constant DEFAULT_HDFS_OUTPUT_RETENTION.
    */
   public static final String DEFAULT_HDFS_OUTPUT_RETENTION = "1 day";
+
+  /**
+   * The Constant EXCLUDE_CUBE_TABLES.
+   */
+  public static final String EXCLUDE_CUBE_TABLES = SESSION_PFX + "metastore.exclude.cubetables.from.nativetables";
+
+  /**
+   * The Constant DEFAULT_EXCLUDE_CUBE_TABLES.
+   */
+  public static final boolean DEFAULT_EXCLUDE_CUBE_TABLES = true;
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/edcdd968/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
index cf49a13..fc67df1 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
@@ -31,12 +31,14 @@ import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.timeline.PartitionTimeline;
 import org.apache.lens.server.BaseLensService;
 import org.apache.lens.server.LensServerConf;
+import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.health.HealthStatus;
 import org.apache.lens.server.api.metastore.CubeMetastoreService;
 import org.apache.lens.server.session.LensSessionImpl;
 
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.*;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -1193,6 +1195,10 @@ public class CubeMetastoreServiceImpl extends BaseLensService implements CubeMet
       msc = getSession(sessionid).getMetaStoreClient();
       List<String> tables = msc.getAllTables(
         dbName);
+      Configuration conf = getSession(sessionid).getSessionConf();
+      if (!conf.getBoolean(LensConfConstants.EXCLUDE_CUBE_TABLES, LensConfConstants.DEFAULT_EXCLUDE_CUBE_TABLES)) {
+        return tables;
+      }
       List<String> result = new ArrayList<String>();
       if (tables != null && !tables.isEmpty()) {
         List<org.apache.hadoop.hive.metastore.api.Table> tblObjects =

http://git-wip-us.apache.org/repos/asf/lens/blob/edcdd968/lens-server/src/main/resources/lenssession-default.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/main/resources/lenssession-default.xml b/lens-server/src/main/resources/lenssession-default.xml
index 52e0cd0..a321c3f 100644
--- a/lens-server/src/main/resources/lenssession-default.xml
+++ b/lens-server/src/main/resources/lenssession-default.xml
@@ -311,4 +311,10 @@
     <description>Whether to fail the query of data is partial</description>
   </property>
 
+  <property>
+    <name>lens.session.metastore.exclude.cubetables.from.nativetables</name>
+    <value>true</value>
+    <description>Exclude cube related tables when fetching native tables</description>
+  </property>
+
 </configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/edcdd968/lens-server/src/test/java/org/apache/lens/server/LensServerTestUtil.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/LensServerTestUtil.java b/lens-server/src/test/java/org/apache/lens/server/LensServerTestUtil.java
index 57bedee..94dd394 100644
--- a/lens-server/src/test/java/org/apache/lens/server/LensServerTestUtil.java
+++ b/lens-server/src/test/java/org/apache/lens/server/LensServerTestUtil.java
@@ -223,13 +223,16 @@ public final class LensServerTestUtil {
    * @param tableName the table name
    * @throws HiveException the hive exception
    */
-  public static void createHiveTable(String tableName) throws HiveException {
+  public static void createHiveTable(String tableName, Map<String, String> parameters) throws HiveException {
     List<FieldSchema> columns = new ArrayList<FieldSchema>();
     columns.add(new FieldSchema("col1", "string", ""));
     List<FieldSchema> partCols = new ArrayList<FieldSchema>();
     partCols.add(new FieldSchema("pcol1", "string", ""));
     Map<String, String> params = new HashMap<String, String>();
     params.put("test.hive.table.prop", "tvalue");
+    if (null != parameters && !parameters.isEmpty()) {
+      params.putAll(parameters);
+    }
     Table tbl = Hive.get().newTable(tableName);
     tbl.setTableType(TableType.MANAGED_TABLE);
     tbl.getTTable().getSd().setCols(columns);

http://git-wip-us.apache.org/repos/asf/lens/blob/edcdd968/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
index b0044da..925fc86 100644
--- a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
@@ -50,6 +50,7 @@ import org.apache.lens.cube.metadata.ExprColumn.ExprSpec;
 import org.apache.lens.server.LensJerseyTest;
 import org.apache.lens.server.LensServerTestUtil;
 import org.apache.lens.server.LensServices;
+import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.metastore.CubeMetastoreService;
 import org.apache.lens.server.api.util.LensUtil;
 
@@ -2415,7 +2416,7 @@ public class TestMetastoreService extends LensJerseyTest {
       // create hive table
       String tableName = "test_simple_table";
       SessionState.get().setCurrentDatabase(DB);
-      LensServerTestUtil.createHiveTable(tableName);
+      LensServerTestUtil.createHiveTable(tableName, new HashMap<String, String>());
 
       WebTarget target = target().path("metastore").path("nativetables");
       // get all native tables
@@ -2447,6 +2448,29 @@ public class TestMetastoreService extends LensJerseyTest {
       assertEquals(nativetables.getElements().size(), 1);
       assertEquals(nativetables.getElements().get(0), tableName);
 
+      // test for lens.session.metastore.exclude.cubetables.from.nativetables session config
+      String cubeTableName = "test_cube_table";
+      Map<String, String> params = new HashMap<String, String>();
+      params.put(MetastoreConstants.TABLE_TYPE_KEY, CubeTableType.CUBE.name());
+      LensServerTestUtil.createHiveTable(cubeTableName, params);
+
+      // Test for excluding cube tables
+      nativetables = target.queryParam("sessionid", lensSessionId).queryParam("dbName", DB)
+        .queryParam("dbOption", "current").request(mediaType).get(StringList.class);
+      assertEquals(nativetables.getElements().size(), 1);
+      assertEquals(nativetables.getElements().get(0), tableName);
+
+      // Test for not excluding cube tables
+      Map<String, String> sessionConf = new HashMap<String, String>();
+      sessionConf.put(LensConfConstants.EXCLUDE_CUBE_TABLES, "false");
+      LensSessionHandle lensSessionId2 =
+        metastoreService.openSession("foo", "bar", sessionConf);
+      nativetables = target.queryParam("sessionid", lensSessionId2).queryParam("dbName", DB)
+        .queryParam("dbOption", "current").request(mediaType).get(StringList.class);
+      assertEquals(nativetables.getElements().size(), 2);
+      assertTrue(nativetables.getElements().contains(tableName));
+      assertTrue(nativetables.getElements().contains(cubeTableName));
+
       // Now get the table
       JAXBElement<XNativeTable> actualElement = target.path(tableName).queryParam(
         "sessionid", lensSessionId).request(mediaType).get(new GenericType<JAXBElement<XNativeTable>>() {});

http://git-wip-us.apache.org/repos/asf/lens/blob/edcdd968/src/site/apt/admin/session-config.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/admin/session-config.apt b/src/site/apt/admin/session-config.apt
index 5ed51c3..05a2c2c 100644
--- a/src/site/apt/admin/session-config.apt
+++ b/src/site/apt/admin/session-config.apt
@@ -98,4 +98,6 @@ Lens session configuration
 *--+--+---+--+
 |37|lens.session.loggedin.user| |The username used to log in to lens. e.g. LDAP user|
 *--+--+---+--+
+|38|lens.session.metastore.exclude.cubetables.from.nativetables|true|Exclude cube related tables when fetching native tables.|
+*--+--+---+--+
 The configuration parameters and their default values


[23/51] [abbrv] lens git commit: LENS-735 : Remove accepting TableReferences for ReferenceDimAttribute

Posted by de...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
index d9e442d..2cf92b9 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
@@ -21,6 +21,7 @@ package org.apache.lens.cube.parse;
 
 import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
+import static org.apache.lens.cube.parse.TestCubeRewriter.compareQueries;
 
 import static org.testng.Assert.*;
 
@@ -28,7 +29,6 @@ import java.util.*;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.*;
-import org.apache.lens.cube.metadata.SchemaGraph.TableRelationship;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
@@ -45,7 +45,6 @@ import org.testng.annotations.Test;
 public class TestJoinResolver extends TestQueryRewrite {
 
   private static HiveConf hconf = new HiveConf(TestJoinResolver.class);
-  private CubeMetastoreClient metastore;
 
   @BeforeTest
   public void setupInstance() throws Exception {
@@ -53,124 +52,25 @@ public class TestJoinResolver extends TestQueryRewrite {
     hconf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
     hconf.setBoolean(CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT, true);
     hconf.setBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, true);
+    hconf.setBoolean(CubeQueryConfUtil.DISABLE_AGGREGATE_RESOLVER, false);
     hconf.setBoolean(CubeQueryConfUtil.ENABLE_FLATTENING_FOR_BRIDGETABLES, true);
-    this.metastore = CubeMetastoreClient.getInstance(hconf);
   }
 
   @AfterTest
   public void closeInstance() throws Exception {
   }
 
-  // testBuildGraph - graph correctness
-  @Test
-  public void testBuildGraph() throws Exception {
-    SchemaGraph schemaGraph = metastore.getSchemaGraph();
-    CubeInterface cube = metastore.getCube(CubeTestSetup.TEST_CUBE_NAME);
-    Map<AbstractCubeTable, Set<TableRelationship>> graph = schemaGraph.getCubeGraph(cube);
-    printGraph(graph);
-    Assert.assertNotNull(graph);
-
-    // Let's do some lookups
-    Set<TableRelationship> dim4Edges = graph.get(metastore.getDimension("testdim4"));
-    Assert.assertNull(dim4Edges);
-    dim4Edges = graph.get(metastore.getDimension("testdim3"));
-    Assert.assertNotNull(dim4Edges);
-    Assert.assertEquals(1, dim4Edges.size());
-
-    List<TableRelationship> edges = new ArrayList<TableRelationship>(dim4Edges);
-    TableRelationship dim4edge = edges.get(0);
-    Assert.assertEquals("id", dim4edge.getToColumn());
-    Assert.assertEquals(metastore.getDimension("testdim4"), dim4edge.getToTable());
-    Assert.assertEquals("testdim4id", dim4edge.getFromColumn());
-    Assert.assertEquals(metastore.getDimension("testdim3"), dim4edge.getFromTable());
-  }
-
-  private void searchPaths(AbstractCubeTable source, AbstractCubeTable target, SchemaGraph graph) {
-    SchemaGraph.GraphSearch search = new SchemaGraph.GraphSearch(source, target, graph);
-    List<SchemaGraph.JoinPath> joinPaths = search.findAllPathsToTarget();
-
-    System.out.println("@@ " + source + " ==> " + target + " paths =");
-    int i = 0;
-    for (SchemaGraph.JoinPath jp : joinPaths) {
-      Assert.assertEquals(jp.getEdges().get(0).getToTable(), source);
-      Assert.assertEquals(jp.getEdges().get(jp.getEdges().size() - 1).getFromTable(), target);
-      Collections.reverse(jp.getEdges());
-      System.out.println(++i + " " + jp.getEdges());
-    }
-  }
-
-  @Test
-  public void testFindChain() throws Exception {
-    SchemaGraph schemaGraph = metastore.getSchemaGraph();
-    schemaGraph.print();
-
-    // Search For all cubes and all dims to make sure that search terminates
-    for (CubeInterface cube : metastore.getAllCubes()) {
-      for (Dimension dim : metastore.getAllDimensions()) {
-        searchPaths(dim, (AbstractCubeTable) cube, schemaGraph);
-      }
-    }
-
-    for (Dimension dim : metastore.getAllDimensions()) {
-      for (Dimension otherDim : metastore.getAllDimensions()) {
-        if (otherDim != dim) {
-          searchPaths(dim, otherDim, schemaGraph);
-        }
-      }
-    }
-
-    // Assert for testcube
-    CubeInterface testCube = metastore.getCube("testcube");
-    Dimension zipDim = metastore.getDimension("zipdim");
-    Dimension cityDim = metastore.getDimension("citydim");
-    Dimension testDim2 = metastore.getDimension("testDim2");
-
-    SchemaGraph.GraphSearch search = new SchemaGraph.GraphSearch(zipDim, (AbstractCubeTable) testCube, schemaGraph);
-
-    List<SchemaGraph.JoinPath> paths = search.findAllPathsToTarget();
-    Assert.assertEquals(6, paths.size());
-    validatePath(paths.get(0), zipDim, (AbstractCubeTable) testCube);
-    validatePath(paths.get(1), zipDim, cityDim, (AbstractCubeTable) testCube);
-    validatePath(paths.get(2), zipDim, cityDim, testDim2, (AbstractCubeTable) testCube);
-    validatePath(paths.get(3), zipDim, cityDim, testDim2, (AbstractCubeTable) testCube);
-    validatePath(paths.get(4), zipDim, cityDim, testDim2, (AbstractCubeTable) testCube);
-    validatePath(paths.get(5), zipDim, cityDim, testDim2, (AbstractCubeTable) testCube);
-  }
-
-  private void validatePath(SchemaGraph.JoinPath jp, AbstractCubeTable... tables) {
-    Assert.assertTrue(!jp.getEdges().isEmpty());
-    Set<AbstractCubeTable> expected = new HashSet<AbstractCubeTable>(Arrays.asList(tables));
-    Set<AbstractCubeTable> actual = new HashSet<AbstractCubeTable>();
-    for (TableRelationship edge : jp.getEdges()) {
-      if (edge.getFromTable() != null) {
-        actual.add(edge.getFromTable());
-      }
-      if (edge.getToTable() != null) {
-        actual.add(edge.getToTable());
-      }
-    }
-
-    Assert.assertEquals(expected, actual,
-      "Edges: " + jp.getEdges().toString() + " Expected Tables: " + Arrays.toString(tables) + " Actual Tables: "
-        + actual.toString());
-  }
-
-  private void printGraph(Map<AbstractCubeTable, Set<TableRelationship>> graph) {
-    System.out.println("--Graph-Nodes=" + graph.size());
-    for (AbstractCubeTable tab : graph.keySet()) {
-      System.out.println(tab.getName() + "::" + graph.get(tab));
-    }
-  }
-
   private String getAutoResolvedFromString(CubeQueryContext query) throws LensException {
     return query.getHqlContext().getFrom();
   }
 
   @Test
   public void testAutoJoinResolver() throws Exception {
+    HiveConf conf = new HiveConf(hconf);
+    conf.setBoolean(CubeQueryConfUtil.DISABLE_AGGREGATE_RESOLVER, true);
     // Test 1 Cube + dim
-    String query = "select citydim.name, testDim2.name, testDim4.name, msr2 from testCube where " + TWO_DAYS_RANGE;
-    CubeQueryRewriter driver = new CubeQueryRewriter(hconf, hconf);
+    String query = "select cubeCity.name, dim2chain.name, dim4chain.name, msr2 from testCube where " + TWO_DAYS_RANGE;
+    CubeQueryRewriter driver = new CubeQueryRewriter(conf, conf);
     CubeQueryContext rewrittenQuery = driver.rewrite(query);
     String hql = rewrittenQuery.toHQL();
     System.out.println("testAutoJoinResolverauto join HQL:" + hql);
@@ -178,13 +78,13 @@ public class TestJoinResolver extends TestQueryRewrite {
     List<String> expectedClauses = new ArrayList<String>();
     expectedClauses.add(getDbName() + "c1_testfact2_raw testcube");
     expectedClauses.add(getDbName()
-      + "c1_citytable citydim on testcube.cityid = citydim.id and (citydim.dt = 'latest')");
+      + "c1_citytable cubecity on testcube.cityid = cubecity.id and (cubecity.dt = 'latest')");
     expectedClauses.add(getDbName()
-      + "c1_testdim2tbl testdim2 on testcube.dim2 = testdim2.id and (testdim2.dt = 'latest')");
+      + "c1_testdim2tbl dim2chain on testcube.dim2 = dim2chain.id and (dim2chain.dt = 'latest')");
     expectedClauses.add(getDbName()
-      + "c1_testdim3tbl testdim3 on testdim2.testdim3id = testdim3.id and (testdim3.dt = 'latest')");
+      + "c1_testdim3tbl testdim3 on dim2chain.testdim3id = testdim3.id and (testdim3.dt = 'latest')");
     expectedClauses.add(getDbName()
-      + "c1_testdim4tbl testdim4 on testdim3.testdim4id = testdim4.id and (testdim4.dt = 'latest')");
+      + "c1_testdim4tbl dim4chain on testdim3.testdim4id = dim4chain.id and (dim4chain.dt = 'latest')");
 
     List<String> actualClauses = new ArrayList<String>();
     for (String clause : StringUtils.splitByWholeSeparator(getAutoResolvedFromString(rewrittenQuery), "join")) {
@@ -199,7 +99,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     // Test 2 Dim only query
     expectedClauses.clear();
     actualClauses.clear();
-    String dimOnlyQuery = "select testDim2.name, testDim4.name FROM testDim2 where " + TWO_DAYS_RANGE;
+    String dimOnlyQuery = "select testDim2.name, dim4chain.name FROM testDim2 where " + TWO_DAYS_RANGE;
     rewrittenQuery = driver.rewrite(dimOnlyQuery);
     hql = rewrittenQuery.toHQL();
     System.out.println("testAutoJoinResolverauto join HQL:" + hql);
@@ -208,7 +108,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     expectedClauses.add(getDbName()
       + "c1_testdim3tbl testdim3 on testdim2.testdim3id = testdim3.id and (testdim3.dt = 'latest')");
     expectedClauses.add(getDbName()
-      + "c1_testdim4tbl testdim4 on testdim3.testdim4id = testdim4.id and (testdim4.dt = 'latest')");
+      + "c1_testdim4tbl dim4chain on testdim3.testdim4id = dim4chain.id and (dim4chain.dt = 'latest')");
     for (String clause : StringUtils.splitByWholeSeparator(getAutoResolvedFromString(rewrittenQuery), "join")) {
       if (StringUtils.isNotBlank(clause)) {
         actualClauses.add(clause.trim());
@@ -224,28 +124,27 @@ public class TestJoinResolver extends TestQueryRewrite {
   }
 
   @Test
-  public void testPartialJoinResolver() throws Exception {
+  public void testJoinFilters() throws Exception {
     String query =
-      "SELECT citydim.name, testDim4.name, msr2 "
-        + "FROM testCube left outer join citydim ON citydim.name = 'FOOBAR'"
-        + " right outer join testDim4 on testDim4.name='TESTDIM4NAME'" + " WHERE " + TWO_DAYS_RANGE;
-    CubeQueryRewriter driver = new CubeQueryRewriter(hconf, hconf);
-    CubeQueryContext rewrittenQuery = driver.rewrite(query);
-    String hql = rewrittenQuery.toHQL();
-    System.out.println("testPartialJoinResolver Partial join hql: " + hql);
-    String partSQL =
-      " left outer join " + getDbName() + "c1_citytable citydim on testcube.cityid "
-        + "= citydim.id and (( citydim . name ) =  'FOOBAR' ) " + "and (citydim.dt = 'latest')";
-    Assert.assertTrue(hql.contains(partSQL));
-    partSQL =
-      " right outer join " + getDbName() + "c1_testdim2tbl testdim2 on "
-        + "testcube.dim2 = testdim2.id right outer join " + getDbName()
-        + "c1_testdim3tbl testdim3 on testdim2.testdim3id = testdim3.id and "
-        + "(testdim2.dt = 'latest') right outer join " + getDbName()
-        + "c1_testdim4tbl testdim4 on testdim3.testdim4id = testdim4.id and "
-        + "(( testdim4 . name ) =  'TESTDIM4NAME' ) and (testdim3.dt = 'latest')";
-
-    Assert.assertTrue(hql.contains(partSQL));
+      "SELECT citydim.name, testDim4.name, msr2 FROM testCube "
+        + " left outer join citydim ON testcube.cityid = citydim .id and citydim.name = 'FOOBAR'"
+        + " right outer join testdim2 on testcube.dim2 = testdim2.id "
+        + " right outer join testdim3 on testdim2.testdim3id = testdim3.id "
+        + " right outer join testDim4 on testdim3.testdim4id = testdim4.id and testDim4.name='TESTDIM4NAME'"
+        + " WHERE " + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, hconf);
+    String expected = getExpectedQuery("testcube", "select citydim.name, testDim4.name, sum(testcube.msr2) FROM ",
+      " left outer JOIN " + getDbName() + "c1_citytable citydim on testcube.cityid = citydim.id +"
+      + " and (( citydim . name ) =  'FOOBAR' ) and (citydim.dt = 'latest')"
+      + " right outer join " + getDbName()
+      + "c1_testdim2tbl testdim2 on testcube.dim2 = testdim2.id and (testdim2.dt = 'latest')"
+      + " right outer join " + getDbName() + "c1_testdim3tbl testdim3 on testdim2.testdim3id = testdim3.id and "
+      + "(testdim3.dt = 'latest') "
+      + " right outer join " + getDbName() + "c1_testdim4tbl testdim4 on testdim3.testdim4id = testdim4.id and "
+      + "(( testdim4 . name ) =  'TESTDIM4NAME' ) and (testdim4.dt = 'latest')",
+      null, "group by citydim.name, testdim4.name", null,
+      getWhereForDailyAndHourly2days("testcube", "c1_summary3"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
   }
 
   @Test
@@ -258,122 +157,162 @@ public class TestJoinResolver extends TestQueryRewrite {
 
   @Test
   public void testJoinWithoutCondition() throws Exception {
-    String query = "SELECT citydim.name, msr2 FROM testCube WHERE " + TWO_DAYS_RANGE;
-    CubeQueryRewriter driver = new CubeQueryRewriter(hconf, hconf);
-    CubeQueryContext ctx = driver.rewrite(query);
-    String hql = ctx.toHQL();
-    String joinClause = getAutoResolvedFromString(ctx);
-    System.out.println("@Resolved join clause " + joinClause);
-    Assert.assertEquals(getDbName() + "c1_testfact2_raw testcube join " + getDbName() + "c1_citytable citydim on "
-      + "testcube.cityid = citydim.id and (citydim.dt = 'latest')", joinClause.trim());
+    assertLensExceptionInRewrite("SELECT citydim.name, msr2 FROM testCube WHERE " + TWO_DAYS_RANGE, hconf,
+      LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE);
+    assertLensExceptionInRewrite("select cubeState.name, citydim.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE,
+      hconf, LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE);
+    assertLensExceptionInRewrite("select citydim.name, statedim.name from citydim limit 10",
+      hconf, LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE);
+    assertLensExceptionInRewrite("select countrydim.name, citystate.name from citydim limit 10",
+      hconf, LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE);
   }
 
+
   @Test
   public void testJoinTypeConf() throws Exception {
     HiveConf tConf = new HiveConf(hconf);
     tConf.set(CubeQueryConfUtil.JOIN_TYPE_KEY, "LEFTOUTER");
-    System.out.println("@@Set join type to " + hconf.get(CubeQueryConfUtil.JOIN_TYPE_KEY));
-    CubeQueryRewriter driver = new CubeQueryRewriter(tConf, hconf);
-    String query = "select citydim.name, msr2 FROM testCube WHERE " + TWO_DAYS_RANGE;
-    CubeQueryContext ctx = driver.rewrite(query);
-    String hql = ctx.toHQL();
-    System.out.println("testJoinTypeConf@@Resolved join clause1 - " + getAutoResolvedFromString(ctx));
-    Assert.assertEquals(getDbName() + "c1_testfact2_raw testcube left outer join " + getDbName()
-        + "c1_citytable citydim on testcube.cityid = citydim.id and (citydim.dt = 'latest')",
-      getAutoResolvedFromString(ctx).trim());
+    String query = "select cubecity.name, msr2 FROM testCube WHERE " + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, tConf);
+    // Check that aliases are preserved in the join clause
+    String expected = getExpectedQuery("testcube", "select cubecity.name, sum(testcube.msr2) FROM ",
+      " left outer join " + getDbName()
+      + "c1_citytable cubecity ON testcube.cityid = cubecity.id and (cubecity.dt = 'latest')",
+      null, " group by cubecity.name", null, getWhereForHourly2days("testcube", "c1_testfact2"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     tConf.set(CubeQueryConfUtil.JOIN_TYPE_KEY, "FULLOUTER");
-    System.out.println("@@Set join type to " + hconf.get(CubeQueryConfUtil.JOIN_TYPE_KEY));
-    driver = new CubeQueryRewriter(tConf, hconf);
-    ctx = driver.rewrite(query);
-    hql = ctx.toHQL();
-    System.out.println("testJoinTypeConf@@Resolved join clause2 - " + getAutoResolvedFromString(ctx));
-    Assert.assertEquals(getDbName() + "c1_testfact2_raw testcube full outer join " + getDbName()
-        + "c1_citytable citydim on testcube.cityid = citydim.id and (citydim.dt = 'latest')",
-      getAutoResolvedFromString(ctx).trim());
+    hqlQuery = rewrite(query, tConf);
+    // Check that aliases are preserved in the join clause
+    expected = getExpectedQuery("testcube", "select cubecity.name, sum(testcube.msr2) FROM ",
+      " full outer join " + getDbName()
+      + "c1_citytable cubecity ON testcube.cityid = cubecity.id and (cubecity.dt = 'latest')",
+      null, " group by cubecity.name", null, getWhereForHourly2days("testcube", "c1_testfact2"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+
+    tConf.set(CubeQueryConfUtil.JOIN_TYPE_KEY, "RIGHTOUTER");
+    hqlQuery = rewrite(query, tConf);
+    // Check that aliases are preserved in the join clause
+    expected = getExpectedQuery("testcube", "select cubecity.name, sum(testcube.msr2) FROM ",
+      " right outer join " + getDbName()
+      + "c1_citytable cubecity ON testcube.cityid = cubecity.id",
+      null, " and (cubecity.dt = 'latest') group by cubecity.name", null,
+      getWhereForHourly2days("testcube", "c1_testfact2"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
   }
 
   @Test
-  public void testPreserveTableAlias() throws Exception {
+  public void testPreserveTableAliasWithFullJoin() throws Exception {
     HiveConf tConf = new HiveConf(hconf);
     tConf.set(CubeQueryConfUtil.JOIN_TYPE_KEY, "LEFTOUTER");
-    String query = "select c.name, t.msr2 FROM testCube t join citydim c WHERE " + TWO_DAYS_RANGE;
-    CubeQueryRewriter driver = new CubeQueryRewriter(tConf, hconf);
-    CubeQueryContext ctx = driver.rewrite(query);
-    String hql = ctx.toHQL();
-    System.out.println("testPreserveTableAlias@@HQL:" + hql);
-    System.out.println("testPreserveTableAlias@@Resolved join clause - " + getAutoResolvedFromString(ctx));
+    String query = "select c.name, t.msr2 FROM testCube t join citydim c on t.cityid = c.id WHERE " + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, tConf);
+    // Check that aliases are preserved in the join clause
+    // Conf will be ignored in this case since user has specified the join condition
+    String expected = getExpectedQuery("t", "select c.name, sum(t.msr2) FROM ",
+      " inner join " + getDbName() + "c1_citytable c ON t.cityid = c.id and c.dt = 'latest'",
+      null, " group by c.name", null, getWhereForHourly2days("t", "c1_testfact2"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
+
+  @Test
+  public void testPreserveTableAliasWithAutoJoin() throws Exception {
+    HiveConf tConf = new HiveConf(hconf);
+    tConf.set(CubeQueryConfUtil.JOIN_TYPE_KEY, "LEFTOUTER");
+    String query = "select cubecity.name, t.msr2 FROM testCube t WHERE " + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, tConf);
     // Check that aliases are preserved in the join clause
-    // Conf will be ignored in this case since user has specified partial join
-    Assert.assertEquals(getDbName() + "c1_testfact2_raw t inner join " + getDbName()
-      + "c1_citytable c on t.cityid = c.id and (c.dt = 'latest')", getAutoResolvedFromString(ctx).trim());
-    String whereClause = hql.substring(hql.indexOf("WHERE"));
-    // Check that the partition condition is not added again in where clause
-    Assert.assertFalse(whereClause.contains("c.dt = 'latest'"));
+    String expected = getExpectedQuery("t", "select cubecity.name, sum(t.msr2) FROM ",
+      " left outer join " + getDbName()
+      + "c1_citytable cubecity ON t.cityid = cubecity.id and (cubecity.dt = 'latest')",
+      null, " group by cubecity.name", null, getWhereForHourly2days("t", "c1_testfact2"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
   }
 
   @Test
-  public void testDimOnlyQuery() throws Exception {
+  public void testDimOnlyQueryWithAutoJoin() throws Exception {
     HiveConf tConf = new HiveConf(hconf);
     tConf.set(CubeQueryConfUtil.JOIN_TYPE_KEY, "INNER");
-    String query = "select citydim.name, statedim.name from citydim limit 10";
-    HiveConf dimOnlyConf = new HiveConf(tConf);
-    CubeQueryRewriter rewriter = new CubeQueryRewriter(dimOnlyConf, hconf);
-    CubeQueryContext ctx = rewriter.rewrite(query);
-    String hql = ctx.toHQL();
-    System.out.println("testDimOnlyQuery@@@HQL:" + hql);
-    System.out.println("testDimOnlyQuery@@@Resolved join clause: " + getAutoResolvedFromString(ctx));
-    Assert.assertTrue(hql.matches(".*?WHERE\\W+citydim.dt = 'latest'\\W+LIMIT 10.*?"));
-    Assert.assertEquals(getDbName() + "c1_citytable citydim inner join " + getDbName()
-        + "c1_statetable statedim on citydim.stateid = statedim.id and (statedim.dt = 'latest')",
-      getAutoResolvedFromString(ctx).trim());
-
-    String queryWithJoin = "select citydim.name, statedim.name from citydim join statedim";
-    ctx = rewriter.rewrite(queryWithJoin);
-    hql = ctx.toHQL();
-    System.out.println("testDimOnlyQuery@@@HQL2:" + hql);
-    HQLParser.parseHQL(hql, tConf);
-    Assert.assertEquals(getDbName() + "c1_citytable citydim inner join " + getDbName()
-        + "c1_statetable statedim on citydim.stateid = statedim.id and (statedim.dt = 'latest')",
-      getAutoResolvedFromString(ctx).trim());
+    String query = "select citydim.name, citystate.name from citydim limit 10";
+    String hqlQuery = rewrite(query, tConf);
+    String expected =
+      getExpectedQuery("citydim", "select citydim.name, citystate.name from ", " inner join " + getDbName()
+        + "c1_statetable citystate on citydim.stateid = citystate.id and (citystate.dt = 'latest')",
+        null, " limit 10", "c1_citytable", true);
+    compareQueries(hqlQuery, expected);
   }
 
   @Test
-  public void testStorageFilterPushdown() throws Exception {
-    String q = "SELECT citydim.name, statedim.name FROM citydim";
+  public void testDimOnlyQueryWithFullJoin() throws Exception {
+    HiveConf tConf = new HiveConf(hconf);
+    tConf.set(CubeQueryConfUtil.JOIN_TYPE_KEY, "INNER");
+    String queryWithJoin = "select citydim.name, statedim.name from citydim join statedim on citydim.stateid = "
+      + "statedim.id";
+
+    String hqlQuery = rewrite(queryWithJoin, tConf);
+    String expected =
+      getExpectedQuery("citydim", "select citydim.name, statedim.name from ", " inner join " + getDbName()
+        + "c1_statetable statedim on citydim.stateid = statedim.id and citydim.dt='latest' and statedim.dt='latest'",
+        null, null, "c1_citytable", false);
+    compareQueries(hqlQuery, expected);
+  }
+
+  @Test
+  public void testStorageFilterPushdownWithFullJoin() throws Exception {
+    String q1 = "SELECT citydim.name, statedim.name FROM citydim left outer join statedim on citydim.stateid = "
+      + "statedim.id";
+    String hqlQuery = rewrite(q1, hconf);
+    String expected =
+      getExpectedQuery("citydim", "select citydim.name, statedim.name from ", " left outer join " + getDbName()
+        + "c1_statetable statedim on citydim.stateid = statedim.id and citydim.dt='latest' and statedim.dt='latest'",
+        null, null, "c1_citytable", false);
+    compareQueries(hqlQuery, expected);
+
+    String q2 = "SELECT citydim.name, statedim.name FROM citydim right outer join statedim on citydim.stateid = "
+      + "statedim.id";
+    hqlQuery = rewrite(q2, hconf);
+    expected =
+      getExpectedQuery("citydim", "select citydim.name, statedim.name from ", " right outer join " + getDbName()
+        + "c1_statetable statedim on citydim.stateid = statedim.id and citydim.dt='latest' and statedim.dt='latest'",
+        null, null, "c1_citytable", false);
+    compareQueries(hqlQuery, expected);
+
+    String q3 = "SELECT citydim.name, statedim.name FROM citydim full outer join statedim on citydim.stateid = "
+      + "statedim.id";
+    hqlQuery = rewrite(q3, hconf);
+    expected =
+      getExpectedQuery("citydim", "select citydim.name, statedim.name from ", " full outer join " + getDbName()
+        + "c1_statetable statedim on citydim.stateid = statedim.id and citydim.dt='latest' and statedim.dt='latest'",
+        null, null, "c1_citytable", false);
+  }
+
+  @Test
+  public void testStorageFilterPushdownWithAutoJoin() throws Exception {
+    String q = "SELECT citydim.name, citystate.name FROM citydim limit 10";
     HiveConf conf = new HiveConf(hconf);
     conf.set(CubeQueryConfUtil.JOIN_TYPE_KEY, "LEFTOUTER");
-    CubeQueryRewriter rewriter = new CubeQueryRewriter(conf, hconf);
-    CubeQueryContext context = rewriter.rewrite(q);
-    String hql = context.toHQL();
-    System.out.println("##1 hql " + hql);
-    System.out.println("##1 " + getAutoResolvedFromString(context));
-    Assert.assertEquals(getDbName() + "c1_citytable citydim left outer join " + getDbName()
-        + "c1_statetable statedim on citydim.stateid = statedim.id" + " and (statedim.dt = 'latest')",
-      getAutoResolvedFromString(context).trim());
-    Assert.assertTrue(hql.matches(".*?WHERE\\W+citydim.dt = 'latest'\\W+.*?"));
+    String hqlQuery = rewrite(q, conf);
+    String expected =
+      getExpectedQuery("citydim", "select citydim.name, citystate.name from ", " left outer join " + getDbName()
+        + "c1_statetable citystate on citydim.stateid = citystate.id and (citystate.dt = 'latest')",
+        null, " limit 10", "c1_citytable", true);
+    compareQueries(hqlQuery, expected);
 
     conf.set(CubeQueryConfUtil.JOIN_TYPE_KEY, "RIGHTOUTER");
-    rewriter = new CubeQueryRewriter(conf, hconf);
-    context = rewriter.rewrite(q);
-    hql = context.toHQL();
-    System.out.println("##2 hql " + hql);
-    System.out.println("##2 " + getAutoResolvedFromString(context));
-    Assert.assertEquals(getDbName() + "c1_citytable citydim right outer join " + getDbName()
-        + "c1_statetable statedim on citydim.stateid = statedim.id " + "and (citydim.dt = 'latest')",
-      getAutoResolvedFromString(context).trim());
-    Assert.assertTrue(hql.matches(".*?WHERE\\W+statedim.dt = 'latest'\\W+.*?"));
+    hqlQuery = rewrite(q, conf);
+    expected =
+      getExpectedQuery("citydim", "select citydim.name, citystate.name from ", " right outer join " + getDbName()
+        + "c1_statetable citystate on citydim.stateid = citystate.id and (citydim.dt = 'latest')",
+        " citystate.dt='latest' ", "limit 10", "c1_citytable", false);
+    compareQueries(hqlQuery, expected);
 
     conf.set(CubeQueryConfUtil.JOIN_TYPE_KEY, "FULLOUTER");
-    rewriter = new CubeQueryRewriter(conf, hconf);
-    context = rewriter.rewrite(q);
-    hql = context.toHQL();
-    System.out.println("##3 hql " + hql);
-    System.out.println("##3 " + getAutoResolvedFromString(context));
-    Assert.assertEquals(getDbName() + "c1_citytable citydim full outer join " + getDbName()
-      + "c1_statetable statedim on citydim.stateid = statedim.id "
-      + "and (citydim.dt = 'latest') and (statedim.dt = 'latest')", getAutoResolvedFromString(context).trim());
-    Assert.assertTrue(!hql.contains("WHERE"));
+    hqlQuery = rewrite(q, conf);
+    expected =
+      getExpectedQuery("citydim", "select citydim.name, citystate.name from ", " full outer join " + getDbName()
+        + "c1_statetable citystate on citydim.stateid = citystate.id and (citydim.dt = 'latest')"
+        + " and citystate.dt='latest'", null, "limit 10", "c1_citytable", false);
+    compareQueries(hqlQuery, expected);
   }
 
   @Test
@@ -434,14 +373,14 @@ public class TestJoinResolver extends TestQueryRewrite {
       null, getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
-    // Single join chain and an unrelated dimension
-    query = "select cubeState.name, citydim.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    // Two unrelated join chains
+    query = "select cubeState.name, cubecity.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
     hqlQuery = rewrite(query, hconf);
     expected = getExpectedQuery("basecube",
-      "select cubestate.name, citydim.name, sum(basecube.msr2) FROM ",
+      "select cubestate.name, cubecity.name, sum(basecube.msr2) FROM ",
       " join " + getDbName() + "c1_statetable cubestate on basecube.stateid = cubestate.id and cubestate.dt = 'latest'"
-        + " join " + getDbName() + "c1_citytable citydim on basecube.cityid = citydim.id and citydim.dt = 'latest'",
-      null, "group by cubestate.name,citydim.name", null,
+        + " join " + getDbName() + "c1_citytable cubecity on basecube.cityid = cubecity.id and cubecity.dt = 'latest'",
+      null, "group by cubestate.name,cubecity.name", null,
       getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base")
     );
     TestCubeRewriter.compareQueries(hqlQuery, expected);
@@ -580,149 +519,110 @@ public class TestJoinResolver extends TestQueryRewrite {
   public void testConflictingJoins() throws ParseException, LensException, HiveException {
     // Single joinchain with two paths, intermediate dimension accessed separately by name.
     String query = "select cityState.name, citydim.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
-    try {
-      rewrite(query, hconf);
-      Assert.fail("Should have failed. "
-        + "The table citydim is getting accessed as both chain and without chain ");
-    } catch (LensException e) {
-      Assert.assertEquals(e.getMessage().toLowerCase(),
-        "Table citydim is getting accessed via joinchain: citystate and no chain at all".toLowerCase());
-    }
+    assertLensExceptionInRewrite(query, hconf, LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE);
 
     // Multi joinchains + a dimension part of one of the chains.
     query = "select cityState.name, cubeState.name, citydim.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
-    try {
-      rewrite(query, hconf);
-      Assert.fail("Should have failed. "
-        + "The table citydim is getting accessed as both chain and without chain ");
-    } catch (LensException e) {
-      Assert.assertEquals(e.getMessage().toLowerCase(),
-        "Table citydim is getting accessed via joinchain: citystate and no chain at all".toLowerCase());
-    }
+    assertLensExceptionInRewrite(query, hconf, LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE);
 
     // this test case should pass when default qualifiers for dimensions' chains are added
     // Two joinchains with same destination, and the destination table accessed separately
     query = "select cityState.name, cubeState.name, statedim.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
-    try {
-      rewrite(query, hconf);
-      Assert.fail("Should have failed. "
-        + "It's not possible to resolve which statedim is being asked for when cityState and cubeState both end at"
-        + " statedim table.");
-    } catch (LensException e) {
-      Assert.assertEquals(
-        e.getMessage().indexOf("Table statedim has 2 different paths through joinchains"), 0);
-    }
+    assertLensExceptionInRewrite(query, hconf, LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE);
 
     // this test case should pass when default qualifiers for dimensions' chains are added
     // Two Single joinchain, And dest table accessed separately.
     query = "select cubeState.name, statedim.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
-    try {
-      rewrite(query, hconf);
-      Assert.fail("Should have failed. "
-        + "The table statedim is getting accessed as both cubeState and statedim ");
-    } catch (LensException e) {
-      Assert.assertEquals(e.getMessage().toLowerCase(),
-        "Table statedim is getting accessed via two different names: [cubestate, statedim]".toLowerCase());
-    }
+    assertLensExceptionInRewrite(query, hconf, LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE);
+
     // this should pass when default qualifiers are added
     query = "select cityStateCapital, statedim.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
-    try {
-      rewrite(query, hconf);
-      Assert.fail("Should have failed. "
-        + "The table statedim is getting accessed as both cubeState and statedim ");
-    } catch (LensException e) {
-      Assert.assertEquals(e.getMessage().toLowerCase(),
-        "Table statedim is getting accessed via two different names: [citystate, statedim]".toLowerCase());
-    }
+    assertLensExceptionInRewrite(query, hconf, LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE);
 
     // table accessed through denorm column and chain column
     Configuration conf = new Configuration(hconf);
     conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C3, C4");
     String failingQuery = "select testDim2.cityname, testDim2.cityStateCapital FROM testDim2 where " + TWO_DAYS_RANGE;
-    try {
-      rewrite(failingQuery, conf);
-      Assert.fail("Should have failed. "
-        + "The table citydim is getting accessed as both chain and without chain ");
-    } catch (LensException e) {
-      Assert.assertEquals(e.getMessage().toLowerCase(),
-        "Table citydim is getting accessed via joinchain: citystate and no chain at all".toLowerCase());
-    }
+    assertLensExceptionInRewrite(failingQuery, conf, LensCubeErrorCode.NO_REF_COL_AVAILABLE);
   }
 
   @Test
   public void testMultiPaths() throws ParseException, LensException, HiveException {
     String query, hqlQuery, expected;
 
-    query = "select testdim3.name, sum(msr2) from testcube where " + TWO_DAYS_RANGE;
+    query = "select dim3chain.name, sum(msr2) from testcube where " + TWO_DAYS_RANGE;
     hqlQuery = rewrite(query, hconf);
-    expected = getExpectedQuery("testcube", "select testdim3.name, sum(testcube.msr2) FROM ",
-      " join " + getDbName() + "c1_testdim3tbl testdim3 ON testcube.testdim3id=testdim3.id and testdim3.dt='latest'",
-      null, "group by testdim3.name",
+    expected = getExpectedQuery("testcube", "select dim3chain.name, sum(testcube.msr2) FROM ",
+      " join " + getDbName() + "c1_testdim3tbl dim3chain ON testcube.testdim3id=dim3chain.id and dim3chain.dt='latest'",
+      null, "group by dim3chain.name",
       null, getWhereForDailyAndHourly2days("testcube", "c1_summary1"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     // hit a fact where there is no direct path
-    query = "select testdim3.name, avg(msr2) from testcube where " + TWO_DAYS_RANGE;
+    query = "select dim3chain.name, avg(msr2) from testcube where " + TWO_DAYS_RANGE;
     hqlQuery = rewrite(query, hconf);
-    expected = getExpectedQuery("testcube", "select testdim3.name, avg(testcube.msr2) FROM ",
+    expected = getExpectedQuery("testcube", "select dim3chain.name, avg(testcube.msr2) FROM ",
       " join " + getDbName() + "c1_testdim2tbl testdim2 ON testcube.dim2 = testdim2.id and testdim2.dt = 'latest'"
-        + " join " + getDbName() + "c1_testdim3tbl testdim3 "
-        + "ON testdim2.testdim3id = testdim3.id and testdim3.dt = 'latest'",
-      null, "group by testdim3.name",
+        + " join " + getDbName() + "c1_testdim3tbl dim3chain "
+        + "ON testdim2.testdim3id = dim3chain.id and dim3chain.dt = 'latest'",
+      null, "group by dim3chain.name",
       null, getWhereForHourly2days("testcube", "c1_testfact2_raw"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     // resolve denorm variable through multi hop chain paths
     query = "select testdim3id, avg(msr2) from testcube where " + TWO_DAYS_RANGE;
     hqlQuery = rewrite(query, hconf);
-    expected = getExpectedQuery("testcube", "select testdim3.id, avg(testcube.msr2) FROM ",
+    expected = getExpectedQuery("testcube", "select dim3chain.id, avg(testcube.msr2) FROM ",
       " join " + getDbName() + "c1_testdim2tbl testdim2 ON testcube.dim2 = testdim2.id and testdim2.dt = 'latest'"
-        + " join " + getDbName() + "c1_testdim3tbl testdim3 "
-        + "ON testdim2.testdim3id = testdim3.id and testdim3.dt = 'latest'",
-      null, "group by testdim3.id",
+        + " join " + getDbName() + "c1_testdim3tbl dim3chain "
+        + "ON testdim2.testdim3id = dim3chain.id and dim3chain.dt = 'latest'",
+      null, "group by dim3chain.id",
       null, getWhereForHourly2days("testcube", "c1_testfact2_raw"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     // tests from multiple different chains
-    query = "select testdim4.name, testdim3id, avg(msr2) from testcube where " + TWO_DAYS_RANGE;
+    query = "select dim4chain.name, testdim3id, avg(msr2) from testcube where " + TWO_DAYS_RANGE;
     hqlQuery = rewrite(query, hconf);
-    expected = getExpectedQuery("testcube", "select testdim4.name, testdim3.id, avg(testcube.msr2) FROM ",
+    expected = getExpectedQuery("testcube", "select dim4chain.name, dim3chain.id, avg(testcube.msr2) FROM ",
       " join " + getDbName() + "c1_testdim2tbl testdim2 ON testcube.dim2 = testdim2.id and testdim2.dt = 'latest'"
-        + " join " + getDbName() + "c1_testdim3tbl testdim3 ON testdim2.testdim3id=testdim3.id and testdim3.dt='latest'"
-        + " join " + getDbName() + "c1_testdim4tbl testdim4 ON testdim3.testDim4id = testdim4.id and"
-        + " testdim4.dt = 'latest'", null, "group by testdim4.name, testdim3.id", null,
+        + " join " + getDbName()
+        + "c1_testdim3tbl dim3chain ON testdim2.testdim3id=dim3chain.id and dim3chain.dt='latest'"
+        + " join " + getDbName() + "c1_testdim4tbl dim4chain ON dim3chain.testDim4id = dim4chain.id and"
+        + " dim4chain.dt = 'latest'", null, "group by dim4chain.name, dim3chain.id", null,
       getWhereForHourly2days("testcube", "c1_testfact2_raw"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
-    query = "select citydim.name, testdim4.name, testdim3id, avg(msr2) from testcube where " + TWO_DAYS_RANGE;
+    query = "select cubecity.name, dim4chain.name, testdim3id, avg(msr2) from testcube where " + TWO_DAYS_RANGE;
     hqlQuery = rewrite(query, hconf);
-    expected = getExpectedQuery("testcube", "select citydim.name, testdim4.name, testdim3.id, avg(testcube.msr2) FROM ",
+    expected = getExpectedQuery("testcube", "select cubecity.name, dim4chain.name, dim3chain.id, avg(testcube.msr2) "
+        + "FROM ",
       " join " + getDbName() + "c1_testdim2tbl testdim2 ON testcube.dim2 = testdim2.id and testdim2.dt = 'latest'"
-        + " join " + getDbName() + "c1_testdim3tbl testdim3 ON testdim2.testdim3id=testdim3.id and testdim3.dt='latest'"
-        + " join " + getDbName() + "c1_testdim4tbl testdim4 ON testdim3.testDim4id = testdim4.id and"
-        + " testdim4.dt = 'latest'"
-        + " join " + getDbName() + "c1_citytable citydim ON testcube.cityid = citydim.id and citydim.dt = 'latest'"
-      , null, "group by citydim.name, testdim4.name, testdim3.id", null,
+        + " join " + getDbName()
+        + "c1_testdim3tbl dim3chain ON testdim2.testdim3id=dim3chain.id and dim3chain.dt='latest'"
+        + " join " + getDbName() + "c1_testdim4tbl dim4chain ON dim3chain.testDim4id = dim4chain.id and"
+        + " dim4chain.dt = 'latest'"
+        + " join " + getDbName() + "c1_citytable cubecity ON testcube.cityid = cubecity.id and cubecity.dt = 'latest'"
+      , null, "group by cubecity.name, dim4chain.name, dim3chain.id", null,
       getWhereForHourly2days("testcube", "c1_testfact2_raw"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     // test multi hops
-    query = "select testdim4.name, avg(msr2) from testcube where " + TWO_DAYS_RANGE;
+    query = "select dim4chain.name, avg(msr2) from testcube where " + TWO_DAYS_RANGE;
     hqlQuery = rewrite(query, hconf);
-    expected = getExpectedQuery("testcube", "select testdim4.name, avg(testcube.msr2) FROM ",
+    expected = getExpectedQuery("testcube", "select dim4chain.name, avg(testcube.msr2) FROM ",
       " join " + getDbName() + "c1_testdim2tbl testdim2 ON testcube.dim2 = testdim2.id and testdim2.dt = 'latest'"
         + " join " + getDbName() + "c1_testdim3tbl testdim3 ON testdim2.testdim3id=testdim3.id and testdim3.dt='latest'"
-        + " join " + getDbName() + "c1_testdim4tbl testdim4 ON testdim3.testDim4id = testdim4.id and"
-        + " testdim4.dt = 'latest'", null, "group by testdim4.name", null,
+        + " join " + getDbName() + "c1_testdim4tbl dim4chain ON testdim3.testDim4id = dim4chain.id and"
+        + " dim4chain.dt = 'latest'", null, "group by dim4chain.name", null,
       getWhereForHourly2days("testcube", "c1_testfact2_raw"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
-    query = "select testdim4.name, sum(msr2) from testcube where " + TWO_DAYS_RANGE;
+    query = "select dim4chain.name, sum(msr2) from testcube where " + TWO_DAYS_RANGE;
     hqlQuery = rewrite(query, hconf);
-    expected = getExpectedQuery("testcube", "select testdim4.name, sum(testcube.msr2) FROM ",
+    expected = getExpectedQuery("testcube", "select dim4chain.name, sum(testcube.msr2) FROM ",
       " join " + getDbName() + "c1_testdim3tbl testdim3 ON testcube.testdim3id = testdim3.id and testdim3.dt = 'latest'"
-        + " join " + getDbName() + "c1_testdim4tbl testdim4 ON testdim3.testDim4id = testdim4.id and"
-        + " testdim4.dt = 'latest'", null, "group by testdim4.name", null,
+        + " join " + getDbName() + "c1_testdim4tbl dim4chain ON testdim3.testDim4id = dim4chain.id and"
+        + " dim4chain.dt = 'latest'", null, "group by dim4chain.name", null,
       getWhereForDailyAndHourly2days("testcube", "c1_summary1"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
   }
@@ -748,13 +648,17 @@ public class TestJoinResolver extends TestQueryRewrite {
 
   @Test
   public void testUnreachableDim() throws ParseException, LensException, HiveException {
-    LensException e1 = getLensExceptionInRewrite("select urdimid from testdim2", hconf);
-    assertNotNull(e1);
-    assertEquals(e1.getErrorCode(), LensCubeErrorCode.NO_DIM_HAS_COLUMN.getLensErrorInfo().getErrorCode());
-
-    LensException e2 = getLensExceptionInRewrite("select urdimid from testcube where " + TWO_DAYS_RANGE, hconf);
-    assertNotNull(e2);
-    assertEquals(e2.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
+    assertLensExceptionInRewrite("select urdimid from testdim2", hconf, LensCubeErrorCode.NO_DIM_HAS_COLUMN);
+    assertLensExceptionInRewrite("select urdimid from testcube where " + TWO_DAYS_RANGE, hconf,
+      LensCubeErrorCode.NO_FACT_HAS_COLUMN);
+    assertLensExceptionInRewrite("select unreachableName from testdim2", hconf,
+      LensCubeErrorCode.NO_DIM_HAS_COLUMN);
+    assertLensExceptionInRewrite("select unreachableName from testcube where " + TWO_DAYS_RANGE, hconf,
+      LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE);
+    assertLensExceptionInRewrite("select unreachableDim_chain.name from testdim2", hconf,
+      LensCubeErrorCode.NO_JOIN_PATH);
+    assertLensExceptionInRewrite("select unreachableDim_chain.name from testcube where " + TWO_DAYS_RANGE, hconf,
+      LensCubeErrorCode.NO_FACT_HAS_COLUMN);
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
index d69635d..0aa31f4 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryRewrite.java
@@ -19,11 +19,15 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertNotNull;
+
 import java.io.IOException;
 
 import org.apache.lens.api.error.ErrorCollection;
 import org.apache.lens.api.error.ErrorCollectionFactory;
 import org.apache.lens.api.error.LensError;
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
@@ -114,6 +118,12 @@ public abstract class TestQueryRewrite {
     }
   }
 
+  protected void assertLensExceptionInRewrite(String query, Configuration conf, LensCubeErrorCode expectedError)
+    throws LensException, ParseException {
+    LensException e = getLensExceptionInRewrite(query, conf);
+    assertNotNull(e);
+    assertEquals(e.getErrorCode(), expectedError.getLensErrorInfo().getErrorCode());
+  }
   protected String getLensExceptionErrorMessageInRewrite(String query, Configuration conf) throws LensException,
       ParseException, ClassNotFoundException {
     try {

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
index 5a072e4..2d7babb 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
@@ -67,8 +67,9 @@ public class TestRewriterPlan extends TestQueryRewrite {
   public void testPlanExtractionForComplexQuery() throws Exception {
     // complex query
     Configuration conf = getConfWithStorages("C1,C2");
-    CubeQueryContext ctx = rewriteCtx("cube select citydim.name, SUM(msr2) from testCube where citydim.name != \"XYZ\""
-      + " and " + TWO_DAYS_RANGE + " having sum(msr2) > 1000 order by citydim.name limit 50", conf);
+    CubeQueryContext ctx = rewriteCtx("cube select cubecity.name, SUM(msr2) from testCube where "
+      + " cubecity.name != \"XYZ\" and " + TWO_DAYS_RANGE + " having sum(msr2) > 1000 order by cubecity.name limit 50",
+      conf);
     ctx.toHQL();
     RewriterPlan plan = new RewriterPlan(Collections.singleton(ctx));
     Assert.assertNotNull(plan);
@@ -90,8 +91,9 @@ public class TestRewriterPlan extends TestQueryRewrite {
     Configuration conf = getConfWithStorages("C1,C2");
     CubeQueryContext ctx1 = rewriteCtx("cube select SUM(msr2) from testCube where " + TWO_DAYS_RANGE, conf);
     ctx1.toHQL();
-    CubeQueryContext ctx2 = rewriteCtx("cube select citydim.name, SUM(msr2) from testCube where citydim.name != \"XYZ\""
-      + " and " + TWO_DAYS_RANGE + " having sum(msr2) > 1000 order by citydim.name limit 50", conf);
+    CubeQueryContext ctx2 = rewriteCtx("cube select cubecity.name, SUM(msr2) from testCube where "
+      + " cubecity.name != \"XYZ\" and " + TWO_DAYS_RANGE + " having sum(msr2) > 1000 order by cubecity.name limit 50",
+      conf);
     ctx2.toHQL();
     RewriterPlan plan = new RewriterPlan(Arrays.asList(ctx1, ctx2));
     Assert.assertNotNull(plan);

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
index b7372f1..a0ee56d 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
@@ -154,14 +154,14 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
       "SELECT test_time_dim, msr2 FROM testCube where " + TWO_DAYS_RANGE_TTD;
     String hqlQuery = rewrite(query, tconf);
     Map<String, String> whereClauses = new HashMap<String, String>();
-    whereClauses.put(getDbName() + "c4_testfact2", TestBetweenTimeRangeWriter.getBetweenClause("hourdim",
+    whereClauses.put(getDbName() + "c4_testfact2", TestBetweenTimeRangeWriter.getBetweenClause("timehourchain1",
       "full_hour", getUptoHour(TWODAYS_BACK),
       getUptoHour(getOneLess(NOW, UpdatePeriod.HOURLY.calendarField())), TestTimeRangeWriter.DB_FORMAT));
     System.out.println("HQL:" + hqlQuery);
     String expected =
-      getExpectedQuery(cubeName, "select hourdim.full_hour, sum(testcube.msr2) FROM ", " join " + getDbName()
-          + "c4_hourDimTbl hourdim on testcube.test_time_dim_hour_id  = hourdim.id", null,
-        " GROUP BY hourdim.full_hour", null, whereClauses);
+      getExpectedQuery(cubeName, "select timehourchain1.full_hour, sum(testcube.msr2) FROM ", " join " + getDbName()
+          + "c4_hourDimTbl timehourchain1 on testcube.test_time_dim_hour_id  = timehourchain1.id", null,
+        " GROUP BY timehourchain1.full_hour", null, whereClauses);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     query =
@@ -170,7 +170,8 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     System.out.println("HQL:" + hqlQuery);
     expected =
       getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", " join " + getDbName()
-        + "c4_hourDimTbl hourdim on testcube.test_time_dim_hour_id  = hourdim.id", null, null, null, whereClauses);
+        + "c4_hourDimTbl timehourchain1 on testcube.test_time_dim_hour_id  = timehourchain1.id", null, null, null,
+        whereClauses);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     query =
@@ -179,7 +180,8 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     System.out.println("HQL:" + hqlQuery);
     expected =
       getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", " join " + getDbName()
-          + "c4_hourDimTbl hourdim on testcube.test_time_dim_hour_id  = hourdim.id", " testcube.cityid > 2 ",
+          + "c4_hourDimTbl timehourchain1 on testcube.test_time_dim_hour_id  = timehourchain1.id",
+        " testcube.cityid > 2 ",
         " and testcube.cityid != 5", null, whereClauses);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
@@ -192,16 +194,17 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     whereClauses = new HashMap<>();
     whereClauses.put(
       getDbName() + "c4_testfact2",
-      TestBetweenTimeRangeWriter.getBetweenClause("hourdim", "full_hour", getUptoHour(TWODAYS_BACK),
+      TestBetweenTimeRangeWriter.getBetweenClause("timehourchain1", "full_hour", getUptoHour(TWODAYS_BACK),
         getUptoHour(getOneLess(NOW, UpdatePeriod.HOURLY.calendarField())),
         TestTimeRangeWriter.DB_FORMAT)
         + " OR "
-        + TestBetweenTimeRangeWriter.getBetweenClause("hourdim", "full_hour", getUptoHour(BEFORE_6_DAYS),
+        + TestBetweenTimeRangeWriter.getBetweenClause("timehourchain1", "full_hour", getUptoHour(BEFORE_6_DAYS),
         getUptoHour(getOneLess(BEFORE_4_DAYS, UpdatePeriod.HOURLY.calendarField())),
         TestTimeRangeWriter.DB_FORMAT));
     expected =
       getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", " join " + getDbName()
-        + "c4_hourDimTbl hourdim on testcube.test_time_dim_hour_id  = hourdim.id", null, null, null, whereClauses);
+        + "c4_hourDimTbl timehourchain1 on testcube.test_time_dim_hour_id  = timehourchain1.id", null, null, null,
+        whereClauses);
     System.out.println("HQL:" + hqlQuery);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
@@ -211,9 +214,9 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
           + " OR " + TWO_DAYS_RANGE_TTD_BEFORE_4_DAYS, tconf);
 
     expected =
-      getExpectedQuery(cubeName, "select to_date(hourdim.full_hour), sum(testcube.msr2) FROM ", " join "
-          + getDbName() + "c4_hourDimTbl hourdim on testcube.test_time_dim_hour_id  = hourdim.id", null,
-        " group by to_date(hourdim.full_hour)", null, whereClauses);
+      getExpectedQuery(cubeName, "select to_date(timehourchain1.full_hour), sum(testcube.msr2) FROM ", " join "
+          + getDbName() + "c4_hourDimTbl timehourchain1 on testcube.test_time_dim_hour_id  = timehourchain1.id", null,
+        " group by to_date(timehourchain1.full_hour)", null, whereClauses);
     System.out.println("HQL:" + hqlQuery);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
   }
@@ -233,13 +236,13 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     String hqlQuery = rewrite(query, tconf);
     Map<String, String> whereClauses = new HashMap<String, String>();
     whereClauses.put(getDbName() + "c4_testfact2", TestBetweenTimeRangeWriter.getBetweenClause(
-      "timehourchain", "full_hour", getUptoHour(TWODAYS_BACK),
+      "timehourchain2", "full_hour", getUptoHour(TWODAYS_BACK),
       getUptoHour(getOneLess(NOW, UpdatePeriod.HOURLY.calendarField())), TestTimeRangeWriter.DB_FORMAT));
     System.out.println("HQL:" + hqlQuery);
     String expected =
-      getExpectedQuery(cubeName, "select timehourchain.full_hour, sum(testcube.msr2) FROM ", " join " + getDbName()
-          + "c4_hourDimTbl timehourchain on testcube.test_time_dim_hour_id2  = timehourchain.id", null,
-        " GROUP BY timehourchain.full_hour", null, whereClauses);
+      getExpectedQuery(cubeName, "select timehourchain2.full_hour, sum(testcube.msr2) FROM ", " join " + getDbName()
+          + "c4_hourDimTbl timehourchain2 on testcube.test_time_dim_hour_id2  = timehourchain2.id", null,
+        " GROUP BY timehourchain2.full_hour", null, whereClauses);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     query =
@@ -248,7 +251,7 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     System.out.println("HQL:" + hqlQuery);
     expected =
       getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", " join " + getDbName()
-        + "c4_hourDimTbl timehourchain on testcube.test_time_dim_hour_id2  = timehourchain.id", null, null, null,
+        + "c4_hourDimTbl timehourchain2 on testcube.test_time_dim_hour_id2  = timehourchain2.id", null, null, null,
         whereClauses);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
@@ -258,7 +261,7 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     System.out.println("HQL:" + hqlQuery);
     expected =
       getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", " join " + getDbName()
-          + "c4_hourDimTbl timehourchain on testcube.test_time_dim_hour_id2  = timehourchain.id",
+          + "c4_hourDimTbl timehourchain2 on testcube.test_time_dim_hour_id2  = timehourchain2.id",
           " testcube.cityid > 2 ", " and testcube.cityid != 5", null, whereClauses);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
@@ -268,19 +271,19 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
         "select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE_TTD2
           + " OR " + TWO_DAYS_RANGE_TTD2_BEFORE_4_DAYS, tconf);
 
-    whereClauses = new HashMap<String, String>();
+    whereClauses = new HashMap<>();
     whereClauses.put(
       getDbName() + "c4_testfact2",
-      TestBetweenTimeRangeWriter.getBetweenClause("timehourchain", "full_hour", getUptoHour(TWODAYS_BACK),
+      TestBetweenTimeRangeWriter.getBetweenClause("timehourchain2", "full_hour", getUptoHour(TWODAYS_BACK),
         getUptoHour(getOneLess(NOW, UpdatePeriod.HOURLY.calendarField())),
         TestTimeRangeWriter.DB_FORMAT)
         + " OR "
-        + TestBetweenTimeRangeWriter.getBetweenClause("timehourchain", "full_hour", getUptoHour(BEFORE_6_DAYS),
+        + TestBetweenTimeRangeWriter.getBetweenClause("timehourchain2", "full_hour", getUptoHour(BEFORE_6_DAYS),
         getUptoHour(getOneLess(BEFORE_4_DAYS, UpdatePeriod.HOURLY.calendarField())),
         TestTimeRangeWriter.DB_FORMAT));
     expected =
       getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", " join " + getDbName()
-        + "c4_hourDimTbl timehourchain on testcube.test_time_dim_hour_id2  = timehourchain.id", null, null, null,
+        + "c4_hourDimTbl timehourchain2 on testcube.test_time_dim_hour_id2  = timehourchain2.id", null, null, null,
         whereClauses);
     System.out.println("HQL:" + hqlQuery);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
@@ -291,9 +294,9 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
           + " OR " +TWO_DAYS_RANGE_TTD2_BEFORE_4_DAYS, tconf);
 
     expected =
-      getExpectedQuery(cubeName, "select to_date(timehourchain.full_hour), sum(testcube.msr2) FROM ", " join "
-          + getDbName() + "c4_hourDimTbl timehourchain on testcube.test_time_dim_hour_id2  = timehourchain.id", null,
-        " group by to_date(timehourchain.full_hour)", null, whereClauses);
+      getExpectedQuery(cubeName, "select to_date(timehourchain2.full_hour), sum(testcube.msr2) FROM ", " join "
+          + getDbName() + "c4_hourDimTbl timehourchain2 on testcube.test_time_dim_hour_id2  = timehourchain2.id", null,
+        " group by to_date(timehourchain2.full_hour)", null, whereClauses);
     System.out.println("HQL:" + hqlQuery);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-examples/src/main/resources/cube-queries.sql
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/cube-queries.sql b/lens-examples/src/main/resources/cube-queries.sql
index 13c5204..9f4a353 100644
--- a/lens-examples/src/main/resources/cube-queries.sql
+++ b/lens-examples/src/main/resources/cube-queries.sql
@@ -26,33 +26,33 @@ cube select dim1, measure2 from sample_cube where time_range_in(dt, '2014-06-24-
 cube select dim3, measure3 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00')
 cube select dim3, measure3 from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00')
 cube select dim3, measure3 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01')
-cube select sample_dim.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00')
-cube select sample_dim.name, measure4 from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00')
-cube select sample_dim.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01')
-cube select sample_dim.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00') order by sample_dim.name
-cube select sample_dim.name, measure4 from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00') order by sample_dim.name
-cube select sample_dim.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01') order by sample_dim.name
-cube select sample_dim.name, measure3 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00') and sample_dim.name != "first"
-cube select sample_dim.name, measure3 from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00') and sample_dim.name != "first"
-cube select sample_dim.name, measure3 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01') and sample_dim.name != "first"
-cube select sample_dim.name, measure2 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00') and sample_dim.name != "first" order by sample_dim.name
-cube select sample_dim.name, measure2 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00') and sample_dim.name != "first" order by sample_dim.name desc
-cube select sample_dim.name, measure2 from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00') and sample_dim.name != "first" order by sample_dim.name
-cube select sample_dim.name, measure2 from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00') and sample_dim.name != "first" order by sample_dim.name desc
-cube select sample_dim.name, measure2 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01') and sample_dim.name != "first" order by sample_dim.name
-cube select sample_dim.name, measure2 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01') and sample_dim.name != "first" order by sample_dim.name desc
-cube select sample_dim.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00') order by sample_dim.name limit 2
-cube select sample_dim.name, measure4 from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00') order by sample_dim.name limit 2
-cube select sample_dim.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01') order by sample_dim.name limit 2
-cube select sample_dim.name, measure3 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00') and sample_dim.name != "first" limit 2
-cube select sample_dim.name, measure3 from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00') and sample_dim.name != "first" limit 2
-cube select sample_dim.name, measure3 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01') and sample_dim.name != "first" limit 2
-cube select sample_dim.name, measure2 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00') and sample_dim.name != "first" order by sample_dim.name limit 2
-cube select sample_dim.name, measure2 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00') and sample_dim.name != "first" order by sample_dim.name desc limit 2
-cube select sample_dim.name, measure2 from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00') and sample_dim.name != "first" order by sample_dim.name limit 2
-cube select sample_dim.name, measure2 from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00') and sample_dim.name != "first" order by sample_dim.name desc limit 2
-cube select sample_dim.name, measure2 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01') and sample_dim.name != "first" order by sample_dim.name limit 2
-cube select sample_dim.name, measure2 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01') and sample_dim.name != "first" order by sample_dim.name desc limit 2
+cube select sample_dim_chain.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00')
+cube select sample_dim_chain.name, measure4 from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00')
+cube select sample_dim_chain.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01')
+cube select sample_dim_chain.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00') order by sample_dim_chain.name
+cube select sample_dim_chain.name, measure4 from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00') order by sample_dim_chain.name
+cube select sample_dim_chain.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01') order by sample_dim_chain.name
+cube select sample_dim_chain.name, measure3 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00') and sample_dim_chain.name != "first"
+cube select sample_dim_chain.name, measure3 from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00') and sample_dim_chain.name != "first"
+cube select sample_dim_chain.name, measure3 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01') and sample_dim_chain.name != "first"
+cube select sample_dim_chain.name, measure2 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00') and sample_dim_chain.name != "first" order by sample_dim_chain.name
+cube select sample_dim_chain.name, measure2 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00') and sample_dim_chain.name != "first" order by sample_dim_chain.name desc
+cube select sample_dim_chain.name, measure2 from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00') and sample_dim_chain.name != "first" order by sample_dim_chain.name
+cube select sample_dim_chain.name, measure2 from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00') and sample_dim_chain.name != "first" order by sample_dim_chain.name desc
+cube select sample_dim_chain.name, measure2 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01') and sample_dim_chain.name != "first" order by sample_dim_chain.name
+cube select sample_dim_chain.name, measure2 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01') and sample_dim_chain.name != "first" order by sample_dim_chain.name desc
+cube select sample_dim_chain.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00') order by sample_dim_chain.name limit 2
+cube select sample_dim_chain.name, measure4 from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00') order by sample_dim_chain.name limit 2
+cube select sample_dim_chain.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01') order by sample_dim_chain.name limit 2
+cube select sample_dim_chain.name, measure3 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00') and sample_dim_chain.name != "first" limit 2
+cube select sample_dim_chain.name, measure3 from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00') and sample_dim_chain.name != "first" limit 2
+cube select sample_dim_chain.name, measure3 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01') and sample_dim_chain.name != "first" limit 2
+cube select sample_dim_chain.name, measure2 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00') and sample_dim_chain.name != "first" order by sample_dim_chain.name limit 2
+cube select sample_dim_chain.name, measure2 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00') and sample_dim_chain.name != "first" order by sample_dim_chain.name desc limit 2
+cube select sample_dim_chain.name, measure2 from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00') and sample_dim_chain.name != "first" order by sample_dim_chain.name limit 2
+cube select sample_dim_chain.name, measure2 from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00') and sample_dim_chain.name != "first" order by sample_dim_chain.name desc limit 2
+cube select sample_dim_chain.name, measure2 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01') and sample_dim_chain.name != "first" order by sample_dim_chain.name limit 2
+cube select sample_dim_chain.name, measure2 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01') and sample_dim_chain.name != "first" order by sample_dim_chain.name desc limit 2
 cube select dim1, dim2, measure1, measure2 from sample_cube where time_range_in(dt, '2014-06-25-20', '2014-06-26-02')
 cube select dim1, dim2, measure1, measure2 from sample_cube where time_range_in(dt, '2014-06-25-20', '2014-06-26-02') order by dim2
 cube select dim1, dim2, measure1, measure2 from sample_cube where time_range_in(dt, '2014-06-25-20', '2014-06-26-02') order by dim2 desc
@@ -67,23 +67,23 @@ cube select dim1, sum(measure2) from sample_cube where time_range_in(dt, '2014-0
 cube select dim3, max(measure3) from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00')
 cube select dim3, max(measure3) from sample_cube where time_range_in(dt, '2014-06-25-00', '2014-06-26-00')
 cube select dim3, max(measure3) from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-26-01')
-cube select sample_dim.name, measure4 from sample_cube join sample_dim where time_range_in(dt, '2014-06-24-23', '2014-06-25-00')
-cube select sample_dim.name, measure4 from sample_cube left outer join sample_dim where time_range_in(dt, '2014-06-25-00', '2014-06-26-00')
-cube select sample_dim.name, measure4 from sample_cube right outer join sample_dim where time_range_in(dt, '2014-06-24-23', '2014-06-26-01')
-cube select sample_dim.name, measure4 from sample_cube full outer join sample_dim where time_range_in(dt, '2014-06-24-23', '2014-06-26-01')
-select * from (cube select sample_dim.name, measure4 from sample_cube join sample_dim where time_range_in(dt, '2014-06-24-23', '2014-06-25-00') ) a
+cube select sample_dim.name, measure4 from sample_cube join sample_dim on sample_cube.dim3=sample_dim.id where time_range_in(dt, '2014-06-24-23', '2014-06-25-00')
+cube select sample_dim.name, measure4 from sample_cube left outer join sample_dim on sample_cube.dim3=sample_dim.id where time_range_in(dt, '2014-06-25-00', '2014-06-26-00')
+cube select sample_dim.name, measure4 from sample_cube right outer join sample_dim on sample_cube.dim3=sample_dim.id where time_range_in(dt, '2014-06-24-23', '2014-06-26-01')
+cube select sample_dim.name, measure4 from sample_cube full outer join sample_dim on sample_cube.dim3=sample_dim.id where time_range_in(dt, '2014-06-24-23', '2014-06-26-01')
+select * from (cube select sample_dim.name, measure4 from sample_cube join sample_dim on sample_cube.dim3=sample_dim.id where time_range_in(dt, '2014-06-24-23', '2014-06-25-00') ) a
 drop table temp1
-create table temp1 as cube select sample_dim.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00')
+create table temp1 as cube select sample_dim_chain.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00')
 select * from temp1
-insert overwrite local directory '/tmp/example-cube-output' cube select sample_dim.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00')
-insert overwrite local directory '/tmp/example-cube-output2' ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' WITH SERDEPROPERTIES ('serialization.null.format'='-NA-','field.delim'=','  ) STORED AS TEXTFILE cube select sample_dim.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00')
+insert overwrite local directory '/tmp/example-cube-output' cube select sample_dim_chain.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00')
+insert overwrite local directory '/tmp/example-cube-output2' ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' WITH SERDEPROPERTIES ('serialization.null.format'='-NA-','field.delim'=','  ) STORED AS TEXTFILE cube select sample_dim_chain.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00')
 drop table temp2
 create table temp2(name string, msr4 float)
-insert overwrite table temp2 cube select sample_dim.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00')
+insert overwrite table temp2 cube select sample_dim_chain.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00')
 select * from temp2
 drop table temp3
 create table temp3(name string, msr4 float) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' WITH SERDEPROPERTIES ('serialization.null.format'='-NA-','field.delim'=','  ) STORED AS TEXTFILE
-insert overwrite table temp3 cube select sample_dim.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00')
+insert overwrite table temp3 cube select sample_dim_chain.name, measure4 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00')
 select * from temp3
 cube select product_id, store_sales from sales where time_range_in(order_time, '2015-04-11-00', '2015-04-13-00')
 cube select product_id, store_sales from sales where time_range_in(order_time, '2015-04-11-00', '2015-04-13-01')

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-examples/src/main/resources/customer.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/customer.xml b/lens-examples/src/main/resources/customer.xml
index 920fd49..c6182a2 100644
--- a/lens-examples/src/main/resources/customer.xml
+++ b/lens-examples/src/main/resources/customer.xml
@@ -32,9 +32,7 @@
     <dim_attribute name="city_id" type="INT" />
     <dim_attribute name="customer_city_name" type="string" description="City name to which the customer belongs"
       display_string="Customer City">
-      <ref_spec>
-        <chain_ref_column chain_name="customer_city" ref_col="name" />
-      </ref_spec>
+      <chain_ref_column chain_name="customer_city" ref_col="name" />
     </dim_attribute>
     <dim_attribute name="customer_credit_status" type="STRING" start_time='2015-03-01T00:00:00'/>
   </attributes>

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-examples/src/main/resources/dimension-queries.sql
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/dimension-queries.sql b/lens-examples/src/main/resources/dimension-queries.sql
index 7aff639..a5f51d9 100644
--- a/lens-examples/src/main/resources/dimension-queries.sql
+++ b/lens-examples/src/main/resources/dimension-queries.sql
@@ -32,14 +32,14 @@ cube select id,name from sample_dim where name != 'first' order by name desc lim
 cube select count(id) from sample_dim
 cube select count(id) from sample_dim group by name
 cube select count(distinct id) from sample_dim
-cube select sample_dim.name, sample_dim2.name from sample_dim
-cube select sample_dim.name, sample_dim2.name from sample_dim join sample_dim2
-cube select sample_dim.name, sample_dim2.name from sample_dim left outer join sample_dim2
-cube select sample_dim.name, sample_dim2.name from sample_dim right outer join sample_dim2
-cube select sample_dim.name, sample_dim2.name from sample_dim full outer join sample_dim2
+cube select sample_dim.name, sample_dim2_chain.name from sample_dim
+cube select sample_dim.name, sample_dim2.name from sample_dim join sample_dim2 on sample_dim.d2id=sample_dim2.id
+cube select sample_dim.name, sample_dim2.name from sample_dim left outer join sample_dim2 on sample_dim.d2id=sample_dim2.id
+cube select sample_dim.name, sample_dim2.name from sample_dim right outer join sample_dim2 on sample_dim.d2id=sample_dim2.id
+cube select sample_dim.name, sample_dim2.name from sample_dim full outer join sample_dim2 on sample_dim.d2id=sample_dim2.id
 cube select count(id) from sample_dim where name != "first"
 cube select count(distinct id) from sample_dim where name != "first"
-cube select sample_dim.name, sample_dim2.name from sample_dim where sample_dim.name != 'first'
+cube select sample_dim.name, sample_dim2_chain.name from sample_dim where sample_dim.name != 'first'
 cube select id,name from sample_db_dim
 cube select id,name from sample_db_dim where name != 'first'
 cube select id,name from sample_db_dim order by name
@@ -55,7 +55,7 @@ cube select id,name from sample_db_dim where name != 'first' order by name desc
 cube select count(id) from sample_db_dim
 cube select count(id) from sample_db_dim group by name
 cube select count(distinct id) from sample_db_dim
-select * from (cube select sample_dim.name name1, sample_dim2.name name2 from sample_dim where sample_dim.name != 'first') a
+select * from (cube select sample_dim.name name1, sample_dim2_chain.name name2 from sample_dim where sample_dim.name !='first') a
 drop table temp1
 create table temp1 as cube select id,name from sample_dim
 select * from temp1

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-examples/src/main/resources/sales-cube.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sales-cube.xml b/lens-examples/src/main/resources/sales-cube.xml
index 7ec2ec7..e9e48ce 100644
--- a/lens-examples/src/main/resources/sales-cube.xml
+++ b/lens-examples/src/main/resources/sales-cube.xml
@@ -56,17 +56,13 @@
     <dim_attribute name="delivery_city_id" type="INT" />
     <dim_attribute name="customer_city_name" type="string" description="City name to which the customer belongs"
                    display_string="Customer City">
-      <ref_spec>
-        <chain_ref_column chain_name="customer_city" ref_col="name" />
-      </ref_spec>
+      <chain_ref_column chain_name="customer_city" ref_col="name" />
     </dim_attribute>
     <dim_attribute name="production_location">
       <hierarchy>
       <dim_attribute name="production_city_name" type="STRING" description="City name in which the product was produced"
                      display_string="Production City">
-        <ref_spec>
-          <chain_ref_column chain_name="production_city" ref_col="name" />
-        </ref_spec>
+        <chain_ref_column chain_name="production_city" ref_col="name" />
       </dim_attribute>
       <dim_attribute name="production_state" type="STRING" description="State name in which the product was produced"
                      display_string="Production State"/>
@@ -76,9 +72,7 @@
     </dim_attribute>
     <dim_attribute name="delivery_city_name" type="STRING" description="City name to which the product was delivered"
                    display_string="Delivery City">
-      <ref_spec>
-        <chain_ref_column chain_name="delivery_city" ref_col="name" />
-      </ref_spec>
+      <chain_ref_column chain_name="delivery_city" ref_col="name" />
     </dim_attribute>
   </dim_attributes>
   <expressions>

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-examples/src/main/resources/sample-cube.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sample-cube.xml b/lens-examples/src/main/resources/sample-cube.xml
index 7b784ea..3b5abea 100644
--- a/lens-examples/src/main/resources/sample-cube.xml
+++ b/lens-examples/src/main/resources/sample-cube.xml
@@ -33,12 +33,7 @@
   <dim_attributes>
     <dim_attribute name="dim1" type="INT"/>
     <dim_attribute name="dim2" type="INT" start_time='2013-12-01T00:00:00'/>
-    <dim_attribute name="dim3" type="INT" join_key="true">
-      <ref_spec>
-        <table_references>
-          <table_reference table="sample_dim" column="id"/>
-        </table_references>
-      </ref_spec>
+    <dim_attribute name="dim3" type="INT">
     </dim_attribute>
   </dim_attributes>
   <expressions>
@@ -47,4 +42,18 @@
       <expr_spec expr = "measure3 + measure4 + 0.01" end_time='2013-12-12T00:00:00'/>
     </expression>
   </expressions>
+  <join_chains>
+    <join_chain name="sample_dim_chain">
+      <paths>
+        <path>
+          <edges>
+            <edge>
+              <from table="sample_cube" column="dim3" />
+              <to table="sample_dim" column="id" />
+            </edge>
+          </edges>
+        </path>
+      </paths>
+    </join_chain>
+  </join_chains>
 </x_base_cube>

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-examples/src/main/resources/sample-db-only-dimension.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sample-db-only-dimension.xml b/lens-examples/src/main/resources/sample-db-only-dimension.xml
index 4c6bec6..2e8aa64 100644
--- a/lens-examples/src/main/resources/sample-db-only-dimension.xml
+++ b/lens-examples/src/main/resources/sample-db-only-dimension.xml
@@ -25,14 +25,24 @@
     <dim_attribute name="id" type="INT"/>
     <dim_attribute name="name" type="STRING"/>
     <dim_attribute name="detail" type="STRING" start_time='2013-12-01T00:00:00'/>
-    <dim_attribute name="d2id" type="INT" start_time='2013-12-01T00:00:00' join_key="true">
-      <ref_spec>
-        <table_references>
-          <table_reference table="sample_dim2" column="id"/>
-        </table_references>
-      </ref_spec>
-    </dim_attribute>
+    <dim_attribute name="d2id" type="INT" start_time='2013-12-01T00:00:00'/>
   </attributes>
+
+  <join_chains>
+    <join_chain name="sample_dim2_chain">
+      <paths>
+        <path>
+          <edges>
+            <edge>
+              <from table="sample_db_dim" column="d2id" />
+              <to table="sample_dim2" column="id" />
+            </edge>
+          </edges>
+        </path>
+      </paths>
+    </join_chain>
+  </join_chains>
+
   <properties>
     <property name="dimension.sample_db_dim.timed.dimension" value="dt"/>
   </properties>

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-examples/src/main/resources/sample-dimension.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/sample-dimension.xml b/lens-examples/src/main/resources/sample-dimension.xml
index 9b97da7..3c2589c 100644
--- a/lens-examples/src/main/resources/sample-dimension.xml
+++ b/lens-examples/src/main/resources/sample-dimension.xml
@@ -25,14 +25,24 @@
     <dim_attribute name="id" type="INT"/>
     <dim_attribute name="name" type="STRING"/>
     <dim_attribute name="detail" type="STRING" start_time='2013-12-01T00:00:00'/>
-    <dim_attribute name="d2id" type="INT" start_time='2013-12-01T00:00:00' join_key="true">
-      <ref_spec>
-        <table_references>
-          <table_reference table="sample_dim2" column="id"/>
-        </table_references>
-      </ref_spec>
-    </dim_attribute>
+    <dim_attribute name="d2id" type="INT" start_time='2013-12-01T00:00:00'/>
   </attributes>
+
+  <join_chains>
+    <join_chain name="sample_dim2_chain">
+      <paths>
+        <path>
+          <edges>
+            <edge>
+              <from table="sample_dim" column="d2id" />
+              <to table="sample_dim2" column="id" />
+            </edge>
+          </edges>
+        </path>
+      </paths>
+    </join_chain>
+  </join_chains>
+
   <properties>
     <property name="dimension.sample_dim.timed.dimension" value="dt"/>
   </properties>

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
index 817c84c..70323d3 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
@@ -30,7 +30,7 @@ import javax.xml.datatype.XMLGregorianCalendar;
 import org.apache.lens.api.metastore.*;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.ExprColumn.ExprSpec;
-import org.apache.lens.cube.metadata.ReferencedDimAtrribute.ChainRefCol;
+import org.apache.lens.cube.metadata.ReferencedDimAttribute.ChainRefCol;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.hive.metastore.TableType;
@@ -46,6 +46,7 @@ import org.apache.hadoop.mapred.InputFormat;
 
 import com.google.common.base.Optional;
 import com.google.common.collect.Maps;
+
 import lombok.extern.slf4j.Slf4j;
 
 /**
@@ -159,7 +160,7 @@ public final class JAXBUtils {
    * @param xd
    * @return {@link org.apache.lens.cube.metadata.CubeDimAttribute}
    */
-  public static CubeDimAttribute hiveDimAttrFromXDimAttr(XDimAttribute xd) {
+  public static CubeDimAttribute hiveDimAttrFromXDimAttr(XDimAttribute xd) throws LensException {
     Date startDate = getDateFromXML(xd.getStartTime());
     Date endDate = getDateFromXML(xd.getEndTime());
 
@@ -171,33 +172,12 @@ public final class JAXBUtils {
         hierarchy.add(hiveDimAttrFromXDimAttr(hd));
       }
       hiveDim = new HierarchicalDimAttribute(xd.getName(), xd.getDescription(), hierarchy);
-    } else if (xd.getRefSpec() != null && xd.getRefSpec().getTableReferences() != null
-      && !xd.getRefSpec().getTableReferences().getTableReference().isEmpty()) {
-
-      List<TableReference> dimRefs = new ArrayList<TableReference>(
-        xd.getRefSpec().getTableReferences().getTableReference().size());
-
-      for (XTableReference xRef : xd.getRefSpec().getTableReferences().getTableReference()) {
-        dimRefs.add(new TableReference(xRef.getTable(), xRef.getColumn(), xRef.isMapsToMany()));
-      }
-
-      hiveDim = new ReferencedDimAtrribute(new FieldSchema(xd.getName(), xd.getType().toLowerCase(),
-        xd.getDescription()),
-        xd.getDisplayString(),
-        dimRefs,
-        startDate,
-        endDate,
-        null,
-        xd.isJoinKey(),
-        xd.getNumDistinctValues(),
-        xd.getValues()
-      );
-    } else if (xd.getRefSpec() != null && xd.getRefSpec().getChainRefColumn() != null
-      && !xd.getRefSpec().getChainRefColumn().isEmpty()) {
-      hiveDim = new ReferencedDimAtrribute(new FieldSchema(xd.getName(), xd.getType().toLowerCase(),
+    } else if (xd.getChainRefColumn() != null
+      && !xd.getChainRefColumn().isEmpty()) {
+      hiveDim = new ReferencedDimAttribute(new FieldSchema(xd.getName(), xd.getType().toLowerCase(),
         xd.getDescription()),
         xd.getDisplayString(),
-        getChainRefColumns(xd.getRefSpec().getChainRefColumn()),
+        getChainRefColumns(xd.getChainRefColumn()),
         startDate,
         endDate,
         null,
@@ -334,9 +314,8 @@ public final class JAXBUtils {
     xd.setDisplayString(cd.getDisplayString());
     xd.setStartTime(getXMLGregorianCalendar(cd.getStartTime()));
     xd.setEndTime(getXMLGregorianCalendar(cd.getEndTime()));
-    if (cd instanceof ReferencedDimAtrribute) {
-      ReferencedDimAtrribute rd = (ReferencedDimAtrribute) cd;
-      XDimAttribute.RefSpec refspec = XCF.createXDimAttributeRefSpec();
+    if (cd instanceof ReferencedDimAttribute) {
+      ReferencedDimAttribute rd = (ReferencedDimAttribute) cd;
       if (!rd.getChainRefColumns().isEmpty()) {
         for (ChainRefCol crCol : rd.getChainRefColumns()) {
           XChainColumn xcc = new XChainColumn();
@@ -347,16 +326,9 @@ public final class JAXBUtils {
           } else {
             xcc.setDestTable(baseTable.getChainByName(crCol.getChainName()).getDestTable());
           }
-          refspec.getChainRefColumn().add(xcc);
+          xd.getChainRefColumn().add(xcc);
         }
-        xd.setJoinKey(false);
-      } else {
-        List<TableReference> dimRefs = rd.getReferences();
-        refspec.setTableReferences(new XTableReferences());
-        refspec.getTableReferences().getTableReference().addAll(xTabReferencesFromHiveTabReferences(dimRefs));
-        xd.setJoinKey(rd.useAsJoinKey());
-      }
-      xd.setRefSpec(refspec);
+      }
       xd.setType(rd.getType());
       Optional<Long> numOfDistinctValues = rd.getNumOfDistinctValues();
       if (numOfDistinctValues.isPresent()) {
@@ -663,7 +635,7 @@ public final class JAXBUtils {
     return null;
   }
 
-  public static CubeDimensionTable cubeDimTableFromDimTable(XDimensionTable dimensionTable) {
+  public static CubeDimensionTable cubeDimTableFromDimTable(XDimensionTable dimensionTable) throws LensException {
 
     return new CubeDimensionTable(dimensionTable.getDimensionName(),
       dimensionTable.getTableName(),
@@ -673,7 +645,7 @@ public final class JAXBUtils {
       mapFromXProperties(dimensionTable.getProperties()));
   }
 
-  public static CubeFactTable cubeFactFromFactTable(XFactTable fact) {
+  public static CubeFactTable cubeFactFromFactTable(XFactTable fact) throws LensException {
     List<FieldSchema> columns = fieldSchemaListFromColumns(fact.getColumns());
 
     Map<String, Set<UpdatePeriod>> storageUpdatePeriods = getFactUpdatePeriodsFromStorageTables(


[19/51] [abbrv] lens git commit: LENS-851 : Fix aliasing for non-aggregate functions in multi fact union query

Posted by de...@apache.org.
LENS-851 : Fix aliasing for non-aggregate functions in multi fact union query


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/1b475f2e
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/1b475f2e
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/1b475f2e

Branch: refs/heads/current-release-line
Commit: 1b475f2ea67a275b22248f677a08d5caec7f959a
Parents: d5e923e
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Fri Jan 8 14:53:17 2016 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Fri Jan 8 14:53:17 2016 +0530

----------------------------------------------------------------------
 .../java/org/apache/lens/cube/parse/HQLParser.java   | 15 +++++++++++++++
 .../cube/parse/SingleFactMultiStorageHQLContext.java |  2 +-
 .../org/apache/lens/cube/parse/TestCubeRewriter.java | 15 +++++++++++++++
 3 files changed, 31 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/1b475f2e/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
index 6c3d4c3..bfb65c7 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
@@ -732,6 +732,21 @@ public final class HQLParser {
     return false;
   }
 
+  public static boolean isNonAggregateFunctionAST(ASTNode node) {
+    int exprTokenType = node.getToken().getType();
+    if (exprTokenType == HiveParser.TOK_FUNCTION || exprTokenType == HiveParser.TOK_FUNCTIONDI
+      || exprTokenType == HiveParser.TOK_FUNCTIONSTAR) {
+      assert (node.getChildCount() != 0);
+      if (node.getChild(0).getType() == HiveParser.Identifier) {
+        String functionName = BaseSemanticAnalyzer.unescapeIdentifier(node.getChild(0).getText());
+        if (FunctionRegistry.getGenericUDAFResolver(functionName) == null) {
+          return true;
+        }
+      }
+    }
+    return false;
+  }
+
   /**
    * @param node an ASTNode
    * @return true when input node is a SELECT AST Node. Otherwise, false.

http://git-wip-us.apache.org/repos/asf/lens/blob/1b475f2e/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
index e531e6b..ac56328 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
@@ -185,7 +185,7 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
       outerAST.addChild(dotAST);
       innerToOuterASTs.put(new HashableASTNode(innerSelectASTWithoutAlias), outerAST);
       return outerAST;
-    } else if (isTableColumnAST(astNode)) {
+    } else if (isTableColumnAST(astNode) || isNonAggregateFunctionAST(astNode)) {
       if (innerToOuterASTs.containsKey(new HashableASTNode(astNode))) {
         return innerToOuterASTs.get(new HashableASTNode(astNode));
       }

http://git-wip-us.apache.org/repos/asf/lens/blob/1b475f2e/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index f02cdb0..61fb73c 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -417,6 +417,21 @@ public class TestCubeRewriter extends TestQueryRewrite {
       }
       conf.setBoolean(CubeQueryConfUtil.ENABLE_STORAGES_UNION, true);
 
+      hqlQuery = rewrite("select ascii(cityid) as `City ID`, msr8, msr7 as `Third measure` "
+        + "from testCube where cityid = 'a' and zipcode = 'b' and " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
+
+      expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
+        "SELECT testcube.alias0 as `City ID`, sum(testcube.alias1) + max(testcube.alias2), "
+          + "case when sum(testcube.alias1) = 0 then 0 else sum(testcube.alias3)/sum(testcube.alias1) end "
+          + "as `Third Measure`",
+        null, "group by testcube.alias0",
+        "select ascii(testcube.cityid) as `alias0`, sum(testcube.msr2) as `alias1`, "
+          + "max(testcube.msr3) as `alias2`, "
+          + "sum(case when testcube.cityid = 'x' then testcube.msr21 else testcube.msr22 end) as `alias3`",
+        "testcube.cityid = 'a' and testcube.zipcode = 'b'", "group by ascii(testcube.cityid)");
+
+      compareQueries(hqlQuery, expected);
+
       hqlQuery = rewrite("select cityid as `City ID`, msr8, msr7 as `Third measure` "
         + "from testCube where cityid = 'a' and zipcode = 'b' and " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
 


[13/51] [abbrv] lens git commit: LENS-750 : Add Hive error codes for Semantic and Authorization exceptions

Posted by de...@apache.org.
LENS-750 : Add Hive error codes for Semantic and Authorization exceptions


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/bf1053b4
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/bf1053b4
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/bf1053b4

Branch: refs/heads/current-release-line
Commit: bf1053b4a1081bd3f07d6b26337e68586404e530
Parents: c179081
Author: Deepak Barr <de...@apache.org>
Authored: Fri Dec 18 12:58:13 2015 +0530
Committer: Deepak Kumar Barr <de...@apache.org>
Committed: Fri Dec 18 12:58:13 2015 +0530

----------------------------------------------------------------------
 lens-api/src/main/resources/lens-errors.conf    | 16 +++++++-
 .../org/apache/lens/driver/hive/HiveDriver.java | 17 +++++++--
 .../lens/driver/hive/LensHiveErrorCode.java     | 36 ++++++++++++++++++
 .../server/query/QueryAPIErrorResponseTest.java |  5 ++-
 .../lens/server/query/TestQueryService.java     | 40 ++++++++++++++++++--
 5 files changed, 103 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/bf1053b4/lens-api/src/main/resources/lens-errors.conf
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/lens-errors.conf b/lens-api/src/main/resources/lens-errors.conf
index c880543..06960a0 100644
--- a/lens-api/src/main/resources/lens-errors.conf
+++ b/lens-api/src/main/resources/lens-errors.conf
@@ -317,6 +317,20 @@ lensCubeErrorsForMetastore = [
 
 ]
 
+lensHiveErrors = [
+  {
+    errorCode = 4001
+    httpStatusCode = ${BAD_REQUEST}
+    errorMsg = "Semantic Error : %s"
+  }
+
+  {
+    errorCode = 4002
+    httpStatusCode = ${INTERNAL_SERVER_ERROR}
+    errorMsg = "Hive Error : %s"
+  }
+]
+
 lensCubeErrors = ${lensCubeErrorsForQuery}${lensCubeErrorsForMetastore}
 
 # Overriding errors in lens-errors.conf via lens-errors-override.conf:
@@ -350,4 +364,4 @@ lensCubeErrors = ${lensCubeErrorsForQuery}${lensCubeErrorsForMetastore}
 # Lens server and Lens client are only aware of errors array. They are not aware of any other array defined in
 # error configuration files. Hence an errors array is prepared which is a concatenation of all other error arrays.
 
-errors = ${lensCommonErrors}${lensServerErrors}${lensCubeErrors}
+errors = ${lensCommonErrors}${lensServerErrors}${lensCubeErrors}${lensHiveErrors}

http://git-wip-us.apache.org/repos/asf/lens/blob/bf1053b4/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
index 7391f47..c7ef8f1 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
@@ -18,6 +18,7 @@
  */
 package org.apache.lens.driver.hive;
 
+import static org.apache.lens.driver.hive.LensHiveErrorCode.*;
 import static org.apache.lens.server.api.util.LensUtil.getImplementations;
 
 import java.io.ByteArrayInputStream;
@@ -508,6 +509,7 @@ public class HiveDriver extends AbstractLensDriver {
   // assuming this is only called for executing explain/insert/set/delete/etc... queries which don't ask to fetch data.
   public LensResultSet execute(QueryContext ctx) throws LensException {
     OperationHandle op = null;
+    LensResultSet result = null;
     try {
       addPersistentPath(ctx);
       Configuration qdconf = ctx.getDriverConf(this);
@@ -525,24 +527,24 @@ public class HiveDriver extends AbstractLensDriver {
       if (status.getState() == OperationState.ERROR) {
         throw new LensException("Unknown error while running query " + ctx.getUserQuery());
       }
-      LensResultSet result = createResultSet(ctx, true);
+      result = createResultSet(ctx, true);
       // close the query immediately if the result is not inmemory result set
       if (result == null || !(result instanceof HiveInMemoryResultSet)) {
         closeQuery(ctx.getQueryHandle());
       }
       // remove query handle from hiveHandles even in case of inmemory result set
       hiveHandles.remove(ctx.getQueryHandle());
-      return result;
     } catch (IOException e) {
       throw new LensException("Error adding persistent path", e);
     } catch (HiveSQLException hiveErr) {
       handleHiveServerError(ctx, hiveErr);
-      throw new LensException("Error executing query", hiveErr);
+      handleHiveSQLException(hiveErr);
     } finally {
       if (null != op) {
         opHandleToSession.remove(op);
       }
     }
+    return result;
   }
 
   /*
@@ -569,10 +571,17 @@ public class HiveDriver extends AbstractLensDriver {
       throw new LensException("Error adding persistent path", e);
     } catch (HiveSQLException e) {
       handleHiveServerError(ctx, e);
-      throw new LensException("Error executing async query", e);
+      handleHiveSQLException(e);
     }
   }
 
+  private LensException handleHiveSQLException(HiveSQLException ex) throws LensException {
+    if (ex.getMessage().contains("SemanticException")) {
+      throw new LensException(SEMANTIC_ERROR.getLensErrorInfo(), ex, ex.getMessage());
+    }
+    throw new LensException(HIVE_ERROR.getLensErrorInfo(), ex, ex.getMessage());
+  }
+
   /*
    * (non-Javadoc)
    *

http://git-wip-us.apache.org/repos/asf/lens/blob/bf1053b4/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/LensHiveErrorCode.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/LensHiveErrorCode.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/LensHiveErrorCode.java
new file mode 100644
index 0000000..3bac9e7
--- /dev/null
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/LensHiveErrorCode.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.driver.hive;
+
+import org.apache.lens.server.api.LensErrorInfo;
+
+public enum LensHiveErrorCode {
+
+  SEMANTIC_ERROR(4001, 10000), HIVE_ERROR(4002, 10000);
+
+  public LensErrorInfo getLensErrorInfo() {
+    return this.errorInfo;
+  }
+
+  LensHiveErrorCode(final int code, final int weight) {
+    this.errorInfo = new LensErrorInfo(code, weight, name());
+  }
+
+  private final LensErrorInfo errorInfo;
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/bf1053b4/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java b/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java
index 18a8c8d..69c3f46 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java
@@ -160,7 +160,8 @@ public class QueryAPIErrorResponseTest extends LensJerseyTest {
     final String testQuery = "select * from non_existing_table";
     Response response = estimate(target(), Optional.of(sessionId), Optional.of(testQuery));
 
-    final String expectedErrMsg = "Internal Server Error.";
+    final String expectedErrMsg = "Semantic Error : Error while compiling statement: "
+      + "FAILED: SemanticException [Error 10001]: Line 1:31 Table not found 'non_existing_table'";
 
     LensErrorTO childError1 = LensErrorTO.composedOf(INTERNAL_SERVER_ERROR.getValue(),
       expectedErrMsg, MOCK_STACK_TRACE);
@@ -170,7 +171,7 @@ public class QueryAPIErrorResponseTest extends LensJerseyTest {
     LensErrorTO expectedLensErrorTO = LensErrorTO.composedOf(INTERNAL_SERVER_ERROR.getValue(),
         expectedErrMsg, MOCK_STACK_TRACE, Arrays.asList(childError1, childError2));
 
-    ErrorResponseExpectedData expectedData = new ErrorResponseExpectedData(Status.INTERNAL_SERVER_ERROR,
+    ErrorResponseExpectedData expectedData = new ErrorResponseExpectedData(Status.BAD_REQUEST,
       expectedLensErrorTO);
 
     expectedData.verify(response);

http://git-wip-us.apache.org/repos/asf/lens/blob/bf1053b4/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index efef358..82afcdc 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -49,6 +49,7 @@ import org.apache.lens.api.result.LensErrorTO;
 import org.apache.lens.api.result.QueryCostTO;
 import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.driver.hive.HiveDriver;
+import org.apache.lens.driver.hive.LensHiveErrorCode;
 import org.apache.lens.server.LensJerseyTest;
 import org.apache.lens.server.LensServerTestUtil;
 import org.apache.lens.server.LensServices;
@@ -252,7 +253,7 @@ public class TestQueryService extends LensJerseyTest {
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
       MediaType.APPLICATION_XML_TYPE));
     final Response response = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
-    assertEquals(response.getStatus(), INTERNAL_SERVER_ERROR.getStatusCode());
+    assertEquals(response.getStatus(), BAD_REQUEST.getStatusCode());
   }
 
   /**
@@ -416,7 +417,7 @@ public class TestQueryService extends LensJerseyTest {
 
     final Response responseExplain = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
 
-    assertEquals(responseExplain.getStatus(), INTERNAL_SERVER_ERROR.getStatusCode());
+    assertEquals(responseExplain.getStatus(), BAD_REQUEST.getStatusCode());
 
     // Test explain and prepare
     final WebTarget ptarget = target().path("queryapi/preparedqueries");
@@ -433,7 +434,38 @@ public class TestQueryService extends LensJerseyTest {
     final Response responseExplainAndPrepare = target.request().post(
       Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
 
-    assertEquals(responseExplainAndPrepare.getStatus(), INTERNAL_SERVER_ERROR.getStatusCode());
+    assertEquals(responseExplainAndPrepare.getStatus(), BAD_REQUEST.getStatusCode());
+  }
+
+  /**
+   * Test semantic error for hive query on non-existent table.
+   *
+   * @throws IOException          Signals that an I/O exception has occurred.
+   * @throws InterruptedException the interrupted exception
+   */
+  @Test
+  public void testHiveSemanticFailure() throws InterruptedException, IOException {
+    final WebTarget target = target().path("queryapi/queries");
+
+    final FormDataMultiPart mp = new FormDataMultiPart();
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
+      MediaType.APPLICATION_XML_TYPE));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), " select ID from NOT_EXISTS"));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
+      MediaType.APPLICATION_XML_TYPE));
+
+    Response response = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
+    LensAPIResult result = response.readEntity(LensAPIResult.class);
+    List<LensErrorTO> childErrors = result.getLensErrorTO().getChildErrors();
+    boolean hiveSemanticErrorExists=false;
+    for (LensErrorTO error : childErrors) {
+      if (error.getCode() == LensHiveErrorCode.SEMANTIC_ERROR.getLensErrorInfo().getErrorCode()) {
+        hiveSemanticErrorExists = true;
+        break;
+      }
+    }
+    assertTrue(hiveSemanticErrorExists);
   }
 
   // post to preparedqueries
@@ -1154,7 +1186,7 @@ public class TestQueryService extends LensJerseyTest {
       MediaType.APPLICATION_XML_TYPE));
 
     Response response = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
-    assertEquals(response.getStatus(), INTERNAL_SERVER_ERROR.getStatusCode());
+    assertEquals(response.getStatus(), BAD_REQUEST.getStatusCode());
   }
 
   /**


[25/51] [abbrv] lens git commit: LENS-735 : Remove accepting TableReferences for ReferenceDimAttribute

Posted by de...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index ad20ae1..4366938 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -31,7 +31,7 @@ import java.util.*;
 
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.ExprColumn.ExprSpec;
-import org.apache.lens.cube.metadata.ReferencedDimAtrribute.ChainRefCol;
+import org.apache.lens.cube.metadata.ReferencedDimAttribute.ChainRefCol;
 import org.apache.lens.cube.metadata.timeline.EndsAndHolesPartitionTimeline;
 import org.apache.lens.cube.metadata.timeline.PartitionTimeline;
 import org.apache.lens.cube.metadata.timeline.StoreAllPartitionTimeline;
@@ -519,14 +519,10 @@ public class CubeTestSetup {
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("d_time", "timestamp", "d time")));
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("processing_time", "timestamp", "processing time")));
     List<CubeDimAttribute> locationHierarchy = new ArrayList<CubeDimAttribute>();
-    locationHierarchy.add(new ReferencedDimAtrribute(new FieldSchema("zipcode", "int", "zip"), "Zip refer",
-      new TableReference("zipdim", "code")));
-    locationHierarchy.add(new ReferencedDimAtrribute(new FieldSchema("cityid", "int", "city"), "City refer",
-      new TableReference("citydim", "id")));
-    locationHierarchy.add(new ReferencedDimAtrribute(new FieldSchema("stateid", "int", "state"), "State refer",
-      new TableReference("statedim", "id")));
-    locationHierarchy.add(new ReferencedDimAtrribute(new FieldSchema("countryid", "int", "country"), "Country refer",
-      new TableReference("countrydim", "id")));
+    locationHierarchy.add(new BaseDimAttribute(new FieldSchema("zipcode", "int", "zip")));
+    locationHierarchy.add(new BaseDimAttribute(new FieldSchema("cityid", "int", "city")));
+    locationHierarchy.add(new BaseDimAttribute(new FieldSchema("stateid", "int", "state")));
+    locationHierarchy.add(new BaseDimAttribute(new FieldSchema("countryid", "int", "country")));
     List<String> regions = Arrays.asList("APAC", "EMEA", "USA");
     locationHierarchy.add(new BaseDimAttribute(new FieldSchema("regionname", "string", "region"), "regionname", null,
       null, null, null, regions));
@@ -535,66 +531,41 @@ public class CubeTestSetup {
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("dim1", "string", "basedim")));
     // Added for ambiguity test
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("ambigdim1", "string", "used in testColumnAmbiguity")));
-    cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("dim2", "int", "ref dim"), "Dim2 refer",
-      new TableReference("testdim2", "id")));
-    cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("cdim2", "int", "ref dim"), "Dim2 refer",
-      new TableReference("cycledim1", "id"), NOW, null, null));
-    cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("urdimid", "int", "ref dim"), "urdim refer",
-      new TableReference("unreachableDim", "id"), null, null, null, false, 10L));
-
+    cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("dim2", "int", "ref dim"), "dim2 refer",
+      "dim2chain", "id", null, null, 0.0));
+    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("cdim2", "int", "ref dim"), "Dim2 refer", NOW, null, null));
+    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("urdimid", "int", "ref dim"), "urdim refer",
+      null, null, 10.0));
+    cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("unreachableName", "string", ""), "urdim name",
+      "unreachableDim_chain", "name", null, null, 10.0));
     // denormalized reference
-    cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("dim2big1", "bigint", "ref dim"), "Dim2 refer",
-      new TableReference("testdim2", "bigid1")));
-    cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("dim2big2", "bigint", "ref dim"), "Dim2 refer",
-      new TableReference("testdim2", "bigid2")));
-    cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("dim2bignew", "bigint", "ref dim"), "Dim2 refer",
-      new TableReference("testdim2", "bigidnew"), NOW, null, null));
-    cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("test_time_dim_hour_id", "int", "ref dim"),
-      "Timedim reference", new TableReference("hourdim", "id"), null, null, null));
-    cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("test_time_dim_day_id", "int", "ref dim"),
-      "Timedim reference", new TableReference("daydim", "id"), null, null, null));
-    // not creating test_time_dim_hour_id2 ref dim attribute to avoid the reference in schema graph for other paths
-    // the column is only defined in chain
+    cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("dim2big1", "bigint", "ref dim"), "dim2 refer",
+      "dim2chain", "bigid1", null, null, 0.0));
+    cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("dim2big2", "bigint", "ref dim"), "dim2 refer",
+      "dim2chain", "bigid2", null, null, 0.0));
+    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("dim2bignew", "bigint", "ref dim"), "Dim2 refer",
+      NOW, null, null));
+    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("test_time_dim_hour_id", "int", "ref dim"),
+      "Timedim reference", null, null, null));
+    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("test_time_dim_day_id", "int", "ref dim"),
+      "Timedim reference", null, null, null));
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("test_time_dim_hour_id2", "int", "ref dim")));
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("test_time_dim_day_id2", "int", "ref dim")));
-    cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("testdim3id", "int", "direct id to testdim3"),
-      "Timedim reference", new TableReference("testdim3", "id"), null, null, null));
-
-    List<TableReference> references = new ArrayList<TableReference>();
-    references.add(new TableReference("daydim", "full_date"));
-    references.add(new TableReference("hourdim", "full_hour"));
-    cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("test_time_dim", "date", "ref dim"),
-      "Timedim full date", references, null, null, null, false));
+    cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("testDim3id", "string", "direct id to testdim3"),
+      "dim3 refer", "dim3chain", "id", null, null, 0.0));
+    List<ChainRefCol> references = new ArrayList<>();
+    references.add(new ChainRefCol("timedatechain1", "full_date"));
+    references.add(new ChainRefCol("timehourchain1", "full_hour"));
+    cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("test_time_dim", "date", "ref dim"),
+      "Timedim full date", references, null, null, null, null));
     List<ChainRefCol> chainRefs = new ArrayList<>();
-    chainRefs.add(new ChainRefCol("timehourchain", "full_hour"));
-    chainRefs.add(new ChainRefCol("timedatechain", "full_date"));
-    cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("test_time_dim2", "date", "chained dim"),
+    chainRefs.add(new ChainRefCol("timehourchain2", "full_hour"));
+    chainRefs.add(new ChainRefCol("timedatechain2", "full_date"));
+    cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("test_time_dim2", "date", "chained dim"),
       "Timedim full date", chainRefs, null, null, null, null));
 
-    Set<JoinChain> joinchains = new HashSet<JoinChain>();
-    JoinChain timeHourChain = new JoinChain("timehourchain", "time chain", "time dim thru hour dim");
-    List<TableReference> paths = new ArrayList<TableReference>();
-    paths.add(new TableReference("testcube", "test_time_dim_hour_id2"));
-    paths.add(new TableReference("hourdim", "id"));
-    timeHourChain.addPath(paths);
-    joinchains.add(timeHourChain);
-
-    JoinChain timeDateChain = new JoinChain("timedatechain", "time chain", "time dim thru date dim");
-    paths = new ArrayList<TableReference>();
-    paths.add(new TableReference("testcube", "test_time_dim_day_id2"));
-    paths.add(new TableReference("daydim", "id"));
-    timeDateChain.addPath(paths);
-    joinchains.add(timeDateChain);
-    joinchains.add(new JoinChain("cubeState", "cube-state", "state thru cube") {
-      {
-        addPath(new ArrayList<TableReference>() {
-          {
-            add(new TableReference("basecube", "stateid"));
-            add(new TableReference("statedim", "id"));
-          }
-        });
-      }
-    });
+    Map<String, JoinChain> joinChains = new HashMap<>();
+    addCubeChains(joinChains, TEST_CUBE_NAME);
 
     exprs = new HashSet<ExprColumn>();
     exprs.add(new ExprColumn(new FieldSchema("avgmsr", "double", "avg measure"), "Avg Msr", "avg(msr1 + msr2)"));
@@ -629,9 +600,9 @@ public class CubeTestSetup {
     exprs.add(new ExprColumn(new FieldSchema("booleancut", "boolean", "a boolean expression"), "Boolean cut",
       "(dim1 != 'x' AND dim2 != 10)"));
     exprs.add(new ExprColumn(new FieldSchema("substrexpr", "string", "a sub-string expression"), "Substr expr",
-      new ExprSpec("substr(dim1, 3))", null, null), new ExprSpec("substr(ascii(testdim2.name), 3)", null, null)));
+      new ExprSpec("substr(dim1, 3))", null, null), new ExprSpec("substr(ascii(dim2chain.name), 3)", null, null)));
     exprs.add(new ExprColumn(new FieldSchema("substrexprdim2", "string", "a sub-string expression"), "Substr expr",
-      new ExprSpec("substr(dim2, 3))", null, null), new ExprSpec("substr(ascii(testdim2.name), 3)", null, null)));
+      new ExprSpec("substr(dim2, 3))", null, null), new ExprSpec("substr(ascii(dim2chain.name), 3)", null, null)));
     exprs.add(new ExprColumn(new FieldSchema("indiasubstr", "boolean", "nested sub string expression"), "Nested expr",
       "substrexpr = 'INDIA'"));
     exprs.add(new ExprColumn(new FieldSchema("refexpr", "string", "expression which facts and dimensions"),
@@ -641,9 +612,9 @@ public class CubeTestSetup {
     exprs.add(new ExprColumn(new FieldSchema("newexpr", "string", "expression which non existing colun"),
       "new measure expr", "myfun(newmeasure)"));
     exprs.add(new ExprColumn(new FieldSchema("cityAndState", "String", "city and state together"), "City and State",
-      "concat(citydim.name, \":\", statedim.name)"));
+      "concat(cubecity.name, \":\", cubestate.name)"));
     exprs.add(new ExprColumn(new FieldSchema("cityStateName", "String", "city state"), "City State",
-      "concat('CityState:', citydim.statename)"));
+      "concat('CityState:', cubecity.statename)"));
     exprs.add(new ExprColumn(new FieldSchema("cubeStateName", "String", "statename from cubestate"), "CubeState Name",
       "substr(cubestate.name, 5)"));
     exprs.add(new ExprColumn(new FieldSchema("substrdim2big1", "String", "substr of dim2big1"), "dim2big1 substr",
@@ -660,7 +631,8 @@ public class CubeTestSetup {
     cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "pt", "pt");
     cubeProperties.put(MetastoreConstants.TIMEDIM_RELATION + "d_time", "test_time_dim+[-10 days,10 days]");
 
-    client.createCube(TEST_CUBE_NAME, cubeMeasures, cubeDimensions, exprs, joinchains, cubeProperties);
+    client.createCube(TEST_CUBE_NAME, cubeMeasures, cubeDimensions, exprs, Sets.newHashSet(joinChains.values()),
+      cubeProperties);
 
     Set<String> measures = new HashSet<String>();
     measures.add("msr1");
@@ -669,6 +641,9 @@ public class CubeTestSetup {
     Set<String> dimensions = new HashSet<String>();
     dimensions.add("dim1");
     dimensions.add("dim2");
+    dimensions.add("dim2big1");
+    dimensions.add("dim2big2");
+    dimensions.add("dim2bignew");
     // Try creating derived cube with non existant dim/measures
     try{
       client.createDerivedCube(TEST_CUBE_NAME, DERIVED_CUBE_NAME,
@@ -683,10 +658,253 @@ public class CubeTestSetup {
       measures, dimensions, new HashMap<String, String>(), 5L);
   }
 
+  private void addCubeChains(Map<String, JoinChain> joinChains, final String cubeName) {
+    joinChains.put("timehourchain1", new JoinChain("timehourchain1", "time chain", "time dim thru hour dim") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "test_time_dim_hour_id"));
+            add(new TableReference("hourdim", "id"));
+          }
+        });
+      }
+    });
+    joinChains.put("timedatechain1", new JoinChain("timedatechain1", "time chain", "time dim thru date dim") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "test_time_dim_day_id"));
+            add(new TableReference("daydim", "id"));
+          }
+        });
+      }
+    });
+    joinChains.put("timehourchain2", new JoinChain("timehourchain2", "time chain", "time dim thru hour dim") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "test_time_dim_hour_id2"));
+            add(new TableReference("hourdim", "id"));
+          }
+        });
+      }
+    });
+    joinChains.put("timedatechain2", new JoinChain("timedatechain2", "time chain", "time dim thru date dim") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "test_time_dim_day_id2"));
+            add(new TableReference("daydim", "id"));
+          }
+        });
+      }
+    });
+    joinChains.put("cubeCity", new JoinChain("cubeCity", "cube-city", "city thru cube") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "cityid"));
+            add(new TableReference("citydim", "id"));
+          }
+        });
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "dim2"));
+            add(new TableReference("testdim2", "id"));
+            add(new TableReference("testdim2", "cityid"));
+            add(new TableReference("citydim", "id"));
+          }
+        });
+      }
+    });
+    joinChains.put("cubeState",  new JoinChain("cubeState", "cube-state", "state thru cube") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "stateid"));
+            add(new TableReference("statedim", "id"));
+          }
+        });
+      }
+    });
+    joinChains.put("cubeZip",  new JoinChain("cubeZip", "cube-zip", "Zipcode thru cube") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "zipcode"));
+            add(new TableReference("zipdim", "code"));
+          }
+        });
+      }
+    });
+    joinChains.put("cubeCountry",  new JoinChain("cubeCountry", "cube-country", "country thru cube") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "countryid"));
+            add(new TableReference("countrydim", "id"));
+          }
+        });
+      }
+    });
+    joinChains.put("dim2chain", new JoinChain("dim2chain", "cube-testdim2", "testdim2 thru cube") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "dim2"));
+            add(new TableReference("testdim2", "id"));
+          }
+        });
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "dim2big1"));
+            add(new TableReference("testdim2", "bigid1"));
+          }
+        });
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "dim2big2"));
+            add(new TableReference("testdim2", "bigid2"));
+          }
+        });
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "dim2bignew"));
+            add(new TableReference("testdim2", "bigidnew"));
+          }
+        });
+      }
+    });
+    joinChains.put("dim3chain", new JoinChain("dim3chain", "cube-testdim3", "cyclicdim thru cube") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "dim2"));
+            add(new TableReference("testdim2", "id"));
+            add(new TableReference("testdim2", "testdim3id"));
+            add(new TableReference("testdim3", "id"));
+          }
+        });
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "dim2big1"));
+            add(new TableReference("testdim2", "bigid1"));
+            add(new TableReference("testdim2", "testdim3id"));
+            add(new TableReference("testdim3", "id"));
+          }
+        });
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "dim2big2"));
+            add(new TableReference("testdim2", "bigid2"));
+            add(new TableReference("testdim2", "testdim3id"));
+            add(new TableReference("testdim3", "id"));
+          }
+        });
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "dim2bignew"));
+            add(new TableReference("testdim2", "bigidnew"));
+            add(new TableReference("testdim2", "testdim3id"));
+            add(new TableReference("testdim3", "id"));
+          }
+        });
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "testdim3id"));
+            add(new TableReference("testdim3", "id"));
+          }
+        });
+      }
+    });
+    joinChains.put("dim4chain", new JoinChain("dim4chain", "cube-testdim3", "cyclicdim thru cube") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "dim2"));
+            add(new TableReference("testdim2", "id"));
+            add(new TableReference("testdim2", "testdim3id"));
+            add(new TableReference("testdim3", "id"));
+            add(new TableReference("testdim3", "testdim4id"));
+            add(new TableReference("testdim4", "id"));
+          }
+        });
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "dim2big1"));
+            add(new TableReference("testdim2", "bigid1"));
+            add(new TableReference("testdim2", "testdim3id"));
+            add(new TableReference("testdim3", "id"));
+            add(new TableReference("testdim3", "testdim4id"));
+            add(new TableReference("testdim4", "id"));
+          }
+        });
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "dim2big2"));
+            add(new TableReference("testdim2", "bigid2"));
+            add(new TableReference("testdim2", "testdim3id"));
+            add(new TableReference("testdim3", "id"));
+            add(new TableReference("testdim3", "testdim4id"));
+            add(new TableReference("testdim4", "id"));
+          }
+        });
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "dim2bignew"));
+            add(new TableReference("testdim2", "bigidnew"));
+            add(new TableReference("testdim2", "testdim3id"));
+            add(new TableReference("testdim3", "id"));
+            add(new TableReference("testdim3", "testdim4id"));
+            add(new TableReference("testdim4", "id"));
+          }
+        });
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "testdim3id"));
+            add(new TableReference("testdim3", "id"));
+            add(new TableReference("testdim3", "testdim4id"));
+            add(new TableReference("testdim4", "id"));
+          }
+        });
+      }
+    });
+    joinChains.put("cdimChain", new JoinChain("cdimChain", "cube-cyclicdim", "cyclicdim thru cube") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "cdim2"));
+            add(new TableReference("cycledim1", "id"));
+          }
+        });
+      }
+    });
+    joinChains.put("unreachableDim_chain", new JoinChain("unreachableDim_chain", "cube-unreachableDim",
+      "unreachableDim thru cube") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "urdimid"));
+            add(new TableReference("unreachableDim", "id"));
+          }
+        });
+      }
+    });
+    joinChains.put("cubeCountry",  new JoinChain("cubeCountry", "cube-country", "country thru cube") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "countryid"));
+            add(new TableReference("countrydim", "id"));
+          }
+        });
+      }
+    });
+  }
   private void createBaseAndDerivedCubes(CubeMetastoreClient client)
     throws HiveException, ParseException, LensException {
-    Set<CubeMeasure> cubeMeasures2 = new HashSet<CubeMeasure>(cubeMeasures);
-    Set<CubeDimAttribute> cubeDimensions2 = new HashSet<CubeDimAttribute>(cubeDimensions);
+    Set<CubeMeasure> cubeMeasures2 = new HashSet<>(cubeMeasures);
+    Set<CubeDimAttribute> cubeDimensions2 = new HashSet<>(cubeDimensions);
     cubeMeasures2.add(new ColumnMeasure(new FieldSchema("msr11", "int", "first measure")));
     cubeMeasures2.add(new ColumnMeasure(new FieldSchema("msr12", "float", "second measure"), "Measure2", null, "SUM",
       "RS"));
@@ -696,15 +914,15 @@ public class CubeTestSetup {
       "COUNT", null));
 
     cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("dim11", "string", "basedim")));
-    cubeDimensions2.add(new ReferencedDimAtrribute(new FieldSchema("dim12", "int", "ref dim"), "Dim2 refer",
-      new TableReference("testdim2", "id")));
-    cubeDimensions2.add(new ReferencedDimAtrribute(new FieldSchema("dim22", "int", "ref dim"), "Dim2 refer",
-      "dim2chain", "id", null, null, null));
+    cubeDimensions2.add(new ReferencedDimAttribute(new FieldSchema("dim12", "int", "ref dim"), "Dim2 refer",
+      "dim2chain", "id", null, null, null)); // used as key in the chains
+    cubeDimensions2.add(new ReferencedDimAttribute(new FieldSchema("dim22", "int", "ref dim"), "Dim2 refer",
+      "dim2chain", "id", null, null, null)); // not used as key in the chains
     cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("userid", "int", "userid")));
     cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("xuserid", "int", "userid")));
     cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("yuserid", "int", "userid")));
 
-    Map<String, String> cubeProperties = new HashMap<String, String>();
+    Map<String, String> cubeProperties = new HashMap<>();
     cubeProperties.put(MetastoreUtil.getCubeTimedDimensionListKey(BASE_CUBE_NAME),
       "d_time,pt,it,et,test_time_dim,test_time_dim2");
     cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "test_time_dim", "ttd");
@@ -717,166 +935,170 @@ public class CubeTestSetup {
     cubeProperties.put(MetastoreConstants.TIMEDIM_RELATION + "processing_time", "test_time_dim+[-5 days,5 days]");
     cubeProperties.put(MetastoreConstants.CUBE_ALL_FIELDS_QUERIABLE, "false");
 
-    Set<JoinChain> joinchains = new HashSet<JoinChain>() {
+    Map<String, JoinChain> joinChainMap = new HashMap<>();
+    addCubeChains(joinChainMap, "basecube");
+    // update new paths
+    joinChainMap.get("dim2chain").addPath(new ArrayList<TableReference>() {
       {
-        add(new JoinChain("cityState", "city-state", "state thru city") {
-          {
-            addPath(new ArrayList<TableReference>() {
-              {
-                add(new TableReference("basecube", "cityid"));
-                add(new TableReference("citydim", "id"));
-                add(new TableReference("citydim", "stateid"));
-                add(new TableReference("statedim", "id"));
-              }
-            });
-            addPath(new ArrayList<TableReference>() {
-              {
-                add(new TableReference("basecube", "cityid"));
-                add(new TableReference("citydim", "id"));
-                add(new TableReference("citydim", "statename"));
-                add(new TableReference("statedim", "name"));
-              }
-            });
-          }
-        });
-        add(new JoinChain("cubeState", "cube-state", "state thru cube") {
+        add(new TableReference("basecube", "dim12"));
+        add(new TableReference("testdim2", "id"));
+      }
+    });
+    joinChainMap.get("dim3chain").addPath(new ArrayList<TableReference>() {
+      {
+        add(new TableReference("basecube", "dim12"));
+        add(new TableReference("testdim2", "id"));
+        add(new TableReference("testdim2", "testdim3id"));
+        add(new TableReference("testdim3", "id"));
+      }
+    });
+    joinChainMap.get("dim4chain").addPath(new ArrayList<TableReference>() {
+      {
+        add(new TableReference("basecube", "dim12"));
+        add(new TableReference("testdim2", "id"));
+        add(new TableReference("testdim2", "testdim3id"));
+        add(new TableReference("testdim3", "id"));
+        add(new TableReference("testdim3", "testdim4id"));
+        add(new TableReference("testdim4", "id"));
+      }
+    });
+    Set<JoinChain> joinChains = Sets.newHashSet(joinChainMap.values());
+    joinChains.add(new JoinChain("cityState", "city-state", "state thru city") {
+      {
+        addPath(new ArrayList<TableReference>() {
           {
-            addPath(new ArrayList<TableReference>() {
-              {
-                add(new TableReference("basecube", "stateid"));
-                add(new TableReference("statedim", "id"));
-              }
-            });
+            add(new TableReference("basecube", "cityid"));
+            add(new TableReference("citydim", "id"));
+            add(new TableReference("citydim", "stateid"));
+            add(new TableReference("statedim", "id"));
           }
         });
-        add(new JoinChain("cityZip", "city-zip", "zip thru city") {
+        addPath(new ArrayList<TableReference>() {
           {
-            addPath(new ArrayList<TableReference>() {
-              {
-                add(new TableReference("basecube", "cityid"));
-                add(new TableReference("citydim", "id"));
-                add(new TableReference("citydim", "zipcode"));
-                add(new TableReference("zipdim", "code"));
-              }
-            });
+            add(new TableReference("basecube", "cityid"));
+            add(new TableReference("citydim", "id"));
+            add(new TableReference("citydim", "statename"));
+            add(new TableReference("statedim", "name"));
           }
         });
-        add(new JoinChain("cubeStateCountry", "cube-state-country", "country through state") {
+      }
+    });
+    joinChains.add(new JoinChain("cityZip", "city-zip", "zip thru city") {
+      {
+        addPath(new ArrayList<TableReference>() {
           {
-            addPath(new ArrayList<TableReference>() {
-              {
-                add(new TableReference("basecube", "stateid"));
-                add(new TableReference("statedim", "id"));
-                add(new TableReference("statedim", "countryid"));
-                add(new TableReference("countrydim", "id"));
-              }
-            });
+            add(new TableReference("basecube", "cityid"));
+            add(new TableReference("citydim", "id"));
+            add(new TableReference("citydim", "zipcode"));
+            add(new TableReference("zipdim", "code"));
           }
         });
-        add(new JoinChain("cubeCityStateCountry", "cube-city-state-country", "country through state through city") {
+      }
+    });
+    joinChains.add(new JoinChain("cubeStateCountry", "cube-state-country", "country through state") {
+      {
+        addPath(new ArrayList<TableReference>() {
           {
-            addPath(new ArrayList<TableReference>() {
-              {
-                add(new TableReference("basecube", "cityid"));
-                add(new TableReference("citydim", "id"));
-                add(new TableReference("citydim", "stateid"));
-                add(new TableReference("statedim", "id"));
-                add(new TableReference("statedim", "countryid"));
-                add(new TableReference("countrydim", "id"));
-              }
-            });
+            add(new TableReference("basecube", "stateid"));
+            add(new TableReference("statedim", "id"));
+            add(new TableReference("statedim", "countryid"));
+            add(new TableReference("countrydim", "id"));
           }
         });
-        add(new JoinChain("dim2chain", "dim2chain", "dim2chain") {
+      }
+    });
+    joinChains.add(new JoinChain("cubeCityStateCountry", "cube-city-state-country", "country through state thru city") {
+      {
+        addPath(new ArrayList<TableReference>() {
           {
-            addPath(new ArrayList<TableReference>() {
-              {
-                add(new TableReference("basecube", "dim2"));
-                add(new TableReference("testdim2", "id"));
-              }
-            });
+            add(new TableReference("basecube", "cityid"));
+            add(new TableReference("citydim", "id"));
+            add(new TableReference("citydim", "stateid"));
+            add(new TableReference("statedim", "id"));
+            add(new TableReference("statedim", "countryid"));
+            add(new TableReference("countrydim", "id"));
           }
         });
-        add(new JoinChain("userSports", "user-sports", "user sports") {
+      }
+    });
+    joinChains.add(new JoinChain("userSports", "user-sports", "user sports") {
+      {
+        addPath(new ArrayList<TableReference>() {
           {
-            addPath(new ArrayList<TableReference>() {
-              {
-                add(new TableReference("basecube", "userid"));
-                add(new TableReference("userdim", "id"));
-                add(new TableReference("userdim", "id"));
-                add(new TableReference("user_interests", "user_id", true));
-                add(new TableReference("user_interests", "sport_id"));
-                add(new TableReference("sports", "id"));
-              }
-            });
+            add(new TableReference("basecube", "userid"));
+            add(new TableReference("userdim", "id"));
+            add(new TableReference("userdim", "id"));
+            add(new TableReference("user_interests", "user_id", true));
+            add(new TableReference("user_interests", "sport_id"));
+            add(new TableReference("sports", "id"));
           }
         });
-        add(new JoinChain("userInterestIds", "user-interestsIds", "user interest ids") {
+      }
+    });
+    joinChains.add(new JoinChain("userInterestIds", "user-interestsIds", "user interest ids") {
+      {
+        addPath(new ArrayList<TableReference>() {
           {
-            addPath(new ArrayList<TableReference>() {
-              {
-                add(new TableReference("basecube", "userid"));
-                add(new TableReference("userdim", "id"));
-                add(new TableReference("userdim", "id"));
-                add(new TableReference("user_interests", "user_id", true));
-              }
-            });
+            add(new TableReference("basecube", "userid"));
+            add(new TableReference("userdim", "id"));
+            add(new TableReference("userdim", "id"));
+            add(new TableReference("user_interests", "user_id", true));
           }
         });
-        add(new JoinChain("xuserSports", "xuser-sports", "xuser sports") {
+      }
+    });
+    joinChains.add(new JoinChain("xuserSports", "xuser-sports", "xuser sports") {
+      {
+        addPath(new ArrayList<TableReference>() {
           {
-            addPath(new ArrayList<TableReference>() {
-              {
-                add(new TableReference("basecube", "xuserid"));
-                add(new TableReference("userdim", "id"));
-                add(new TableReference("userdim", "id"));
-                add(new TableReference("user_interests", "user_id", true));
-                add(new TableReference("user_interests", "sport_id"));
-                add(new TableReference("sports", "id"));
-              }
-            });
+            add(new TableReference("basecube", "xuserid"));
+            add(new TableReference("userdim", "id"));
+            add(new TableReference("userdim", "id"));
+            add(new TableReference("user_interests", "user_id", true));
+            add(new TableReference("user_interests", "sport_id"));
+            add(new TableReference("sports", "id"));
           }
         });
-        add(new JoinChain("yuserSports", "user-sports", "user sports") {
+      }
+    });
+    joinChains.add(new JoinChain("yuserSports", "user-sports", "user sports") {
+      {
+        addPath(new ArrayList<TableReference>() {
           {
-            addPath(new ArrayList<TableReference>() {
-              {
-                add(new TableReference("basecube", "yuserid"));
-                add(new TableReference("userdim", "id"));
-                add(new TableReference("userdim", "id"));
-                add(new TableReference("user_interests", "user_id", true));
-                add(new TableReference("user_interests", "sport_id"));
-                add(new TableReference("sports", "id"));
-              }
-            });
+            add(new TableReference("basecube", "yuserid"));
+            add(new TableReference("userdim", "id"));
+            add(new TableReference("userdim", "id"));
+            add(new TableReference("user_interests", "user_id", true));
+            add(new TableReference("user_interests", "sport_id"));
+            add(new TableReference("sports", "id"));
           }
         });
       }
-    };
+    });
 
     // add ref dim through chain
     cubeDimensions2.add(
-      new ReferencedDimAtrribute(new FieldSchema("cityStateCapital", "string", "State's capital thru city"),
+      new ReferencedDimAttribute(new FieldSchema("cityStateCapital", "string", "State's capital thru city"),
         "State's capital thru city", "cityState", "capital", null, null, null));
-    client.createCube(BASE_CUBE_NAME, cubeMeasures2, cubeDimensions2, exprs, joinchains, cubeProperties);
+    client.createCube(BASE_CUBE_NAME, cubeMeasures2, cubeDimensions2, exprs, joinChains, cubeProperties);
 
     Map<String, String> derivedProperties = new HashMap<String, String>();
     derivedProperties.put(MetastoreConstants.CUBE_ALL_FIELDS_QUERIABLE, "true");
-    Set<String> measures = new HashSet<String>();
+    Set<String> measures = new HashSet<>();
     measures.add("msr1");
     measures.add("msr11");
-    Set<String> dimensions = new HashSet<String>();
+    Set<String> dimensions = new HashSet<>();
     dimensions.add("dim1");
     dimensions.add("dim11");
     dimensions.add("d_time");
     client.createDerivedCube(BASE_CUBE_NAME, DERIVED_CUBE_NAME1, measures, dimensions, derivedProperties, 5L);
 
-    measures = new HashSet<String>();
+    measures = new HashSet<>();
     measures.add("msr2");
     measures.add("msr12");
     measures.add("msr13");
     measures.add("msr14");
-    dimensions = new HashSet<String>();
+    dimensions = new HashSet<>();
     dimensions.add("cityid");
     dimensions.add("stateid");
     dimensions.add("userid");
@@ -884,20 +1106,33 @@ public class CubeTestSetup {
     dimensions.add("yuserid");
     dimensions.add("dim1");
     dimensions.add("dim2");
+    dimensions.add("dim2big1");
+    dimensions.add("dim2big2");
+    dimensions.add("dim2bignew");
     dimensions.add("dim11");
     dimensions.add("dim12");
     dimensions.add("dim22");
     dimensions.add("d_time");
     dimensions.add("test_time_dim");
+    dimensions.add("test_time_dim2");
+    dimensions.add("test_time_dim_hour_id");
+    dimensions.add("test_time_dim_day_id");
+    dimensions.add("test_time_dim_hour_id2");
+    dimensions.add("test_time_dim_day_id2");
     client.createDerivedCube(BASE_CUBE_NAME, DERIVED_CUBE_NAME2, measures, dimensions, derivedProperties, 10L);
-    measures = new HashSet<String>();
+    measures = new HashSet<>();
     measures.add("msr3");
     measures.add("msr13");
-    dimensions = new HashSet<String>();
+    dimensions = new HashSet<>();
     dimensions.add("dim1");
     dimensions.add("location");
     dimensions.add("d_time");
     dimensions.add("test_time_dim");
+    dimensions.add("test_time_dim2");
+    dimensions.add("test_time_dim_hour_id");
+    dimensions.add("test_time_dim_day_id");
+    dimensions.add("test_time_dim_hour_id2");
+    dimensions.add("test_time_dim_day_id2");
     client.createDerivedCube(BASE_CUBE_NAME, DERIVED_CUBE_NAME3, measures, dimensions, derivedProperties, 20L);
 
     // create base cube facts
@@ -1301,7 +1536,7 @@ public class CubeTestSetup {
     }
   }
 
-  private void createCubeFactWeekly(CubeMetastoreClient client) throws HiveException {
+  private void createCubeFactWeekly(CubeMetastoreClient client) throws Exception {
     String factName = "testFactWeekly";
     List<FieldSchema> factColumns = new ArrayList<FieldSchema>(cubeMeasures.size());
     for (CubeMeasure measure : cubeMeasures) {
@@ -1527,7 +1762,7 @@ public class CubeTestSetup {
     }
   }
 
-  private void createCubeFactMonthly(CubeMetastoreClient client) throws HiveException {
+  private void createCubeFactMonthly(CubeMetastoreClient client) throws Exception {
     String factName = "testFactMonthly";
     List<FieldSchema> factColumns = new ArrayList<FieldSchema>(cubeMeasures.size());
     for (CubeMeasure measure : cubeMeasures) {
@@ -1561,7 +1796,7 @@ public class CubeTestSetup {
   }
 
   // DimWithTwoStorages
-  private void createCityTable(CubeMetastoreClient client) throws HiveException, ParseException, LensException {
+  private void createCityTable(CubeMetastoreClient client) throws Exception {
     Set<CubeDimAttribute> cityAttrs = new HashSet<CubeDimAttribute>();
     cityAttrs.add(new BaseDimAttribute(new FieldSchema("id", "int", "code")));
     cityAttrs.add(new BaseDimAttribute(new FieldSchema("name", "string", "city name")));
@@ -1569,24 +1804,72 @@ public class CubeTestSetup {
     cityAttrs.add(new BaseDimAttribute(new FieldSchema("ambigdim2", "string", "used in testColumnAmbiguity")));
     cityAttrs.add(new BaseDimAttribute(new FieldSchema("nocandidatecol", "string", "used in testing no"
       + " candidate available")));
-    cityAttrs.add(new ReferencedDimAtrribute(new FieldSchema("stateid", "int", "state id"), "State refer",
-      new TableReference("statedim", "id")));
-    cityAttrs.add(new ReferencedDimAtrribute(new FieldSchema("statename", "string", "state name"), "State name",
-      new TableReference("statedim", "name")));
-    cityAttrs.add(new ReferencedDimAtrribute(new FieldSchema("zipcode", "int", "zip code"), "Zip refer",
-      new TableReference("zipdim", "code")));
+    cityAttrs.add(new BaseDimAttribute(new FieldSchema("stateid", "int", "state id")));
+    cityAttrs.add(new ReferencedDimAttribute(new FieldSchema("statename", "string", "state name"), "State name",
+      "citystate", "name", null, null, null, null));
+    cityAttrs.add(new BaseDimAttribute(new FieldSchema("zipcode", "int", "zip code")));
     Map<String, String> dimProps = new HashMap<String, String>();
     dimProps.put(MetastoreUtil.getDimTimedDimensionKey("citydim"), TestCubeMetastoreClient.getDatePartitionKey());
     Set<ExprColumn> exprs = new HashSet<ExprColumn>();
     exprs.add(new ExprColumn(new FieldSchema("CityAddress", "string", "city with state and city and zip"),
       "City Address",
-      new ExprSpec("concat(citydim.name, \":\", statedim.name, \":\", countrydim.name, \":\", zipdim.code)", null,
-        null), new ExprSpec("concat(citydim.name, \":\", statedim.name)", null, null)));
+      new ExprSpec("concat(citydim.name, \":\", citystate.name, \":\", citycountry.name, \":\", cityzip.code)", null,
+        null), new ExprSpec("concat(citydim.name, \":\", citystate.name)", null, null)));
     exprs.add(new ExprColumn(new FieldSchema("CityState", "string", "city's state"),
       "City State", new ExprSpec("concat(citydim.name, \":\", citydim.statename)", null, null)));
     exprs.add(new ExprColumn(new FieldSchema("AggrExpr", "int", "count(name)"), "city count",
       new ExprSpec("count(name)", null, null)));
-    Dimension cityDim = new Dimension("citydim", cityAttrs, exprs, dimProps, 0L);
+    Set<JoinChain> joinchains = new HashSet<JoinChain>() {
+      {
+        add(new JoinChain("cityState", "city-state", "state thru city") {
+          {
+            addPath(new ArrayList<TableReference>() {
+              {
+                add(new TableReference("citydim", "stateid"));
+                add(new TableReference("statedim", "id"));
+              }
+            });
+            addPath(new ArrayList<TableReference>() {
+              {
+                add(new TableReference("citydim", "statename"));
+                add(new TableReference("statedim", "name"));
+              }
+            });
+          }
+        });
+      }
+    };
+    joinchains.add(new JoinChain("cityCountry", "cube-zip", "country thru city") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference("citydim", "stateid"));
+            add(new TableReference("statedim", "id"));
+            add(new TableReference("statedim", "countryid"));
+            add(new TableReference("countrydim", "id"));
+          }
+        });
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference("citydim", "statename"));
+            add(new TableReference("statedim", "name"));
+            add(new TableReference("statedim", "countryid"));
+            add(new TableReference("countrydim", "id"));
+          }
+        });
+      }
+    });
+    joinchains.add(new JoinChain("cityZip", "city-zip", "Zipcode thru city") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference("citydim", "zipcode"));
+            add(new TableReference("zipdim", "code"));
+          }
+        });
+      }
+    });
+    Dimension cityDim = new Dimension("citydim", cityAttrs, exprs, joinchains, dimProps, 0L);
     client.createDimension(cityDim);
 
     String dimName = "citytable";
@@ -1655,13 +1938,13 @@ public class CubeTestSetup {
 
   }
 
-  private void createTestDim2(CubeMetastoreClient client) throws HiveException {
+  private void createTestDim2(CubeMetastoreClient client) throws Exception {
     String dimName = "testDim2";
     Set<CubeDimAttribute> dimAttrs = new HashSet<CubeDimAttribute>();
 
-    Set<JoinChain> joinchains = new HashSet<JoinChain>();
+    Set<JoinChain> joinchains = new HashSet<>();
     JoinChain cityState = new JoinChain("cityState", "city-state", "state thru city");
-    List<TableReference> statePaths1 = new ArrayList<TableReference>();
+    List<TableReference> statePaths1 = new ArrayList<>();
     statePaths1.add(new TableReference("testDim2", "cityid"));
     statePaths1.add(new TableReference("citydim", "id"));
     statePaths1.add(new TableReference("citydim", "stateid"));
@@ -1674,24 +1957,64 @@ public class CubeTestSetup {
     statePaths2.add(new TableReference("statedim", "name"));
     cityState.addPath(statePaths2);
     joinchains.add(cityState);
-
-
+    joinchains.add(new JoinChain("dim2city", "dim2-city", "city thru dim2") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference("testdim2", "cityid"));
+            add(new TableReference("citydim", "id"));
+          }
+        });
+      }
+    });
+    joinchains.add(new JoinChain("dim3chain", "dim3-chain", "dim3 thru dim2") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference("testdim2", "testDim3id"));
+            add(new TableReference("testdim3", "id"));
+          }
+        });
+      }
+    });
+    joinchains.add(new JoinChain("unreachableDim_chain", "dim2-unreachableDim", "unreachableDim thru dim2") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference("testdim2", "urdimid"));
+            add(new TableReference("unreachableDim", "id"));
+          }
+        });
+      }
+    });
+    joinchains.add(new JoinChain("dim4chain", "cube-testdim3", "cyclicdim thru cube") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference("testdim2", "testdim3id"));
+            add(new TableReference("testdim3", "id"));
+            add(new TableReference("testdim3", "testdim4id"));
+            add(new TableReference("testdim4", "id"));
+          }
+        });
+      }
+    });
     dimAttrs.add(new BaseDimAttribute(new FieldSchema("id", "int", "code")));
     dimAttrs.add(new BaseDimAttribute(new FieldSchema("bigid1", "bigint", "big id")));
     dimAttrs.add(new BaseDimAttribute(new FieldSchema("bigid2", "bigint", "big id")));
     dimAttrs.add(new BaseDimAttribute(new FieldSchema("bigidnew", "bigint", "big id")));
     dimAttrs.add(new BaseDimAttribute(new FieldSchema("name", "string", "name")));
-    dimAttrs.add(new ReferencedDimAtrribute(new FieldSchema("testDim3id", "string", "f-key to testdim3"), "Dim3 refer",
-      new TableReference("testdim3", "id")));
-    dimAttrs.add(new ReferencedDimAtrribute(new FieldSchema("cityId", "string", "f-key to citydim"), "cityid",
-      new TableReference("citydim", "id")));
-    dimAttrs.add(new ReferencedDimAtrribute(new FieldSchema("cityname", "string", "name"), "cityid",
-      new TableReference("citydim", "name"), null, null, 0.0, false));
-    dimAttrs.add(new ReferencedDimAtrribute(new FieldSchema("urdimid", "int", "ref dim"), "urdim refer",
-      new TableReference("unreachableDim", "id"), null, null, null, false, 10L));
-
+    dimAttrs.add(new ReferencedDimAttribute(new FieldSchema("testDim3id", "string", "f-key to testdim3"), "dim3 refer",
+      "dim3chain", "id", null, null, 0.0));
+    dimAttrs.add(new BaseDimAttribute(new FieldSchema("cityId", "string", "f-key to citydim")));
+    dimAttrs.add(new ReferencedDimAttribute(new FieldSchema("cityname", "string", "name"), "cityname",
+      "dim2city", "name", null, null, 0.0));
+    dimAttrs.add(new BaseDimAttribute(new FieldSchema("urdimid", "int", "ref dim"), "urdim refer",
+      null, null, 10.0));
+    dimAttrs.add(new ReferencedDimAttribute(new FieldSchema("unreachableName", "string", ""), "urdim name",
+      "unreachableDim_chain", "name", null, null, 10.0));
     // add ref dim through chain
-    dimAttrs.add(new ReferencedDimAtrribute(
+    dimAttrs.add(new ReferencedDimAttribute(
       new FieldSchema("cityStateCapital", "string", "State's capital thru city"), "State's capital thru city",
       "cityState", "capital", null, null, null));
 
@@ -1753,7 +2076,7 @@ public class CubeTestSetup {
     client.createCubeDimensionTable(dimName, dimTblName, dimColumns, 20L, dumpPeriods, dimProps, storageTables);
   }
 
-  private void createTimeDims(CubeMetastoreClient client) throws HiveException {
+  private void createTimeDims(CubeMetastoreClient client) throws Exception {
     String dimName = "dayDim";
     Set<CubeDimAttribute> dimAttrs = new HashSet<CubeDimAttribute>();
     dimAttrs.add(new BaseDimAttribute(new FieldSchema("id", "int", "code")));
@@ -1812,17 +2135,30 @@ public class CubeTestSetup {
 
   }
 
-  private void createTestDim3(CubeMetastoreClient client) throws HiveException {
+  private void createTestDim3(CubeMetastoreClient client) throws Exception {
     String dimName = "testDim3";
 
     Set<CubeDimAttribute> dimAttrs = new HashSet<CubeDimAttribute>();
     dimAttrs.add(new BaseDimAttribute(new FieldSchema("id", "int", "code")));
     dimAttrs.add(new BaseDimAttribute(new FieldSchema("name", "string", "name")));
-    dimAttrs.add(new ReferencedDimAtrribute(new FieldSchema("testDim4id", "string", "f-key to testdim4"), "Dim4 refer",
-      new TableReference("testdim4", "id")));
+    dimAttrs.add(new BaseDimAttribute(new FieldSchema("testDim4id", "string", "f-key to testdim4")));
     Map<String, String> dimProps = new HashMap<String, String>();
     dimProps.put(MetastoreUtil.getDimTimedDimensionKey(dimName), TestCubeMetastoreClient.getDatePartitionKey());
-    Dimension testDim3 = new Dimension(dimName, dimAttrs, dimProps, 0L);
+    Set<JoinChain> joinchains = new HashSet<JoinChain>() {
+      {
+        add(new JoinChain("dim4chain", "dim4-chain", "dim4 thru dim3") {
+          {
+            addPath(new ArrayList<TableReference>() {
+              {
+                add(new TableReference("testdim3", "testDim4id"));
+                add(new TableReference("testdim4", "id"));
+              }
+            });
+          }
+        });
+      }
+    };
+    Dimension testDim3 = new Dimension(dimName, dimAttrs, null, joinchains, dimProps, 0L);
     client.createDimension(testDim3);
 
     String dimTblName = "testDim3Tbl";
@@ -1855,7 +2191,7 @@ public class CubeTestSetup {
     client.createCubeDimensionTable(dimName, dimTblName, dimColumns, 0L, dumpPeriods, dimProps, storageTables);
   }
 
-  private void createTestDim4(CubeMetastoreClient client) throws HiveException {
+  private void createTestDim4(CubeMetastoreClient client) throws Exception {
     String dimName = "testDim4";
 
     Set<CubeDimAttribute> dimAttrs = new HashSet<CubeDimAttribute>();
@@ -1896,17 +2232,30 @@ public class CubeTestSetup {
     client.createCubeDimensionTable(dimName, dimTblName, dimColumns, 0L, dumpPeriods, dimProps, storageTables);
   }
 
-  private void createCyclicDim1(CubeMetastoreClient client) throws HiveException {
+  private void createCyclicDim1(CubeMetastoreClient client) throws Exception {
     String dimName = "cycleDim1";
 
     Set<CubeDimAttribute> dimAttrs = new HashSet<CubeDimAttribute>();
     dimAttrs.add(new BaseDimAttribute(new FieldSchema("id", "int", "code")));
     dimAttrs.add(new BaseDimAttribute(new FieldSchema("name", "string", "name")));
-    dimAttrs.add(new ReferencedDimAtrribute(new FieldSchema("cyleDim2Id", "string", "link to cyclic dim 2"),
-      "cycle refer2", new TableReference("cycleDim2", "id")));
+    dimAttrs.add(new BaseDimAttribute(new FieldSchema("cyleDim2Id", "string", "link to cyclic dim 2")));
     Map<String, String> dimProps = new HashMap<String, String>();
     dimProps.put(MetastoreUtil.getDimTimedDimensionKey(dimName), TestCubeMetastoreClient.getDatePartitionKey());
-    Dimension cycleDim1 = new Dimension(dimName, dimAttrs, dimProps, 0L);
+    Set<JoinChain> joinchains = new HashSet<JoinChain>() {
+      {
+        add(new JoinChain("cycledim2chain", "cycledim2chain", "cycledim2chain") {
+          {
+            addPath(new ArrayList<TableReference>() {
+              {
+                add(new TableReference("cycledim1", "cyleDim2Id"));
+                add(new TableReference("cycleDim2", "id"));
+              }
+            });
+          }
+        });
+      }
+    };
+    Dimension cycleDim1 = new Dimension(dimName, dimAttrs, null, joinchains, dimProps, 0L);
     client.createDimension(cycleDim1);
 
     String dimTblName = "cycleDim1Tbl";
@@ -1941,17 +2290,30 @@ public class CubeTestSetup {
     client.createCubeDimensionTable(dimName, dimTblName, dimColumns, 0L, dumpPeriods, dimProps, storageTables);
   }
 
-  private void createCyclicDim2(CubeMetastoreClient client) throws HiveException {
+  private void createCyclicDim2(CubeMetastoreClient client) throws Exception {
     String dimName = "cycleDim2";
 
     Set<CubeDimAttribute> dimAttrs = new HashSet<CubeDimAttribute>();
     dimAttrs.add(new BaseDimAttribute(new FieldSchema("id", "int", "code")));
     dimAttrs.add(new BaseDimAttribute(new FieldSchema("name", "string", "name")));
-    dimAttrs.add(new ReferencedDimAtrribute(new FieldSchema("cyleDim1Id", "string", "link to cyclic dim 1"),
-      "Cycle refer1", new TableReference("cycleDim1", "id")));
+    dimAttrs.add(new BaseDimAttribute(new FieldSchema("cyleDim1Id", "string", "link to cyclic dim 1")));
     Map<String, String> dimProps = new HashMap<String, String>();
     dimProps.put(MetastoreUtil.getDimTimedDimensionKey(dimName), TestCubeMetastoreClient.getDatePartitionKey());
-    Dimension cycleDim2 = new Dimension(dimName, dimAttrs, dimProps, 0L);
+    Set<JoinChain> joinchains = new HashSet<JoinChain>() {
+      {
+        add(new JoinChain("cycledim1chain", "cycledim1chain", "cycledim1chain") {
+          {
+            addPath(new ArrayList<TableReference>() {
+              {
+                add(new TableReference("cycledim2", "cyleDim1Id"));
+                add(new TableReference("cycleDim1", "id"));
+              }
+            });
+          }
+        });
+      }
+    };
+    Dimension cycleDim2 = new Dimension(dimName, dimAttrs, null, joinchains, dimProps, 0L);
     client.createDimension(cycleDim2);
 
     String dimTblName = "cycleDim2Tbl";
@@ -2113,11 +2475,24 @@ public class CubeTestSetup {
     dimAttrs.add(new BaseDimAttribute(new FieldSchema("id", "int", "code")));
     dimAttrs.add(new BaseDimAttribute(new FieldSchema("name", "string", "name")));
     dimAttrs.add(new BaseDimAttribute(new FieldSchema("capital", "string", "field2")));
-    dimAttrs.add(new ReferencedDimAtrribute(new FieldSchema("countryid", "string", "link to country table"),
-      "Country refer", new TableReference("countrydim", "id")));
+    dimAttrs.add(new BaseDimAttribute(new FieldSchema("countryid", "string", "link to country table")));
     Map<String, String> dimProps = new HashMap<String, String>();
     dimProps.put(MetastoreUtil.getDimTimedDimensionKey(dimName), TestCubeMetastoreClient.getDatePartitionKey());
-    Dimension countryDim = new Dimension(dimName, dimAttrs, dimProps, 0L);
+    Set<JoinChain> joinchains = new HashSet<JoinChain>() {
+      {
+        add(new JoinChain("countrychain", "countrychain", "countrychain") {
+          {
+            addPath(new ArrayList<TableReference>() {
+              {
+                add(new TableReference("statedim", "countryid"));
+                add(new TableReference("country", "id"));
+              }
+            });
+          }
+        });
+      }
+    };
+    Dimension countryDim = new Dimension(dimName, dimAttrs, null, joinchains, dimProps, 0L);
     client.createDimension(countryDim);
 
     String dimTblName = "statetable";

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
index ff7c15f..fe2dfb3 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
@@ -46,6 +46,8 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
   public void beforeClassFieldsCannotBeQueriedTogetherTest() {
     conf.setBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, true);
     conf.setBoolean(CubeQueryConfUtil.DISABLE_AGGREGATE_RESOLVER, false);
+    conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
+
   }
 
   @Test
@@ -84,7 +86,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
     disallowed with appropriate exception. */
 
     testFieldsCannotBeQueriedTogetherError("select substrexprdim2, SUM(msr1) from basecube where " + TWO_DAYS_RANGE,
-        Arrays.asList("dim2", "d_time", "msr1"));
+        Arrays.asList("dim2", "d_time", "dim2chain.name", "msr1"));
   }
 
   @Test
@@ -97,7 +99,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
     derived cube, hence query shall be disallowed with appropriate exception. */
 
     testFieldsCannotBeQueriedTogetherError("select substrexprdim2, sum(roundedmsr1) from basecube where "
-      + TWO_DAYS_RANGE, Arrays.asList("dim2", "d_time", "msr1"));
+      + TWO_DAYS_RANGE, Arrays.asList("dim2", "d_time", "dim2chain.name", "msr1"));
   }
 
   @Test
@@ -248,7 +250,8 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
      *  disallowed */
 
     testFieldsCannotBeQueriedTogetherError("select substrexprdim2, cubeStateName, countryid, SUM(msr2) from basecube"
-            + " where " + TWO_DAYS_RANGE, Arrays.asList("countryid", "dim2", "cubestate.name",  "d_time"));
+            + " where " + TWO_DAYS_RANGE,
+      Arrays.asList("countryid", "dim2", "cubestate.name",  "d_time", "dim2chain.name"));
   }
 
   @Test
@@ -340,7 +343,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
           + "Query got re-written to:" + hqlQuery);
     } catch(FieldsCannotBeQueriedTogetherException actualException) {
 
-      SortedSet<String> expectedFields = new TreeSet<String>(conflictingFields);
+      SortedSet<String> expectedFields = new TreeSet<>(conflictingFields);
 
       FieldsCannotBeQueriedTogetherException expectedException =
           new FieldsCannotBeQueriedTogetherException(new ConflictingFields(expectedFields));

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index 5b44f95..57a15e2 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -421,15 +421,15 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
   @Test
   public void testMultiFactQueryWithJoins() throws Exception {
     // query with join
-    String hqlQuery = rewrite("select testdim2.name, msr12, roundedmsr2 from basecube where " + TWO_DAYS_RANGE, conf);
+    String hqlQuery = rewrite("select dim2chain.name, msr12, roundedmsr2 from basecube where " + TWO_DAYS_RANGE, conf);
     String expected1 = getExpectedQuery(cubeName,
-        "select testdim2.name as `name`, sum(basecube.msr12) as `msr12` FROM ", " JOIN " + getDbName()
-            + "c1_testdim2tbl testdim2 ON basecube.dim2 = " + " testdim2.id and (testdim2.dt = 'latest') ", null,
-        " group by testdim2.name", null, getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+        "select dim2chain.name as `name`, sum(basecube.msr12) as `msr12` FROM ", " JOIN " + getDbName()
+            + "c1_testdim2tbl dim2chain ON basecube.dim2 = " + " dim2chain.id and (dim2chain.dt = 'latest') ", null,
+        " group by dim2chain.name", null, getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
     String expected2 = getExpectedQuery(cubeName,
-        "select testdim2.name as `name`, round(sum(basecube.msr2)/1000) as `roundedmsr2` FROM ", " JOIN " + getDbName()
-            + "c1_testdim2tbl testdim2 ON basecube.dim2 = " + " testdim2.id and (testdim2.dt = 'latest') ", null,
-        " group by testdim2.name", null, getWhereForHourly2days(cubeName, "C1_testfact1_raw_base"));
+        "select dim2chain.name as `name`, round(sum(basecube.msr2)/1000) as `roundedmsr2` FROM ", " JOIN " + getDbName()
+            + "c1_testdim2tbl dim2chain ON basecube.dim2 = " + " dim2chain.id and (dim2chain.dt = 'latest') ", null,
+        " group by dim2chain.name", null, getWhereForHourly2days(cubeName, "C1_testfact1_raw_base"));
     TestCubeRewriter.compareContains(expected1, hqlQuery);
     TestCubeRewriter.compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(
@@ -444,9 +444,9 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
   public void testMultiFactQueryWithDenormColumn() throws Exception {
     // query with denorm variable
     String hqlQuery = rewrite("select dim2, msr13, roundedmsr2 from basecube" + " where " + TWO_DAYS_RANGE, conf);
-    String expected1 = getExpectedQuery(cubeName, "select testdim2.id as `dim2`, max(basecube.msr13) as `msr13` FROM ",
-        " JOIN " + getDbName() + "c1_testdim2tbl testdim2 ON basecube.dim12 = "
-            + " testdim2.id and (testdim2.dt = 'latest') ", null, " group by testdim2.id", null,
+    String expected1 = getExpectedQuery(cubeName, "select dim2chain.id as `dim2`, max(basecube.msr13) as `msr13` FROM ",
+        " JOIN " + getDbName() + "c1_testdim2tbl dim2chain ON basecube.dim12 = "
+            + " dim2chain.id and (dim2chain.dt = 'latest') ", null, " group by dim2chain.id", null,
         getWhereForHourly2days(cubeName, "C1_testFact3_RAW_BASE"));
     String expected2 = getExpectedQuery(cubeName,
         "select basecube.dim2 as `dim2`, round(sum(basecube.msr2)/1000) as `roundedmsr2` FROM ", null,
@@ -470,10 +470,10 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
         "select booleancut, round(sum(msr2)/1000), avg(msr13 + msr14) from basecube" + " where " + TWO_DAYS_RANGE,
         conf);
     String expected1 =
-      getExpectedQuery(cubeName, "select basecube.dim1 != 'x' AND testdim2.id != 10 as `booleancut`,"
+      getExpectedQuery(cubeName, "select basecube.dim1 != 'x' AND dim2chain.id != 10 as `booleancut`,"
           + " avg(basecube.msr13 + basecube.msr14) as `expr3` FROM ", " JOIN " + getDbName()
-          + "c1_testdim2tbl testdim2 ON basecube.dim12 = " + " testdim2.id and (testdim2.dt = 'latest') ", null,
-        " group by basecube.dim1 != 'x' AND testdim2.id != 10", null,
+          + "c1_testdim2tbl dim2chain ON basecube.dim12 = " + " dim2chain.id and (dim2chain.dt = 'latest') ", null,
+        " group by basecube.dim1 != 'x' AND dim2chain.id != 10", null,
         getWhereForHourly2days(cubeName, "C1_testfact3_raw_base"));
     String expected2 =
       getExpectedQuery(cubeName, "select basecube.dim1 != 'x' AND basecube.dim2 != 10 as `booleancut`,"


[34/51] [abbrv] lens git commit: LENS-916: docker image creation is failing.

Posted by de...@apache.org.
LENS-916: docker image creation is failing.


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/27a0cad0
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/27a0cad0
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/27a0cad0

Branch: refs/heads/current-release-line
Commit: 27a0cad0d1ece88dc7d0937b05848ad6f92208dd
Parents: 91ccec7
Author: piyush <pi...@gmail.com>
Authored: Mon Jan 25 11:40:58 2016 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Mon Jan 25 11:40:58 2016 +0530

----------------------------------------------------------------------
 lens-docker/lens-test/Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/27a0cad0/lens-docker/lens-test/Dockerfile
----------------------------------------------------------------------
diff --git a/lens-docker/lens-test/Dockerfile b/lens-docker/lens-test/Dockerfile
index 734f964..885674d 100644
--- a/lens-docker/lens-test/Dockerfile
+++ b/lens-docker/lens-test/Dockerfile
@@ -17,7 +17,7 @@
 
 FROM inmobi/docker-hive
 
-RUN wget http://apache.mirrors.lucidnetworks.net/spark/spark-1.3.0/spark-1.3.0-bin-hadoop2.4.tgz
+RUN wget https://archive.apache.org/dist/spark/spark-1.3.0/spark-1.3.0-bin-hadoop2.4.tgz
 RUN gunzip spark-1.3.0-bin-hadoop2.4.tgz
 RUN tar -xvf spark-1.3.0-bin-hadoop2.4.tar
 RUN mv spark-1.3.0-bin-hadoop2.4 /usr/local


[48/51] [abbrv] lens git commit: LENS-920 : Fix issues in producing and consuming json for all api

Posted by de...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/LensServerTestUtil.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/LensServerTestUtil.java b/lens-server/src/test/java/org/apache/lens/server/LensServerTestUtil.java
index 94dd394..ddca12f 100644
--- a/lens-server/src/test/java/org/apache/lens/server/LensServerTestUtil.java
+++ b/lens-server/src/test/java/org/apache/lens/server/LensServerTestUtil.java
@@ -75,7 +75,8 @@ public final class LensServerTestUtil {
    * @param schemaStr     the schema string, with surrounding parenthesis.
    * @throws InterruptedException the interrupted exception
    */
-  public static void createTable(String tblName, WebTarget parent, LensSessionHandle lensSessionId, String schemaStr)
+  public static void createTable(String tblName, WebTarget parent, LensSessionHandle lensSessionId, String schemaStr,
+    MediaType mt)
     throws InterruptedException {
     LensConf conf = new LensConf();
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
@@ -85,21 +86,21 @@ public final class LensServerTestUtil {
     String createTable = "CREATE TABLE IF NOT EXISTS " + tblName + schemaStr;
 
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), createTable));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
 
-    final QueryHandle handle = target.request()
+    final QueryHandle handle = target.request(mt)
         .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
             new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
     // wait till the query finishes
-    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
+    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(mt)
       .get(LensQuery.class);
     QueryStatus stat = ctx.getStatus();
     while (!stat.finished()) {
-      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
+      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(mt).get(LensQuery.class);
       stat = ctx.getStatus();
       Thread.sleep(1000);
     }
@@ -112,13 +113,13 @@ public final class LensServerTestUtil {
     assertTrue(ctx.getFinishTime() > 0, debugHelpMsg);
   }
 
-  public static void createTable(String tblName, WebTarget parent, LensSessionHandle lensSessionId)
+  public static void createTable(String tblName, WebTarget parent, LensSessionHandle lensSessionId, MediaType mt)
     throws InterruptedException {
-    createTable(tblName, parent, lensSessionId, "(ID INT, IDSTR STRING)");
+    createTable(tblName, parent, lensSessionId, "(ID INT, IDSTR STRING)", mt);
   }
 
   public static void loadData(String tblName, final String testDataFile, WebTarget parent,
-      LensSessionHandle lensSessionId) throws InterruptedException {
+      LensSessionHandle lensSessionId, MediaType mt) throws InterruptedException {
     LensConf conf = new LensConf();
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
     final WebTarget target = parent.path("queryapi/queries");
@@ -127,21 +128,21 @@ public final class LensServerTestUtil {
     String dataLoad = "LOAD DATA LOCAL INPATH '" + testDataFile + "' OVERWRITE INTO TABLE " + tblName;
 
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-        MediaType.APPLICATION_XML_TYPE));
+        mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), dataLoad));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-        MediaType.APPLICATION_XML_TYPE));
+        mt));
 
-    final QueryHandle handle = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+    final QueryHandle handle = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
         new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
 
     // wait till the query finishes
-    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
+    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(mt)
         .get(LensQuery.class);
     QueryStatus stat = ctx.getStatus();
     while (!stat.finished()) {
-      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
+      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(mt).get(LensQuery.class);
       stat = ctx.getStatus();
       Thread.sleep(1000);
     }
@@ -157,10 +158,10 @@ public final class LensServerTestUtil {
    * @throws InterruptedException the interrupted exception
    */
   public static void loadDataFromClasspath(String tblName, final String testDataFile, WebTarget parent,
-      LensSessionHandle lensSessionId) throws InterruptedException {
+      LensSessionHandle lensSessionId, MediaType mt) throws InterruptedException {
 
     String absolutePath = LensServerTestUtil.class.getClassLoader().getResource(testDataFile).getPath();
-    loadData(tblName, absolutePath, parent, lensSessionId);
+    loadData(tblName, absolutePath, parent, lensSessionId, mt);
   }
 
   /**
@@ -171,11 +172,11 @@ public final class LensServerTestUtil {
    * @param lensSessionId the lens session id
    * @throws InterruptedException the interrupted exception
    */
-  public static void dropTable(String tblName, WebTarget parent, LensSessionHandle lensSessionId)
+  public static void dropTable(String tblName, WebTarget parent, LensSessionHandle lensSessionId, MediaType mt)
     throws InterruptedException {
     LensConf conf = new LensConf();
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
-    dropTableWithConf(tblName, parent, lensSessionId, conf);
+    dropTableWithConf(tblName, parent, lensSessionId, conf, mt);
   }
 
   /**
@@ -189,28 +190,28 @@ public final class LensServerTestUtil {
    * @throws InterruptedException
    */
   public static void dropTableWithConf(String tblName, WebTarget parent, LensSessionHandle lensSessionId,
-    LensConf conf) throws InterruptedException {
+    LensConf conf, MediaType mt) throws InterruptedException {
     final WebTarget target = parent.path("queryapi/queries");
 
     final FormDataMultiPart mp = new FormDataMultiPart();
     String createTable = "DROP TABLE IF EXISTS " + tblName;
 
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), createTable));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
 
-    final QueryHandle handle = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+    final QueryHandle handle = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
         new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
 
     // wait till the query finishes
-    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
+    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(mt)
       .get(LensQuery.class);
     QueryStatus stat = ctx.getStatus();
     while (!stat.finished()) {
-      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
+      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(mt).get(LensQuery.class);
       stat = ctx.getStatus();
       Thread.sleep(1000);
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/TestLensApplication.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/TestLensApplication.java b/lens-server/src/test/java/org/apache/lens/server/TestLensApplication.java
index 4f9f4d2..6636346 100644
--- a/lens-server/src/test/java/org/apache/lens/server/TestLensApplication.java
+++ b/lens-server/src/test/java/org/apache/lens/server/TestLensApplication.java
@@ -23,7 +23,6 @@ import static org.testng.Assert.assertEquals;
 import java.util.List;
 
 import javax.ws.rs.client.WebTarget;
-import javax.ws.rs.core.Application;
 import javax.ws.rs.core.Response;
 
 import org.apache.lens.server.api.metrics.MetricsService;
@@ -39,17 +38,7 @@ import com.codahale.metrics.ScheduledReporter;
  * The Class TestLensApplication.
  */
 @Test(alwaysRun = true, groups = "unit-test")
-public class TestLensApplication extends LensJerseyTest {
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.glassfish.jersey.test.JerseyTest#configure()
-   */
-  @Override
-  protected Application configure() {
-    return new LensApplication();
-  }
+public class TestLensApplication extends LensAllApplicationJerseyTest {
 
   /**
    * Setup.

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/TestServerMode.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/TestServerMode.java b/lens-server/src/test/java/org/apache/lens/server/TestServerMode.java
index 75f21e1..caf968a 100644
--- a/lens-server/src/test/java/org/apache/lens/server/TestServerMode.java
+++ b/lens-server/src/test/java/org/apache/lens/server/TestServerMode.java
@@ -61,7 +61,8 @@ public class TestServerMode extends LensAllApplicationJerseyTest {
   @BeforeTest
   public void setUp() throws Exception {
     super.setUp();
-    LensServerTestUtil.createTable("test_table", target(), RestAPITestUtil.openFooBarSession(target()));
+    LensServerTestUtil.createTable("test_table", target(), RestAPITestUtil.openFooBarSession(target(), defaultMT),
+      defaultMT);
   }
 
   /*

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java b/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
index 877200f..0f55d9e 100644
--- a/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
+++ b/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
@@ -142,14 +142,14 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
     createRestartTestDataFile();
 
     // Create a test table
-    createTable("test_server_restart", target(), lensSessionId);
-    loadData("test_server_restart", TestResourceFile.TEST_DATA_FILE.getValue(), target(), lensSessionId);
+    createTable("test_server_restart", target(), lensSessionId, defaultMT);
+    loadData("test_server_restart", TestResourceFile.TEST_DATA_FILE.getValue(), target(), lensSessionId, defaultMT);
     log.info("Loaded data");
 
     // test post execute op
     final WebTarget target = target().path("queryapi/queries");
 
-    List<QueryHandle> launchedQueries = new ArrayList<QueryHandle>();
+    List<QueryHandle> launchedQueries = new ArrayList<>();
     final int NUM_QUERIES = 10;
 
     boolean killed = false;
@@ -168,19 +168,18 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
 
       final FormDataMultiPart mp = new FormDataMultiPart();
       mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-        MediaType.APPLICATION_XML_TYPE));
+        defaultMT));
       mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(),
         "select COUNT(ID) from test_server_restart"));
       mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
       mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(),
-        new LensConf(), MediaType.APPLICATION_XML_TYPE));
-      final QueryHandle handle = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+        new LensConf(), defaultMT));
+      final QueryHandle handle = target.request(defaultMT).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
         new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
 
       Assert.assertNotNull(handle);
-      LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
+      LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(defaultMT)
         .get(LensQuery.class);
-      QueryStatus stat = ctx.getStatus();
       log.info("{} submitted query {} state: {}", i, handle, ctx.getStatus().getStatus());
       launchedQueries.add(handle);
     }
@@ -196,18 +195,19 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
     for (QueryHandle handle : launchedQueries) {
       log.info("Polling query {}", handle);
       try {
-        LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
+        LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(defaultMT)
           .get(LensQuery.class);
         QueryStatus stat = ctx.getStatus();
         while (!stat.finished()) {
           log.info("Polling query {} Status:{}", handle, stat);
-          ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
+          ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(defaultMT)
+            .get(LensQuery.class);
           stat = ctx.getStatus();
           Thread.sleep(1000);
         }
         assertEquals(ctx.getStatus().getStatus(), QueryStatus.Status.SUCCESSFUL, "Expected to be successful " + handle);
         PersistentQueryResult resultset = target.path(handle.toString()).path("resultset")
-          .queryParam("sessionid", lensSessionId).request().get(PersistentQueryResult.class);
+          .queryParam("sessionid", lensSessionId).request(defaultMT).get(PersistentQueryResult.class);
         List<String> rows = TestQueryService.readResultSet(resultset, handle, true);
         assertEquals(rows.size(), 1);
         assertEquals(rows.get(0), "" + NROWS);
@@ -218,7 +218,7 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
       }
     }
     log.info("End server restart test");
-    LensServerTestUtil.dropTable("test_server_restart", target(), lensSessionId);
+    LensServerTestUtil.dropTable("test_server_restart", target(), lensSessionId, defaultMT);
     queryService.closeSession(lensSessionId);
   }
 
@@ -249,9 +249,9 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
     log.info("@@ Added resource {}", dataFile.toURI());
 
     // Create a test table
-    createTable("test_hive_server_restart", target(), lensSessionId);
+    createTable("test_hive_server_restart", target(), lensSessionId, defaultMT);
     loadData("test_hive_server_restart", TestResourceFile.TEST_DATA_FILE.getValue(), target(),
-      lensSessionId);
+      lensSessionId, defaultMT);
     log.info("Loaded data");
 
     log.info("Hive Server restart test");
@@ -261,24 +261,25 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
     // Submit query, restart HS2, submit another query
     FormDataMultiPart mp = new FormDataMultiPart();
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      defaultMT));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(),
       "select COUNT(ID) from test_hive_server_restart"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
-      MediaType.APPLICATION_XML_TYPE));
-    QueryHandle handle = target.request()
+      defaultMT));
+    QueryHandle handle = target.request(defaultMT)
       .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
         new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
 
     Assert.assertNotNull(handle);
 
     // wait for query to move out of QUEUED state
-    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
+    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(defaultMT)
         .get(LensQuery.class);
     QueryStatus stat = ctx.getStatus();
     while (stat.queued()) {
-      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
+      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(defaultMT)
+        .get(LensQuery.class);
       stat = ctx.getStatus();
       Thread.sleep(1000);
     }
@@ -312,12 +313,13 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
     verifyParamOnRestart(lensSessionId);
 
     // Poll for first query, we should not get any exception
-    ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
+    ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(defaultMT)
       .get(LensQuery.class);
     stat = ctx.getStatus();
     while (!stat.finished()) {
       log.info("Polling query {} Status:{}", handle, stat);
-      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
+      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(defaultMT)
+        .get(LensQuery.class);
       stat = ctx.getStatus();
       Thread.sleep(1000);
     }
@@ -332,18 +334,19 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
     final String query = "select COUNT(ID) from test_hive_server_restart";
     Response response = null;
     while (response == null || response.getStatus() == Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()) {
-      response = execute(target(), Optional.of(lensSessionId), Optional.of(query));
+      response = execute(target(), Optional.of(lensSessionId), Optional.of(query), defaultMT);
       Thread.sleep(1000);
     }
 
     handle = response.readEntity(new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
 
     // Poll for second query, this should finish successfully
-    ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
+    ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(defaultMT).get(LensQuery.class);
     stat = ctx.getStatus();
     while (!stat.finished()) {
       log.info("Post restart polling query {} Status:{}", handle, stat);
-      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
+      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(defaultMT)
+        .get(LensQuery.class);
       stat = ctx.getStatus();
       Thread.sleep(1000);
     }
@@ -364,7 +367,7 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
     // "Expected to be successful " + handle);
 
     log.info("End hive server restart test");
-    LensServerTestUtil.dropTable("test_hive_server_restart", target(), lensSessionId);
+    LensServerTestUtil.dropTable("test_hive_server_restart", target(), lensSessionId, defaultMT);
     queryService.closeSession(lensSessionId);
   }
 
@@ -383,9 +386,9 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
     sessionForm.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("username").build(), "foo"));
     sessionForm.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("password").build(), "bar"));
     sessionForm.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionconf").fileName("sessionconf")
-      .build(), new LensConf(), MediaType.APPLICATION_XML_TYPE));
+      .build(), new LensConf(), defaultMT));
 
-    final LensSessionHandle restartTestSession = sessionTarget.request().post(
+    final LensSessionHandle restartTestSession = sessionTarget.request(defaultMT).post(
       Entity.entity(sessionForm, MediaType.MULTIPART_FORM_DATA_TYPE), LensSessionHandle.class);
     Assert.assertNotNull(restartTestSession);
 
@@ -396,11 +399,11 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
     final WebTarget resourcetarget = target().path("session/resources");
     final FormDataMultiPart mp1 = new FormDataMultiPart();
     mp1.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), restartTestSession,
-      MediaType.APPLICATION_XML_TYPE));
+      defaultMT));
     mp1.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("type").build(), "file"));
     mp1.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("path").build(),
       "target/test-classes/lens-site.xml"));
-    APIResult result = resourcetarget.path("add").request()
+    APIResult result = resourcetarget.path("add").request(defaultMT)
       .put(Entity.entity(mp1, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
     assertEquals(result.getStatus(), Status.SUCCEEDED);
 
@@ -420,18 +423,18 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
     Assert.assertTrue(resourceEntry.getLocation().contains("target/test-classes/lens-site.xml"));
 
     // close session
-    result = sessionTarget.queryParam("sessionid", restartTestSession).request().delete(APIResult.class);
+    result = sessionTarget.queryParam("sessionid", restartTestSession).request(defaultMT).delete(APIResult.class);
     assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
   }
 
   private void setParams(LensSessionHandle lensSessionHandle) {
     FormDataMultiPart setpart = new FormDataMultiPart();
     setpart.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionHandle,
-      MediaType.APPLICATION_XML_TYPE));
+      defaultMT));
     setpart
       .bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("key").build(), "lens.session.testRestartKey"));
     setpart.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("value").build(), "myvalue"));
-    APIResult result = target().path("session").path("params").request()
+    APIResult result = target().path("session").path("params").request(defaultMT)
       .put(Entity.entity(setpart, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
     assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
   }
@@ -439,10 +442,10 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
   private void verifyParamOnRestart(LensSessionHandle lensSessionHandle) {
 
     StringList sessionParams = target().path("session").path("params").queryParam("sessionid", lensSessionHandle)
-      .queryParam("verbose", true).queryParam("key", "lens.session.testRestartKey").request().get(StringList.class);
+      .queryParam("verbose", true).queryParam("key", "lens.session.testRestartKey").request(defaultMT)
+      .get(StringList.class);
     System.out.println("Session params:" + sessionParams.getElements());
     assertEquals(sessionParams.getElements().size(), 1);
     Assert.assertTrue(sessionParams.getElements().contains("lens.session.testRestartKey=myvalue"));
-
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/common/FormDataMultiPartFactory.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/common/FormDataMultiPartFactory.java b/lens-server/src/test/java/org/apache/lens/server/common/FormDataMultiPartFactory.java
index 5301162..9a1d79d 100644
--- a/lens-server/src/test/java/org/apache/lens/server/common/FormDataMultiPartFactory.java
+++ b/lens-server/src/test/java/org/apache/lens/server/common/FormDataMultiPartFactory.java
@@ -41,83 +41,76 @@ public class FormDataMultiPartFactory {
   }
 
   public static FormDataMultiPart createFormDataMultiPartForQuery(final Optional<LensSessionHandle> sessionId,
-      final Optional<String> query, final Optional<String> operation, final LensConf lensConf) {
+      final Optional<String> query, final Optional<String> operation, final LensConf lensConf, MediaType mt) {
 
     final FormDataMultiPart mp = new FormDataMultiPart();
 
     if (sessionId.isPresent()) {
-      mp.bodyPart(getSessionIdFormDataBodyPart(sessionId.get()));
+      mp.bodyPart(getSessionIdFormDataBodyPart(sessionId.get(), mt));
     }
 
     if (query.isPresent()) {
-      mp.bodyPart(getFormDataBodyPart("query", query.get()));
+      mp.bodyPart(getFormDataBodyPart("query", query.get(), mt));
     }
 
     if (operation.isPresent()) {
-      mp.bodyPart(getFormDataBodyPart("operation", operation.get()));
+      mp.bodyPart(getFormDataBodyPart("operation", operation.get(), mt));
     }
 
-    mp.bodyPart(getFormDataBodyPart("conf", "conf", lensConf));
+    mp.bodyPart(getFormDataBodyPart("conf", "conf", lensConf, mt));
     return mp;
   }
 
-  public static FormDataMultiPart createFormDataMultiPartForSession(final Optional<LensSessionHandle> sessionId,
-      final Optional<String> username, final Optional<String> password, final Optional<LensConf> lensConf) {
+  public static FormDataMultiPart createFormDataMultiPartForSession(
+    final Optional<String> username, final Optional<String> password, final Optional<LensConf> lensConf,
+    final MediaType mt) {
 
     final FormDataMultiPart mp = new FormDataMultiPart();
 
-    if (sessionId.isPresent()) {
-      mp.bodyPart(getSessionIdFormDataBodyPart(sessionId.get()));
-    }
-
     if (username.isPresent()) {
-      mp.bodyPart(getFormDataBodyPart("username", username.get()));
+      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("username").build(), username.get()));
     }
 
     if (password.isPresent()) {
-      mp.bodyPart(getFormDataBodyPart("password", password.get()));
+      mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("password").build(), password.get()));
     }
 
     if (lensConf.isPresent()) {
-      mp.bodyPart(getFormDataBodyPart("sessionconf", "sessionconf", lensConf.get()));
+      mp.bodyPart(getFormDataBodyPart("sessionconf", "sessionconf", lensConf.get(), mt));
     }
 
     return mp;
   }
 
   public static FormDataMultiPart createFormDataMultiPartForFact(final LensSessionHandle sessionId,
-      final XFactTable xFactTable) {
+      final XFactTable xFactTable, MediaType mt) {
 
     final FormDataMultiPart mp = new FormDataMultiPart();
-    mp.bodyPart(getSessionIdFormDataBodyPart(sessionId));
-    mp.bodyPart(getFormDataBodyPart("fact", "fact", cubeObjectFactory.createXFactTable(xFactTable)));
+    mp.bodyPart(getSessionIdFormDataBodyPart(sessionId, mt));
+    mp.bodyPart(getFormDataBodyPart("fact", "fact", cubeObjectFactory.createXFactTable(xFactTable), mt));
 
     return mp;
   }
 
-  private static FormDataBodyPart getFormDataBodyPart(final String fdContentDispName, final String value) {
+  private static FormDataBodyPart getFormDataBodyPart(final String fdContentDispName, final String value,
+    final MediaType mt) {
     return new FormDataBodyPart(FormDataContentDisposition.name(fdContentDispName).build(), value,
-        MediaType.APPLICATION_XML_TYPE);
+        mt);
   }
 
-  private static FormDataBodyPart getFormDataBodyPart(final String fdContentDispName, final Object entity) {
+  private static FormDataBodyPart getFormDataBodyPart(final String fdContentDispName, final Object entity,
+    final MediaType mt) {
     return new FormDataBodyPart(FormDataContentDisposition.name(fdContentDispName).build(), entity,
-        MediaType.APPLICATION_XML_TYPE);
+        mt);
   }
 
   private static FormDataBodyPart getFormDataBodyPart(final String fdContentDispName, final String fileName,
-      final Object entity) {
+      final Object entity, final MediaType mt) {
     return new FormDataBodyPart(FormDataContentDisposition.name(fdContentDispName).fileName(fileName).build(), entity,
-        MediaType.APPLICATION_XML_TYPE);
-  }
-
-  private static FormDataBodyPart getFormDataBodyPartWithOutEntity(final String fdContentDispName,
-      final String fileName) {
-    return new FormDataBodyPart(FormDataContentDisposition.name(fdContentDispName).fileName(fileName).build(),
-        MediaType.APPLICATION_XML_TYPE);
+        mt);
   }
 
-  private static FormDataBodyPart getSessionIdFormDataBodyPart(final LensSessionHandle sessionId) {
-    return getFormDataBodyPart("sessionid", sessionId);
+  private static FormDataBodyPart getSessionIdFormDataBodyPart(final LensSessionHandle sessionId, MediaType mt) {
+    return getFormDataBodyPart("sessionid", sessionId, mt);
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/common/RestAPITestUtil.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/common/RestAPITestUtil.java b/lens-server/src/test/java/org/apache/lens/server/common/RestAPITestUtil.java
index 4b25fd0..0e39b52 100644
--- a/lens-server/src/test/java/org/apache/lens/server/common/RestAPITestUtil.java
+++ b/lens-server/src/test/java/org/apache/lens/server/common/RestAPITestUtil.java
@@ -19,7 +19,6 @@
 
 package org.apache.lens.server.common;
 
-import static org.apache.lens.server.common.FormDataMultiPartFactory.createFormDataMultiPartForFact;
 import static org.apache.lens.server.common.FormDataMultiPartFactory.createFormDataMultiPartForSession;
 
 import static org.testng.Assert.assertEquals;
@@ -28,9 +27,11 @@ import static org.testng.Assert.assertNotNull;
 import javax.ws.rs.client.Entity;
 import javax.ws.rs.client.Invocation;
 import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.GenericEntity;
 import javax.ws.rs.core.GenericType;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
+import javax.xml.bind.JAXBElement;
 
 import org.apache.lens.api.APIResult;
 import org.apache.lens.api.LensConf;
@@ -38,10 +39,7 @@ import org.apache.lens.api.LensSessionHandle;
 import org.apache.lens.api.metastore.ObjectFactory;
 import org.apache.lens.api.metastore.XCube;
 import org.apache.lens.api.metastore.XFactTable;
-import org.apache.lens.api.query.LensQuery;
-import org.apache.lens.api.query.QueryHandle;
-import org.apache.lens.api.query.QueryResult;
-import org.apache.lens.api.query.QueryStatus;
+import org.apache.lens.api.query.*;
 import org.apache.lens.api.result.LensAPIResult;
 
 import org.glassfish.jersey.media.multipart.FormDataMultiPart;
@@ -56,43 +54,43 @@ public class RestAPITestUtil {
     throw new UnsupportedOperationException();
   }
 
-  public static LensSessionHandle openFooBarSession(final WebTarget target) {
-    return openSession(target, "foo", "bar");
+  public static LensSessionHandle openFooBarSession(final WebTarget target, MediaType mt) {
+    return openSession(target, "foo", "bar", mt);
   }
 
-  public static LensSessionHandle openSession(final WebTarget target, final String userName, final String passwd) {
-    return openSession(target, userName, passwd, new LensConf());
+  public static LensSessionHandle openSession(final WebTarget target, final String userName, final String passwd,
+    MediaType mt) {
+    return openSession(target, userName, passwd, new LensConf(), mt);
   }
 
   public static LensSessionHandle openSession(final WebTarget target, final String userName, final String passwd,
-    final LensConf conf) {
+    final LensConf conf, MediaType mt) {
 
-    final FormDataMultiPart mp = createFormDataMultiPartForSession(Optional.<LensSessionHandle>absent(),
-      Optional.of(userName), Optional.of(passwd), Optional.of(conf));
+    final FormDataMultiPart mp = createFormDataMultiPartForSession(Optional.of(userName), Optional.of(passwd),
+      Optional.of(conf), mt);
 
-    return target.path("session").request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+    return target.path("session").request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
       LensSessionHandle.class);
   }
 
   public static Response estimate(final WebTarget target, final Optional<LensSessionHandle> sessionId,
-    final Optional<String> query) {
-
-    return postQuery(target, sessionId, query, Optional.of("estimate"), Optional.<LensConf>absent());
+    final Optional<String> query, MediaType mt) {
+    return postQuery(target, sessionId, query, Optional.of("estimate"), Optional.<LensConf>absent(), mt);
   }
 
   public static Response execute(final WebTarget target, final Optional<LensSessionHandle> sessionId,
-    final Optional<String> query) {
-    return execute(target, sessionId, query, Optional.<LensConf>absent());
+    final Optional<String> query, MediaType mt) {
+    return execute(target, sessionId, query, Optional.<LensConf>absent(), mt);
   }
 
   public static Response execute(final WebTarget target, final Optional<LensSessionHandle> sessionId,
-    final Optional<String> query, final Optional<LensConf> lensConf) {
-    return postQuery(target, sessionId, query, Optional.of("execute"), lensConf);
+    final Optional<String> query, final Optional<LensConf> lensConf, MediaType mt) {
+    return postQuery(target, sessionId, query, Optional.of("execute"), lensConf, mt);
   }
 
   public static <T> T executeAndGetHandle(final WebTarget target, final Optional<LensSessionHandle> sessionId,
-    final Optional<String> query, final Optional<LensConf> lensConf) {
-    Response resp = postQuery(target, sessionId, query, Optional.of("execute"), lensConf);
+    final Optional<String> query, final Optional<LensConf> lensConf, MediaType mt) {
+    Response resp = postQuery(target, sessionId, query, Optional.of("execute"), lensConf, mt);
     assertEquals(resp.getStatus(), Response.Status.OK.getStatusCode());
     T handle = resp.readEntity(new GenericType<LensAPIResult<T>>() {}).getData();
     assertNotNull(handle);
@@ -100,116 +98,115 @@ public class RestAPITestUtil {
   }
 
   public static Response postQuery(final WebTarget target, final Optional<LensSessionHandle> sessionId,
-    final Optional<String> query, final Optional<String> operation) {
-    return postQuery(target, sessionId, query, operation, Optional.<LensConf>absent());
+                                   final Optional<String> query, final Optional<String> operation, MediaType mt) {
+    return postQuery(target, sessionId, query, operation, Optional.<LensConf>absent(), mt);
   }
 
   public static Response postQuery(final WebTarget target, final Optional<LensSessionHandle> sessionId,
-    final Optional<String> query, final Optional<String> operation, Optional<LensConf> lensConfOptional) {
+    final Optional<String> query, final Optional<String> operation, Optional<LensConf> lensConfOptional, MediaType mt) {
 
     FormDataMultiPart mp = FormDataMultiPartFactory
-      .createFormDataMultiPartForQuery(sessionId, query, operation, lensConfOptional.or(new LensConf()));
+      .createFormDataMultiPartForQuery(sessionId, query, operation, lensConfOptional.or(new LensConf()), mt);
 
-    return target.path("queryapi/queries").request(MediaType.APPLICATION_XML).post(
+    return target.path("queryapi/queries").request(mt).post(
       Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
   }
 
   public static LensQuery executeAndWaitForQueryToFinish(WebTarget target, LensSessionHandle lensSessionId,
-    String query, Optional<LensConf> conf, Optional<QueryStatus.Status> statusOptional) throws InterruptedException {
-    QueryHandle handle = executeAndGetHandle(target, Optional.of(lensSessionId), Optional.of(query), conf);
+    String query, Optional<LensConf> conf, Optional<QueryStatus.Status> statusOptional, MediaType mt)
+    throws InterruptedException {
+    QueryHandle handle = executeAndGetHandle(target, Optional.of(lensSessionId), Optional.of(query), conf, mt);
     if (statusOptional.isPresent()) {
-      return waitForQueryToFinish(target, lensSessionId, handle, statusOptional.get());
+      return waitForQueryToFinish(target, lensSessionId, handle, statusOptional.get(), mt);
     } else {
-      return waitForQueryToFinish(target, lensSessionId, handle);
+      return waitForQueryToFinish(target, lensSessionId, handle, mt);
     }
   }
 
-  public static void closeSessionFailFast(final WebTarget target, final LensSessionHandle sessionId) {
-    APIResult result = closeSession(target, sessionId);
+  public static void closeSessionFailFast(final WebTarget target, final LensSessionHandle sessionId, MediaType mt) {
+    APIResult result = closeSession(target, sessionId, mt);
     checkResponse(result);
   }
 
-  public static APIResult closeSession(final WebTarget target, final LensSessionHandle sessionId) {
-    return target.path("session").queryParam("sessionid", sessionId).request().delete(APIResult.class);
+  public static APIResult closeSession(final WebTarget target, final LensSessionHandle sessionId, MediaType mt) {
+    return target.path("session").queryParam("sessionid", sessionId).request(mt).delete(APIResult.class);
   }
 
-  public static String getCurrentDatabase(final WebTarget target, final LensSessionHandle sessionId) {
+  public static String getCurrentDatabase(final WebTarget target, final LensSessionHandle sessionId, MediaType mt) {
     WebTarget dbTarget = target.path("metastore").path("databases/current");
-    Invocation.Builder builder = dbTarget.queryParam("sessionid", sessionId).request(MediaType.APPLICATION_XML);
+    Invocation.Builder builder = dbTarget.queryParam("sessionid", sessionId).request(mt);
     String response = builder.get(String.class);
     return response;
   }
 
-  public static APIResult createCube(final WebTarget target, final LensSessionHandle sessionId, final XCube cube) {
-
-    return target.path("metastore").path("cubes").queryParam("sessionid", sessionId).request(MediaType.APPLICATION_XML)
-      .post(Entity.xml(cubeObjectFactory.createXCube(cube)), APIResult.class);
+  public static APIResult createCube(final WebTarget target, final LensSessionHandle sessionId, final XCube cube,
+    MediaType mt) {
+    return target.path("metastore").path("cubes").queryParam("sessionid", sessionId).request(mt)
+      .post(Entity.entity(
+        new GenericEntity<JAXBElement<XCube>>(cubeObjectFactory.createXCube(cube)){}, mt), APIResult.class);
   }
 
-  public static void createCubeFailFast(final WebTarget target, final LensSessionHandle sessionId, final XCube cube) {
-    APIResult result = createCube(target, sessionId, cube);
+  public static void createCubeFailFast(final WebTarget target, final LensSessionHandle sessionId, final XCube cube,
+    MediaType mt) {
+    APIResult result = createCube(target, sessionId, cube, mt);
     checkResponse(result);
   }
 
-  public static APIResult createFact(final WebTarget target, final LensSessionHandle sessionId,
-    final XFactTable factTable) {
-
-    FormDataMultiPart mp = createFormDataMultiPartForFact(sessionId, factTable);
-    return target.path("metastore").path("facts").queryParam("sessionid", sessionId).request(MediaType.APPLICATION_XML)
-      .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
-  }
-
   public static void createFactFailFast(final WebTarget target, final LensSessionHandle sessionId,
-    final XFactTable factTable) {
+    final XFactTable factTable, MediaType mt) {
 
-    APIResult result = createFact(target, sessionId, factTable);
+    APIResult result = target.path("metastore").path("facts").queryParam("sessionid", sessionId)
+      .request(mt).post(Entity.entity(
+          new GenericEntity<JAXBElement<XFactTable>>(cubeObjectFactory.createXFactTable(factTable)) {
+          }, mt),
+        APIResult.class);
     checkResponse(result);
   }
 
   public static APIResult setCurrentDatabase(final WebTarget target, final LensSessionHandle sessionId,
-    final String dbName) {
+    final String dbName, MediaType mt) {
 
     WebTarget dbTarget = target.path("metastore").path("databases/current");
-    return dbTarget.queryParam("sessionid", sessionId).request(MediaType.APPLICATION_XML)
+    return dbTarget.queryParam("sessionid", sessionId).request(mt)
       .put(Entity.xml(dbName),
         APIResult.class);
   }
 
   public static void setCurrentDatabaseFailFast(final WebTarget target, final LensSessionHandle sessionId,
-    final String dbName) {
+    final String dbName, MediaType mt) {
 
-    APIResult result = setCurrentDatabase(target, sessionId, dbName);
+    APIResult result = setCurrentDatabase(target, sessionId, dbName, mt);
     checkResponse(result);
   }
 
   public static APIResult createDatabase(final WebTarget target, final LensSessionHandle sessionId,
-    final String dbName) {
+    final String dbName, MediaType mt) {
 
     WebTarget dbTarget = target.path("metastore").path("databases");
-    return dbTarget.queryParam("sessionid", sessionId).request(MediaType.APPLICATION_XML)
+    return dbTarget.queryParam("sessionid", sessionId).request(mt)
       .post(Entity.xml(dbName), APIResult.class);
   }
 
   public static void createDatabaseFailFast(final WebTarget target, final LensSessionHandle sessionId,
-    final String dbName) {
+    final String dbName, MediaType mt) {
 
-    APIResult result = createDatabase(target, sessionId, dbName);
+    APIResult result = createDatabase(target, sessionId, dbName, mt);
     checkResponse(result);
   }
 
   public static void createAndSetCurrentDbFailFast(final WebTarget target, final LensSessionHandle sessionId,
-    final String dbName) {
+    final String dbName, MediaType mt) {
 
-    createDatabaseFailFast(target, sessionId, dbName);
-    setCurrentDatabaseFailFast(target, sessionId, dbName);
+    createDatabaseFailFast(target, sessionId, dbName, mt);
+    setCurrentDatabaseFailFast(target, sessionId, dbName, mt);
   }
 
   public static APIResult dropDatabaseFailFast(final WebTarget target, final LensSessionHandle sessionId,
-    String dbName) {
+    String dbName, MediaType mt) {
 
     WebTarget dbTarget = target.path("metastore").path("databases").path(dbName);
     return dbTarget.queryParam("cascade", "true")
-      .queryParam("sessionid", sessionId).request(MediaType.APPLICATION_XML).delete(APIResult.class);
+      .queryParam("sessionid", sessionId).request(mt).delete(APIResult.class);
   }
 
   private static void checkResponse(final APIResult result) {
@@ -219,32 +216,38 @@ public class RestAPITestUtil {
   }
 
   public static LensQuery waitForQueryToFinish(final WebTarget target, final LensSessionHandle lensSessionHandle,
-    final QueryHandle handle) throws InterruptedException {
-    LensQuery ctx = getLensQuery(target, lensSessionHandle, handle);
+    final QueryHandle handle, MediaType mt) throws InterruptedException {
+    LensQuery ctx = getLensQuery(target, lensSessionHandle, handle, mt);
     while (!ctx.getStatus().finished()) {
-      ctx = getLensQuery(target, lensSessionHandle, handle);
+      ctx = getLensQuery(target, lensSessionHandle, handle, mt);
       Thread.sleep(1000);
     }
     return ctx;
   }
 
   public static LensQuery waitForQueryToFinish(final WebTarget target, final LensSessionHandle lensSessionHandle,
-    final QueryHandle handle, QueryStatus.Status status) throws InterruptedException {
-    LensQuery lensQuery = waitForQueryToFinish(target, lensSessionHandle, handle);
+    final QueryHandle handle, QueryStatus.Status status, MediaType mt) throws InterruptedException {
+    LensQuery lensQuery = waitForQueryToFinish(target, lensSessionHandle, handle, mt);
     assertEquals(lensQuery.getStatus().getStatus(), status);
     return lensQuery;
   }
 
   public static LensQuery getLensQuery(final WebTarget target, final LensSessionHandle lensSessionHandle,
-    final QueryHandle handle) {
-    return target.path("queryapi/queries").path(handle.toString()).queryParam("sessionid", lensSessionHandle).request()
-      .get(LensQuery.class);
+    final QueryHandle handle, MediaType mt) {
+    return target.path("queryapi/queries").path(handle.toString()).queryParam("sessionid", lensSessionHandle)
+      .request(mt).get(LensQuery.class);
   }
 
-  public static QueryResult getLensQueryResult(final WebTarget target, final LensSessionHandle lensSessionHandle,
-    final QueryHandle handle) {
+  public static String getLensQueryResultAsString(final WebTarget target, final LensSessionHandle lensSessionHandle,
+    final QueryHandle handle, MediaType mt) {
+    return target.path("queryapi/queries").path(handle.toString()).path("resultset")
+      .queryParam("sessionid", lensSessionHandle).request(mt).get(String.class);
+  }
+
+  public static PersistentQueryResult getLensQueryResult(final WebTarget target,
+    final LensSessionHandle lensSessionHandle, final QueryHandle handle, MediaType mt) {
     return target.path("queryapi/queries").path(handle.toString()).path("resultset")
-      .queryParam("sessionid", lensSessionHandle).request().get(QueryResult.class);
+      .queryParam("sessionid", lensSessionHandle).request(mt).get(PersistentQueryResult.class);
   }
 
   public static Response getLensQueryHttpResult(final WebTarget target, final LensSessionHandle lensSessionHandle,

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/healthcheck/TestHealthChecks.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/healthcheck/TestHealthChecks.java b/lens-server/src/test/java/org/apache/lens/server/healthcheck/TestHealthChecks.java
index 8e22c7a..56c67fb 100644
--- a/lens-server/src/test/java/org/apache/lens/server/healthcheck/TestHealthChecks.java
+++ b/lens-server/src/test/java/org/apache/lens/server/healthcheck/TestHealthChecks.java
@@ -20,24 +20,18 @@ package org.apache.lens.server.healthcheck;
 
 import static org.testng.Assert.*;
 
-import javax.ws.rs.core.Application;
-
 import org.apache.lens.server.EventServiceImpl;
-import org.apache.lens.server.LensJerseyTest;
+import org.apache.lens.server.LensAllApplicationJerseyTest;
 import org.apache.lens.server.LensServices;
 import org.apache.lens.server.api.LensService;
 import org.apache.lens.server.api.health.HealthStatus;
 import org.apache.lens.server.api.query.QueryExecutionService;
 import org.apache.lens.server.metastore.CubeMetastoreServiceImpl;
-import org.apache.lens.server.metastore.MetastoreApp;
 import org.apache.lens.server.metrics.MetricsServiceImpl;
 import org.apache.lens.server.quota.QuotaServiceImpl;
 import org.apache.lens.server.scheduler.SchedulerServiceImpl;
 import org.apache.lens.server.session.HiveSessionService;
 
-import org.glassfish.jersey.client.ClientConfig;
-import org.glassfish.jersey.media.multipart.MultiPartFeature;
-
 import org.testng.annotations.AfterTest;
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
@@ -45,7 +39,7 @@ import org.testng.annotations.Test;
 import com.codahale.metrics.health.HealthCheck;
 
 @Test(groups = "unit-test")
-public class TestHealthChecks extends LensJerseyTest {
+public class TestHealthChecks extends LensAllApplicationJerseyTest {
   @BeforeTest
   public void setUp() throws Exception {
     super.setUp();
@@ -56,16 +50,6 @@ public class TestHealthChecks extends LensJerseyTest {
     super.tearDown();
   }
 
-  @Override
-  protected Application configure() {
-    return new MetastoreApp();
-  }
-
-  @Override
-  protected void configureClient(ClientConfig config) {
-    config.register(MultiPartFeature.class);
-  }
-
   @Test
   public void testCubeMetastoreServiceHealth() throws Exception {
     checkHealth(CubeMetastoreServiceImpl.NAME);


[45/51] [abbrv] lens git commit: LENS-920 : Fix issues in producing and consuming json for all api

Posted by de...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index ef8c1aa..d9b7679 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -33,10 +33,7 @@ import java.util.*;
 import javax.ws.rs.NotFoundException;
 import javax.ws.rs.client.Entity;
 import javax.ws.rs.client.WebTarget;
-import javax.ws.rs.core.Application;
-import javax.ws.rs.core.GenericType;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
+import javax.ws.rs.core.*;
 
 import org.apache.lens.api.APIResult;
 import org.apache.lens.api.LensConf;
@@ -77,16 +74,11 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
 
-import org.glassfish.jersey.client.ClientConfig;
 import org.glassfish.jersey.media.multipart.FormDataBodyPart;
 import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
 import org.glassfish.jersey.media.multipart.FormDataMultiPart;
-import org.glassfish.jersey.media.multipart.MultiPartFeature;
 import org.glassfish.jersey.test.TestProperties;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.AfterTest;
-import org.testng.annotations.BeforeTest;
-import org.testng.annotations.Test;
+import org.testng.annotations.*;
 
 import com.codahale.metrics.MetricRegistry;
 import com.google.common.base.Optional;
@@ -130,7 +122,7 @@ public class TestQueryService extends LensJerseyTest {
     super.setUp();
     queryService = LensServices.get().getService(QueryExecutionService.NAME);
     metricsSvc = LensServices.get().getService(MetricsService.NAME);
-    Map<String, String> sessionconf = new HashMap<String, String>();
+    Map<String, String> sessionconf = new HashMap<>();
     sessionconf.put("test.session.key", "svalue");
     lensSessionId = queryService.openSession("foo@localhost", "bar", sessionconf); // @localhost should be removed
     // automatically
@@ -167,17 +159,6 @@ public class TestQueryService extends LensJerseyTest {
     return new QueryServiceTestApp();
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.glassfish.jersey.test.JerseyTest#configureClient(org.glassfish.jersey.client.ClientConfig)
-   */
-  @Override
-  protected void configureClient(ClientConfig config) {
-    config.register(MultiPartFeature.class);
-    config.register(LensJAXBContextResolver.class);
-  }
-
   /** The test table. */
   public static final String TEST_TABLE = "TEST_TABLE";
 
@@ -188,7 +169,7 @@ public class TestQueryService extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    */
   private void createTable(String tblName) throws InterruptedException {
-    LensServerTestUtil.createTable(tblName, target(), lensSessionId);
+    LensServerTestUtil.createTable(tblName, target(), lensSessionId, defaultMT);
   }
 
   /**
@@ -199,7 +180,7 @@ public class TestQueryService extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    */
   private void loadData(String tblName, final String testDataFile) throws InterruptedException {
-    LensServerTestUtil.loadDataFromClasspath(tblName, testDataFile, target(), lensSessionId);
+    LensServerTestUtil.loadDataFromClasspath(tblName, testDataFile, target(), lensSessionId, defaultMT);
   }
 
   /**
@@ -209,19 +190,17 @@ public class TestQueryService extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    */
   private void dropTable(String tblName) throws InterruptedException {
-    LensServerTestUtil.dropTable(tblName, target(), lensSessionId);
+    LensServerTestUtil.dropTable(tblName, target(), lensSessionId, defaultMT);
   }
 
-  // test get a random query, should return 400
-
   /**
-   * Test get random query.
+   * Test get random query. should return 400
    */
-  @Test
-  public void testGetRandomQuery() {
+  @Test(dataProvider = "mediaTypeData")
+  public void testGetRandomQuery(MediaType mt) {
     final WebTarget target = target().path("queryapi/queries");
 
-    Response rs = target.path("random").queryParam("sessionid", lensSessionId).request().get();
+    Response rs = target.path("random").queryParam("sessionid", lensSessionId).request(mt).get();
     assertEquals(rs.getStatus(), 400);
   }
 
@@ -229,7 +208,7 @@ public class TestQueryService extends LensJerseyTest {
   public void testLoadingMultipleDrivers() {
     Collection<LensDriver> drivers = queryService.getDrivers();
     assertEquals(drivers.size(), 4);
-    Set<String> driverNames = new HashSet<String>(drivers.size());
+    Set<String> driverNames = new HashSet<>(drivers.size());
     for (LensDriver driver : drivers) {
       assertEquals(driver.getConf().get("lens.driver.test.drivername"), driver.getFullyQualifiedName());
       driverNames.add(driver.getFullyQualifiedName());
@@ -242,19 +221,17 @@ public class TestQueryService extends LensJerseyTest {
    *
    * @throws InterruptedException the interrupted exception
    */
-  @Test
-  public void testRewriteFailureInExecute() throws InterruptedException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testRewriteFailureInExecute(MediaType mt) throws InterruptedException {
     final WebTarget target = target().path("queryapi/queries");
     LensConf conf = new LensConf();
     final FormDataMultiPart mp = new FormDataMultiPart();
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId, mt));
     mp.bodyPart(
       new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "select ID from non_exist_table"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-      MediaType.APPLICATION_XML_TYPE));
-    final Response response = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf, mt));
+    final Response response = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
     assertEquals(response.getStatus(), BAD_REQUEST.getStatusCode());
   }
 
@@ -263,10 +240,10 @@ public class TestQueryService extends LensJerseyTest {
    *
    * @throws InterruptedException the interrupted exception
    */
-  @Test
-  public void testLaunchFail() throws InterruptedException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testLaunchFail(MediaType mt) throws InterruptedException {
     LensQuery lensQuery = executeAndWaitForQueryToFinish(target(), lensSessionId, "select fail from non_exist",
-      Optional.<LensConf>absent(), Optional.of(Status.FAILED));
+      Optional.<LensConf>absent(), Optional.of(Status.FAILED), mt);
     assertTrue(lensQuery.getSubmissionTime() > 0);
     assertEquals(lensQuery.getLaunchTime(), 0);
     assertEquals(lensQuery.getDriverStartTime(), 0);
@@ -282,8 +259,8 @@ public class TestQueryService extends LensJerseyTest {
    *
    * @throws InterruptedException the interrupted exception
    */
-  @Test
-  public void testQueriesAPI() throws InterruptedException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testQueriesAPI(MediaType mt) throws InterruptedException {
     // test post execute op
     final WebTarget target = target().path("queryapi/queries");
 
@@ -292,7 +269,7 @@ public class TestQueryService extends LensJerseyTest {
     long finishedQueries = metricsSvc.getFinishedQueries();
 
     QueryHandle handle = executeAndGetHandle(target(), Optional.of(lensSessionId), Optional.of("select ID from "
-      + TEST_TABLE), Optional.<LensConf>absent());
+      + TEST_TABLE), Optional.<LensConf>absent(), mt);
 
     // Get all queries
     // XML
@@ -300,7 +277,7 @@ public class TestQueryService extends LensJerseyTest {
       .get(new GenericType<List<QueryHandle>>() {});
     assertTrue(allQueriesXML.size() >= 1);
 
-    List<QueryHandle> allQueries = target.queryParam("sessionid", lensSessionId).request()
+    List<QueryHandle> allQueries = target.queryParam("sessionid", lensSessionId).request(mt)
       .get(new GenericType<List<QueryHandle>>() {});
     assertTrue(allQueries.size() >= 1);
     assertTrue(allQueries.contains(handle));
@@ -309,17 +286,17 @@ public class TestQueryService extends LensJerseyTest {
       .request(MediaType.APPLICATION_XML).get(String.class);
     log.debug("query XML:{}", queryXML);
 
-    Response response = target.path(handle.toString() + "001").queryParam("sessionid", lensSessionId).request().get();
+    Response response = target.path(handle.toString() + "001").queryParam("sessionid", lensSessionId).request(mt).get();
     assertEquals(response.getStatus(), 404);
 
-    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
+    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(mt)
       .get(LensQuery.class);
 
     // wait till the query finishes
     QueryStatus stat = ctx.getStatus();
     while (!stat.finished()) {
       Thread.sleep(1000);
-      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
+      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(mt).get(LensQuery.class);
       stat = ctx.getStatus();
       /*
       Commented due to same issue as: https://issues.apache.org/jira/browse/LENS-683
@@ -343,10 +320,10 @@ public class TestQueryService extends LensJerseyTest {
     LensConf conf = new LensConf();
     conf.addProperty("my.property", "myvalue");
     confpart.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     confpart.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-      MediaType.APPLICATION_XML_TYPE));
-    APIResult updateConf = target.path(handle.toString()).request()
+      mt));
+    APIResult updateConf = target.path(handle.toString()).request(mt)
       .put(Entity.entity(confpart, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
     assertEquals(updateConf.getStatus(), APIResult.Status.FAILED);
   }
@@ -358,19 +335,19 @@ public class TestQueryService extends LensJerseyTest {
    *
    * @throws InterruptedException the interrupted exception
    */
-  @Test
-  public void testExplainQuery() throws InterruptedException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testExplainQuery(MediaType mt) throws InterruptedException {
     final WebTarget target = target().path("queryapi/queries");
 
     final FormDataMultiPart mp = new FormDataMultiPart();
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "select ID from " + TEST_TABLE));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "explain"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
 
-    final QueryPlan plan = target.request()
+    final QueryPlan plan = target.request(mt)
       .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
         new GenericType<LensAPIResult<QueryPlan>>() {}).getData();
     assertEquals(plan.getTablesQueried().size(), 1);
@@ -382,14 +359,14 @@ public class TestQueryService extends LensJerseyTest {
 
     final FormDataMultiPart mp2 = new FormDataMultiPart();
     mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(),
       "select ID from " + TEST_TABLE));
     mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "explain_and_prepare"));
     mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
 
-    final QueryPlan plan2 = ptarget.request().post(Entity.entity(mp2, MediaType.MULTIPART_FORM_DATA_TYPE),
+    final QueryPlan plan2 = ptarget.request(mt).post(Entity.entity(mp2, MediaType.MULTIPART_FORM_DATA_TYPE),
       new GenericType<LensAPIResult<QueryPlan>>() {}).getData();
     assertEquals(plan2.getTablesQueried().size(), 1);
     assertTrue(plan2.getTablesQueried().get(0).endsWith(TEST_TABLE.toLowerCase()));
@@ -404,20 +381,20 @@ public class TestQueryService extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    * @throws UnsupportedEncodingException
    */
-  @Test
-  public void testExplainFailure() throws InterruptedException, UnsupportedEncodingException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testExplainFailure(MediaType mt) throws InterruptedException, UnsupportedEncodingException {
     final WebTarget target = target().path("queryapi/queries");
 
     final FormDataMultiPart mp = new FormDataMultiPart();
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "select NO_ID from "
       + TEST_TABLE));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "explain"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
 
-    final Response responseExplain = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
+    final Response responseExplain = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
 
     assertEquals(responseExplain.getStatus(), BAD_REQUEST.getStatusCode());
 
@@ -426,14 +403,14 @@ public class TestQueryService extends LensJerseyTest {
 
     final FormDataMultiPart mp2 = new FormDataMultiPart();
     mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "select NO_ID from "
       + TEST_TABLE));
     mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "explain_and_prepare"));
     mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
 
-    final Response responseExplainAndPrepare = target.request().post(
+    final Response responseExplainAndPrepare = ptarget.request(mt).post(
       Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
 
     assertEquals(responseExplainAndPrepare.getStatus(), BAD_REQUEST.getStatusCode());
@@ -445,19 +422,18 @@ public class TestQueryService extends LensJerseyTest {
    * @throws IOException          Signals that an I/O exception has occurred.
    * @throws InterruptedException the interrupted exception
    */
-  @Test
-  public void testHiveSemanticFailure() throws InterruptedException, IOException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testHiveSemanticFailure(MediaType mt) throws InterruptedException, IOException {
     final WebTarget target = target().path("queryapi/queries");
-
     final FormDataMultiPart mp = new FormDataMultiPart();
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId, mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), " select ID from NOT_EXISTS"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
 
-    Response response = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
+    Response response = target.request(mt).post(Entity.entity(mp, MediaType
+      .MULTIPART_FORM_DATA_TYPE));
     LensAPIResult result = response.readEntity(LensAPIResult.class);
     List<LensErrorTO> childErrors = result.getLensErrorTO().getChildErrors();
     boolean hiveSemanticErrorExists = false;
@@ -482,31 +458,31 @@ public class TestQueryService extends LensJerseyTest {
    *
    * @throws InterruptedException the interrupted exception
    */
-  @Test
-  public void testPrepareQuery() throws InterruptedException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testPrepareQuery(MediaType mt) throws InterruptedException {
     final WebTarget target = target().path("queryapi/preparedqueries");
 
     final FormDataMultiPart mp = new FormDataMultiPart();
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "select ID from " + TEST_TABLE));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "prepare"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("queryName").build(), "testQuery1"));
 
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
 
-    final QueryPrepareHandle pHandle = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+    final QueryPrepareHandle pHandle = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
       new GenericType<LensAPIResult<QueryPrepareHandle>>() {}).getData();
 
     // Get all prepared queries
-    List<QueryPrepareHandle> allQueries = (List<QueryPrepareHandle>) target.queryParam("sessionid", lensSessionId)
-      .queryParam("queryName", "testQuery1").request().get(new GenericType<List<QueryPrepareHandle>>() {
+    List<QueryPrepareHandle> allQueries = target.queryParam("sessionid", lensSessionId)
+      .queryParam("queryName", "testQuery1").request(mt).get(new GenericType<List<QueryPrepareHandle>>() {
       });
     assertTrue(allQueries.size() >= 1);
     assertTrue(allQueries.contains(pHandle));
 
-    LensPreparedQuery ctx = target.path(pHandle.toString()).queryParam("sessionid", lensSessionId).request()
+    LensPreparedQuery ctx = target.path(pHandle.toString()).queryParam("sessionid", lensSessionId).request(mt)
       .get(LensPreparedQuery.class);
     assertTrue(ctx.getUserQuery().equalsIgnoreCase("select ID from " + TEST_TABLE));
     assertTrue(ctx.getDriverQuery().equalsIgnoreCase("select ID from " + TEST_TABLE));
@@ -519,39 +495,40 @@ public class TestQueryService extends LensJerseyTest {
     LensConf conf = new LensConf();
     conf.addProperty("my.property", "myvalue");
     confpart.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     confpart.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-      MediaType.APPLICATION_XML_TYPE));
-    APIResult updateConf = target.path(pHandle.toString()).request()
+      mt));
+    APIResult updateConf = target.path(pHandle.toString()).request(mt)
       .put(Entity.entity(confpart, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
     assertEquals(updateConf.getStatus(), APIResult.Status.SUCCEEDED);
 
-    ctx = target.path(pHandle.toString()).queryParam("sessionid", lensSessionId).request().get(LensPreparedQuery.class);
+    ctx = target.path(pHandle.toString()).queryParam("sessionid", lensSessionId).request(mt).get(LensPreparedQuery
+      .class);
     assertEquals(ctx.getConf().getProperties().get("my.property"), "myvalue");
 
-    QueryHandle handle1 = target.path(pHandle.toString()).request()
+    QueryHandle handle1 = target.path(pHandle.toString()).request(mt)
       .post(Entity.entity(confpart, MediaType.MULTIPART_FORM_DATA_TYPE), QueryHandle.class);
 
     // Override query name
     confpart.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("queryName").build(), "testQueryName2"));
     // do post once again
-    QueryHandle handle2 = target.path(pHandle.toString()).request()
+    QueryHandle handle2 = target.path(pHandle.toString()).request(mt)
       .post(Entity.entity(confpart, MediaType.MULTIPART_FORM_DATA_TYPE), QueryHandle.class);
     assertNotEquals(handle1, handle2);
 
-    LensQuery ctx1 = waitForQueryToFinish(target(), lensSessionId, handle1, Status.SUCCESSFUL);
+    LensQuery ctx1 = waitForQueryToFinish(target(), lensSessionId, handle1, Status.SUCCESSFUL, mt);
     assertEquals(ctx1.getQueryName().toLowerCase(), "testquery1");
 
-    LensQuery ctx2 = waitForQueryToFinish(target(), lensSessionId, handle2, Status.SUCCESSFUL);
+    LensQuery ctx2 = waitForQueryToFinish(target(), lensSessionId, handle2, Status.SUCCESSFUL, mt);
     assertEquals(ctx2.getQueryName().toLowerCase(), "testqueryname2");
 
     // destroy prepared
-    APIResult result = target.path(pHandle.toString()).queryParam("sessionid", lensSessionId).request()
+    APIResult result = target.path(pHandle.toString()).queryParam("sessionid", lensSessionId).request(mt)
       .delete(APIResult.class);
     assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
 
     // Post on destroyed query
-    Response response = target.path(pHandle.toString()).request()
+    Response response = target.path(pHandle.toString()).request(mt)
       .post(Entity.entity(confpart, MediaType.MULTIPART_FORM_DATA_TYPE), Response.class);
     assertEquals(response.getStatus(), 404);
   }
@@ -561,19 +538,19 @@ public class TestQueryService extends LensJerseyTest {
    *
    * @throws InterruptedException the interrupted exception
    */
-  @Test
-  public void testExplainAndPrepareQuery() throws InterruptedException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testExplainAndPrepareQuery(MediaType mt) throws InterruptedException {
     final WebTarget target = target().path("queryapi/preparedqueries");
 
     final FormDataMultiPart mp = new FormDataMultiPart();
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "select ID from " + TEST_TABLE));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "explain_and_prepare"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
 
-    final QueryPlan plan = target.request()
+    final QueryPlan plan = target.request(mt)
       .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
         new GenericType<LensAPIResult<QueryPlan>>() {}).getData();
 
@@ -582,7 +559,7 @@ public class TestQueryService extends LensJerseyTest {
     assertNotNull(plan.getPrepareHandle());
 
     LensPreparedQuery ctx = target.path(plan.getPrepareHandle().toString()).queryParam("sessionid", lensSessionId)
-      .request().get(LensPreparedQuery.class);
+      .request(mt).get(LensPreparedQuery.class);
     assertTrue(ctx.getUserQuery().equalsIgnoreCase("select ID from " + TEST_TABLE));
     assertTrue(ctx.getDriverQuery().equalsIgnoreCase("select ID from " + TEST_TABLE));
     //both drivers hive/hive1 and hive/hive2 are capable of handling the query as they point to the same hive server
@@ -594,35 +571,35 @@ public class TestQueryService extends LensJerseyTest {
     LensConf conf = new LensConf();
     conf.addProperty("my.property", "myvalue");
     confpart.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     confpart.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-      MediaType.APPLICATION_XML_TYPE));
-    APIResult updateConf = target.path(plan.getPrepareHandle().toString()).request()
+      mt));
+    APIResult updateConf = target.path(plan.getPrepareHandle().toString()).request(mt)
       .put(Entity.entity(confpart, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
     assertEquals(updateConf.getStatus(), APIResult.Status.SUCCEEDED);
 
-    ctx = target.path(plan.getPrepareHandle().toString()).queryParam("sessionid", lensSessionId).request()
+    ctx = target.path(plan.getPrepareHandle().toString()).queryParam("sessionid", lensSessionId).request(mt)
       .get(LensPreparedQuery.class);
     assertEquals(ctx.getConf().getProperties().get("my.property"), "myvalue");
 
-    QueryHandle handle1 = target.path(plan.getPrepareHandle().toString()).request()
+    QueryHandle handle1 = target.path(plan.getPrepareHandle().toString()).request(mt)
       .post(Entity.entity(confpart, MediaType.MULTIPART_FORM_DATA_TYPE), QueryHandle.class);
 
     // do post once again
-    QueryHandle handle2 = target.path(plan.getPrepareHandle().toString()).request()
+    QueryHandle handle2 = target.path(plan.getPrepareHandle().toString()).request(mt)
       .post(Entity.entity(confpart, MediaType.MULTIPART_FORM_DATA_TYPE), QueryHandle.class);
     assertNotEquals(handle1, handle2);
 
-    waitForQueryToFinish(target(), lensSessionId, handle1, Status.SUCCESSFUL);
-    waitForQueryToFinish(target(), lensSessionId, handle2, Status.SUCCESSFUL);
+    waitForQueryToFinish(target(), lensSessionId, handle1, Status.SUCCESSFUL, mt);
+    waitForQueryToFinish(target(), lensSessionId, handle2, Status.SUCCESSFUL, mt);
 
     // destroy prepared
-    APIResult result = target.path(plan.getPrepareHandle().toString()).queryParam("sessionid", lensSessionId).request()
-      .delete(APIResult.class);
+    APIResult result = target.path(plan.getPrepareHandle().toString()).queryParam("sessionid", lensSessionId)
+      .request(mt).delete(APIResult.class);
     assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
 
     // Post on destroyed query
-    Response response = target.path(plan.getPrepareHandle().toString()).request()
+    Response response = target.path(plan.getPrepareHandle().toString()).request(mt)
       .post(Entity.entity(confpart, MediaType.MULTIPART_FORM_DATA_TYPE), Response.class);
     assertEquals(response.getStatus(), 404);
 
@@ -637,8 +614,8 @@ public class TestQueryService extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    * @throws IOException          Signals that an I/O exception has occurred.
    */
-  @Test
-  public void testExecuteAsync() throws InterruptedException, IOException, LensException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testExecuteAsync(MediaType mt) throws InterruptedException, IOException, LensException {
     // test post execute op
     final WebTarget target = target().path("queryapi/queries");
 
@@ -647,19 +624,19 @@ public class TestQueryService extends LensJerseyTest {
 
     final FormDataMultiPart mp = new FormDataMultiPart();
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "select ID, IDSTR from "
       + TEST_TABLE));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
-      MediaType.APPLICATION_XML_TYPE));
-    final QueryHandle handle = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+      mt));
+    final QueryHandle handle = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
       new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
 
     assertNotNull(handle);
 
     // Get query
-    LensQuery lensQuery = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
+    LensQuery lensQuery = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(mt)
       .get(LensQuery.class);
     assertTrue(lensQuery.getStatus().getStatus().equals(Status.QUEUED)
       || lensQuery.getStatus().getStatus().equals(Status.LAUNCHED)
@@ -669,7 +646,8 @@ public class TestQueryService extends LensJerseyTest {
     // wait till the query finishes
     QueryStatus stat = lensQuery.getStatus();
     while (!stat.finished()) {
-      lensQuery = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
+      lensQuery = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(mt).get(LensQuery
+        .class);
       stat = lensQuery.getStatus();
       /* Commented and jira ticket raised for correction: https://issues.apache.org/jira/browse/LENS-683
       switch (stat.getStatus()) {
@@ -694,17 +672,18 @@ public class TestQueryService extends LensJerseyTest {
     assertEquals(ctx.getPhase1RewrittenQuery(), ctx.getUserQuery()); //Since there is no rewriter in this test
     assertEquals(lensQuery.getStatus().getStatus(), QueryStatus.Status.SUCCESSFUL);
 
-    validatePersistedResult(handle, target(), lensSessionId, new String[][]{{"ID", "INT"}, {"IDSTR", "STRING"}}, true);
+    validatePersistedResult(handle, target(), lensSessionId, new String[][]{{"ID", "INT"}, {"IDSTR", "STRING"}},
+      true, mt);
 
     // test cancel query
-    final QueryHandle handle2 = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+    final QueryHandle handle2 = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
       new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
 
     assertNotNull(handle2);
-    APIResult result = target.path(handle2.toString()).queryParam("sessionid", lensSessionId).request()
+    APIResult result = target.path(handle2.toString()).queryParam("sessionid", lensSessionId).request(mt)
       .delete(APIResult.class);
     // cancel would fail query is already successful
-    LensQuery ctx2 = target.path(handle2.toString()).queryParam("sessionid", lensSessionId).request()
+    LensQuery ctx2 = target.path(handle2.toString()).queryParam("sessionid", lensSessionId).request(mt)
       .get(LensQuery.class);
     if (result.getStatus().equals(APIResult.Status.FAILED)) {
       assertEquals(ctx2.getStatus().getStatus(), QueryStatus.Status.SUCCESSFUL,
@@ -720,7 +699,7 @@ public class TestQueryService extends LensJerseyTest {
     log.info("Starting httpendpoint test");
     final FormDataMultiPart mp3 = new FormDataMultiPart();
     mp3.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp3.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "select ID, IDSTR from "
       + TEST_TABLE));
     mp3.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
@@ -728,12 +707,12 @@ public class TestQueryService extends LensJerseyTest {
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_SET, "true");
 
     mp3.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-      MediaType.APPLICATION_XML_TYPE));
-    final QueryHandle handle3 = target.request().post(Entity.entity(mp3, MediaType.MULTIPART_FORM_DATA_TYPE),
+      mt));
+    final QueryHandle handle3 = target.request(mt).post(Entity.entity(mp3, MediaType.MULTIPART_FORM_DATA_TYPE),
       new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
 
     // Get query
-    waitForQueryToFinish(target(), lensSessionId, handle3, Status.SUCCESSFUL);
+    waitForQueryToFinish(target(), lensSessionId, handle3, Status.SUCCESSFUL, mt);
     validateHttpEndPoint(target(), null, handle3, null);
   }
 
@@ -747,19 +726,19 @@ public class TestQueryService extends LensJerseyTest {
    * @throws IOException Signals that an I/O exception has occurred.
    */
   static void validatePersistedResult(QueryHandle handle, WebTarget parent, LensSessionHandle lensSessionId,
-    String[][] schema, boolean isDir) throws IOException {
+    String[][] schema, boolean isDir, MediaType mt) throws IOException {
     final WebTarget target = parent.path("queryapi/queries");
     // fetch results
     validateResultSetMetadata(handle, "",
       schema,
-      parent, lensSessionId);
+      parent, lensSessionId, mt);
 
     String presultset = target.path(handle.toString()).path("resultset").queryParam("sessionid", lensSessionId)
-      .request().get(String.class);
+      .request(mt).get(String.class);
     System.out.println("PERSISTED RESULT:" + presultset);
 
     PersistentQueryResult resultset = target.path(handle.toString()).path("resultset")
-      .queryParam("sessionid", lensSessionId).request().get(PersistentQueryResult.class);
+      .queryParam("sessionid", lensSessionId).request(mt).get(PersistentQueryResult.class);
     validatePersistentResult(resultset, handle, isDir);
 
     if (isDir) {
@@ -781,7 +760,7 @@ public class TestQueryService extends LensJerseyTest {
     assertTrue(resultset.getPersistedURI().contains(handle.toString()));
     Path actualPath = new Path(resultset.getPersistedURI());
     FileSystem fs = actualPath.getFileSystem(new Configuration());
-    List<String> actualRows = new ArrayList<String>();
+    List<String> actualRows = new ArrayList<>();
     if (fs.getFileStatus(actualPath).isDir()) {
       assertTrue(isDir);
       for (FileStatus fstat : fs.listStatus(actualPath)) {
@@ -832,7 +811,7 @@ public class TestQueryService extends LensJerseyTest {
     BufferedReader br = null;
     try {
       br = new BufferedReader(new InputStreamReader(in));
-      String line = "";
+      String line;
 
       while ((line = br.readLine()) != null) {
         actualRows.add(line);
@@ -951,8 +930,8 @@ public class TestQueryService extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    * @throws IOException          Signals that an I/O exception has occurred.
    */
-  @Test
-  public void testExecuteAsyncInMemoryResult() throws InterruptedException, IOException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testExecuteAsyncInMemoryResult(MediaType mt) throws InterruptedException, IOException {
     // test post execute op
     final WebTarget target = target().path("queryapi/queries");
 
@@ -960,28 +939,26 @@ public class TestQueryService extends LensJerseyTest {
     LensConf conf = new LensConf();
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "select ID, IDSTR from "
       + TEST_TABLE));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-      MediaType.APPLICATION_XML_TYPE));
-    final QueryHandle handle = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+      mt));
+    final QueryHandle handle = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
       new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
 
     assertNotNull(handle);
 
     // Get query
-    waitForQueryToFinish(target(), lensSessionId, handle, Status.SUCCESSFUL);
+    waitForQueryToFinish(target(), lensSessionId, handle, Status.SUCCESSFUL, mt);
 
     // fetch results
     validateResultSetMetadata(handle, "",
       new String[][]{{"ID", "INT"}, {"IDSTR", "STRING"}},
-      target(), lensSessionId);
+      target(), lensSessionId, mt);
 
-    InMemoryQueryResult resultset = target.path(handle.toString()).path("resultset")
-      .queryParam("sessionid", lensSessionId).request().get(InMemoryQueryResult.class);
-    validateInmemoryResult(resultset);
+    validateInmemoryResult(target, handle, mt);
 
     validNotFoundForHttpResult(target(), lensSessionId, handle);
     waitForPurge(0, queryService.finishedQueries);
@@ -1004,12 +981,12 @@ public class TestQueryService extends LensJerseyTest {
       conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_SET, "false");
       conf.addProperty(LensConfConstants.QUERY_MAIL_NOTIFY, "false");
       mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-          MediaType.APPLICATION_XML_TYPE));
+          defaultMT));
       mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "select ID, IDSTR from "
           + TEST_TABLE));
       mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
       mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-          MediaType.APPLICATION_XML_TYPE));
+        defaultMT));
 
       final QueryHandle handle =
           target
@@ -1019,7 +996,7 @@ public class TestQueryService extends LensJerseyTest {
                   }).getData();
       assertNotNull(handle);
 
-      waitForQueryToFinish(target(), lensSessionId, handle, Status.SUCCESSFUL);
+      waitForQueryToFinish(target(), lensSessionId, handle, Status.SUCCESSFUL, defaultMT);
 
       // Check TTL
       QueryContext ctx = queryService.getQueryContext(lensSessionId, handle);
@@ -1048,8 +1025,8 @@ public class TestQueryService extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    * @throws IOException          Signals that an I/O exception has occurred.
    */
-  @Test
-  public void testExecuteAsyncTempTable() throws InterruptedException, IOException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testExecuteAsyncTempTable(MediaType mt) throws InterruptedException, IOException {
     // test post execute op
     final WebTarget target = target().path("queryapi/queries");
 
@@ -1057,61 +1034,59 @@ public class TestQueryService extends LensJerseyTest {
     LensConf conf = new LensConf();
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
     drop.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     drop.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(),
       "drop table if exists temp_output"));
     drop.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
     drop.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-      MediaType.APPLICATION_XML_TYPE));
-    final QueryHandle dropHandle = target.request().post(Entity.entity(drop, MediaType.MULTIPART_FORM_DATA_TYPE),
+      mt));
+    final QueryHandle dropHandle = target.request(mt).post(Entity.entity(drop, MediaType.MULTIPART_FORM_DATA_TYPE),
       new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
 
     assertNotNull(dropHandle);
 
     // Get query
-    waitForQueryToFinish(target(), lensSessionId, dropHandle, Status.SUCCESSFUL);
+    waitForQueryToFinish(target(), lensSessionId, dropHandle, Status.SUCCESSFUL, mt);
 
     final FormDataMultiPart mp = new FormDataMultiPart();
     conf = new LensConf();
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(),
       "create table temp_output as select ID, IDSTR from " + TEST_TABLE));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-      MediaType.APPLICATION_XML_TYPE));
-    final QueryHandle handle = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+      mt));
+    final QueryHandle handle = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
       new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
 
     assertNotNull(handle);
 
     // Get query
-    waitForQueryToFinish(target(), lensSessionId, handle, Status.SUCCESSFUL);
+    waitForQueryToFinish(target(), lensSessionId, handle, Status.SUCCESSFUL, mt);
 
     String select = "SELECT * FROM temp_output";
     final FormDataMultiPart fetch = new FormDataMultiPart();
     fetch.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     fetch.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), select));
     fetch.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
     fetch.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-      MediaType.APPLICATION_XML_TYPE));
-    final QueryHandle handle2 = target.request().post(Entity.entity(fetch, MediaType.MULTIPART_FORM_DATA_TYPE),
+      mt));
+    final QueryHandle handle2 = target.request(mt).post(Entity.entity(fetch, MediaType.MULTIPART_FORM_DATA_TYPE),
       new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
 
     assertNotNull(handle2);
 
     // Get query
-    waitForQueryToFinish(target(), lensSessionId, handle2, Status.SUCCESSFUL);
+    waitForQueryToFinish(target(), lensSessionId, handle2, Status.SUCCESSFUL, mt);
 
     // fetch results
     validateResultSetMetadata(handle2, "temp_output.", new String[][]{{"ID", "INT"}, {"IDSTR", "STRING"}},
-      target(), lensSessionId);
+      target(), lensSessionId, mt);
 
-    InMemoryQueryResult resultset = target.path(handle2.toString()).path("resultset")
-      .queryParam("sessionid", lensSessionId).request().get(InMemoryQueryResult.class);
-    validateInmemoryResult(resultset);
+    validateInmemoryResult(target, handle2, mt);
   }
 
   /**
@@ -1121,10 +1096,11 @@ public class TestQueryService extends LensJerseyTest {
    * @param parent        the parent
    * @param lensSessionId the lens session id
    */
-  static void validateResultSetMetadata(QueryHandle handle, WebTarget parent, LensSessionHandle lensSessionId) {
+  static void validateResultSetMetadata(QueryHandle handle, WebTarget parent, LensSessionHandle lensSessionId,
+    MediaType mt) {
     validateResultSetMetadata(handle, "",
       new String[][]{{"ID", "INT"}, {"IDSTR", "STRING"}, {"IDARR", "ARRAY"}, {"IDSTRARR", "ARRAY"}},
-      parent, lensSessionId);
+      parent, lensSessionId, mt);
   }
 
   /**
@@ -1136,11 +1112,11 @@ public class TestQueryService extends LensJerseyTest {
    * @param lensSessionId  the lens session id
    */
   static void validateResultSetMetadata(QueryHandle handle, String outputTablePfx, String[][] columns, WebTarget parent,
-    LensSessionHandle lensSessionId) {
+    LensSessionHandle lensSessionId, MediaType mt) {
     final WebTarget target = parent.path("queryapi/queries");
 
     QueryResultSetMetadata metadata = target.path(handle.toString()).path("resultsetmetadata")
-      .queryParam("sessionid", lensSessionId).request().get(QueryResultSetMetadata.class);
+      .queryParam("sessionid", lensSessionId).request(mt).get(QueryResultSetMetadata.class);
     assertEquals(metadata.getColumns().size(), columns.length);
     for (int i = 0; i < columns.length; i++) {
       assertTrue(
@@ -1150,6 +1126,28 @@ public class TestQueryService extends LensJerseyTest {
       assertEquals(columns[i][1].toLowerCase(), metadata.getColumns().get(i).getType().name().toLowerCase());
     }
   }
+  private void validateInmemoryResult(WebTarget target, QueryHandle handle, MediaType mt) throws IOException {
+    if (mt.equals(MediaType.APPLICATION_JSON_TYPE)) {
+      String resultSet = target.path(handle.toString()).path("resultset")
+        .queryParam("sessionid", lensSessionId).request(mt).get(String.class);
+      // this is being done because json unmarshalling does not work to construct java Objects back
+      assertEquals(resultSet.replaceAll("\\W", ""), expectedJsonResult().replaceAll("\\W", ""));
+    } else {
+      InMemoryQueryResult resultSet = target.path(handle.toString()).path("resultset")
+        .queryParam("sessionid", lensSessionId).request(mt).get(InMemoryQueryResult.class);
+      validateInmemoryResult(resultSet);
+    }
+  }
+  private String expectedJsonResult() {
+    StringBuilder expectedJson = new StringBuilder();
+    expectedJson.append("{\"inMemoryQueryResult\" : {\"rows\" : [ ")
+      .append("{\"values\" : [ {\n\"type\" : \"int\",\n\"value\" : 1}, {\"type\" : \"string\",\"value\" : \"one\"} ]},")
+      .append("{\"values\" : [ null, {\"type\" : \"string\",\"value\" : \"two\"} ]},")
+      .append("{\"values\" : [ {\"type\" : \"int\",\"value\" : 3}, null ]},")
+      .append("{\"values\" : [ null, null ]},")
+      .append("{\"values\" : [ {\"type\" : \"int\",\"value\" : 5}, {\"type\" : \"string\",\"value\" : \"\"} ]} ]}}");
+    return expectedJson.toString();
+  }
 
   /**
    * Validate inmemory result.
@@ -1182,22 +1180,22 @@ public class TestQueryService extends LensJerseyTest {
    * @throws IOException          Signals that an I/O exception has occurred.
    * @throws InterruptedException the interrupted exception
    */
-  @Test
-  public void testExecuteWithTimeoutQuery() throws IOException, InterruptedException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testExecuteWithTimeoutQuery(MediaType mt) throws IOException, InterruptedException {
     final WebTarget target = target().path("queryapi/queries");
 
     final FormDataMultiPart mp = new FormDataMultiPart();
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "select ID, IDSTR from "
       + TEST_TABLE));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute_with_timeout"));
     // set a timeout value enough for tests
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("timeoutmillis").build(), "300000"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
 
-    QueryHandleWithResultSet result = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+    QueryHandleWithResultSet result = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
       new GenericType<LensAPIResult<QueryHandleWithResultSet>>() {}).getData();
     assertNotNull(result.getQueryHandle());
     assertNotNull(result.getResult());
@@ -1207,43 +1205,55 @@ public class TestQueryService extends LensJerseyTest {
     LensConf conf = new LensConf();
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
     mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "select ID, IDSTR from "
       + TEST_TABLE));
     mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute_with_timeout"));
     // set a timeout value enough for tests
     mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("timeoutmillis").build(), "300000"));
     mp2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
 
-    result = target.request().post(Entity.entity(mp2, MediaType.MULTIPART_FORM_DATA_TYPE),
-      new GenericType<LensAPIResult<QueryHandleWithResultSet>>() {}).getData();
-    assertNotNull(result.getQueryHandle());
-    assertNotNull(result.getResult());
-    validateInmemoryResult((InMemoryQueryResult) result.getResult());
+    validateInmemoryResultForTimeoutQuery(target, mp2, mt);
   }
 
+  private void validateInmemoryResultForTimeoutQuery(WebTarget target, FormDataMultiPart mp, MediaType mt) {
+    if (mt.equals(MediaType.APPLICATION_JSON_TYPE)) {
+      String result = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), String.class);
+      assertTrue(result.contains("\"type\" : \"queryHandleWithResultSet\""));
+      assertTrue(result.contains("\"status\" : \"SUCCESSFUL\""));
+      assertTrue(result.contains("\"isResultSetAvailable\" : true"));
+      assertTrue(result.replaceAll("\\W", "").contains(expectedJsonResult().replaceAll("\\W", "")));
+    } else {
+      QueryHandleWithResultSet result = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+        new GenericType<LensAPIResult<QueryHandleWithResultSet>>() {
+        }).getData();
+      assertNotNull(result.getQueryHandle());
+      assertNotNull(result.getResult());
+      validateInmemoryResult((InMemoryQueryResult) result.getResult());
+    }
+  }
   /**
    * Test execute with timeout query.
    *
    * @throws IOException          Signals that an I/O exception has occurred.
    * @throws InterruptedException the interrupted exception
    */
-  @Test
-  public void testExecuteWithTimeoutFailingQuery() throws IOException, InterruptedException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testExecuteWithTimeoutFailingQuery(MediaType mt) throws IOException, InterruptedException {
     final WebTarget target = target().path("queryapi/queries");
 
     final FormDataMultiPart mp = new FormDataMultiPart();
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "select ID from nonexist"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute_with_timeout"));
     // set a timeout value enough for tests
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("timeoutmillis").build(), "300000"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
 
-    Response response = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
+    Response response = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
     assertEquals(response.getStatus(), BAD_REQUEST.getStatusCode());
   }
 
@@ -1281,7 +1291,7 @@ public class TestQueryService extends LensJerseyTest {
 
     final String query = "select ID from " + TEST_TABLE;
     QueryContext ctx = new QueryContext(query, null, queryConf, conf, queryService.getDrivers());
-    Map<LensDriver, String> driverQueries = new HashMap<LensDriver, String>();
+    Map<LensDriver, String> driverQueries = new HashMap<>();
     for (LensDriver driver : queryService.getDrivers()) {
       driverQueries.put(driver, query);
     }
@@ -1335,20 +1345,20 @@ public class TestQueryService extends LensJerseyTest {
    *
    * @throws InterruptedException the interrupted exception
    */
-  @Test
-  public void testEstimateNativeQuery() throws InterruptedException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testEstimateNativeQuery(MediaType mt) throws InterruptedException {
     final WebTarget target = target().path("queryapi/queries");
 
     // estimate native query
     final FormDataMultiPart mp = new FormDataMultiPart();
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "select ID from " + TEST_TABLE));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "estimate"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
 
-    final QueryCostTO result = target.request()
+    final QueryCostTO result = target.request(mt)
       .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
         new GenericType<LensAPIResult<QueryCostTO>>() {}).getData();
     assertNotNull(result);
@@ -1361,8 +1371,8 @@ public class TestQueryService extends LensJerseyTest {
    * Check if DB static jars get passed to Hive driver
    * @throws Exception
    */
-  @Test
-  public void testHiveDriverGetsDBJars() throws Exception {
+  @Test(dataProvider = "mediaTypeData")
+  public void testHiveDriverGetsDBJars(MediaType mt) throws Exception {
     // Set DB to a db with static jars
     HiveSessionService sessionService = LensServices.get().getService(SessionService.NAME);
 
@@ -1385,7 +1395,7 @@ public class TestQueryService extends LensJerseyTest {
     try {
       // First execute query on the session with db should load jars from DB
       LensServerTestUtil.createTable(tableInDBWithJars, target(), sessionHandle, "(ID INT, IDSTR STRING) "
-        + "ROW FORMAT SERDE \"DatabaseJarSerde\"");
+        + "ROW FORMAT SERDE \"DatabaseJarSerde\"", mt);
 
       boolean addedToHiveDriver = false;
 
@@ -1404,7 +1414,7 @@ public class TestQueryService extends LensJerseyTest {
       log.info("@@@# database switch test");
       session.setCurrentDatabase(DB_WITH_JARS_2);
       LensServerTestUtil.createTable(tableInDBWithJars + "_2", target(), sessionHandle, "(ID INT, IDSTR STRING) "
-        + "ROW FORMAT SERDE \"DatabaseJarSerde\"");
+        + "ROW FORMAT SERDE \"DatabaseJarSerde\"", mt);
 
       // All db jars should have been added
       assertTrue(session.getDBResources(DB_WITH_JARS_2).isEmpty());
@@ -1423,8 +1433,8 @@ public class TestQueryService extends LensJerseyTest {
     } finally {
       log.info("@@@ TEST_OVER");
       try {
-        LensServerTestUtil.dropTable(tableInDBWithJars, target(), sessionHandle);
-        LensServerTestUtil.dropTable(tableInDBWithJars + "_2", target(), sessionHandle);
+        LensServerTestUtil.dropTable(tableInDBWithJars, target(), sessionHandle, mt);
+        LensServerTestUtil.dropTable(tableInDBWithJars + "_2", target(), sessionHandle, mt);
       } catch (Throwable th) {
         log.error("Exception while dropping table.", th);
       }
@@ -1432,21 +1442,21 @@ public class TestQueryService extends LensJerseyTest {
     }
   }
 
-  @Test
-  public void testRewriteFailure() {
+  @Test(dataProvider = "mediaTypeData")
+  public void testRewriteFailure(MediaType mt) {
     final WebTarget target = target().path("queryapi/queries");
 
     // estimate cube query which fails semantic analysis
     final FormDataMultiPart mp = new FormDataMultiPart();
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(),
       "cube sdfelect ID from cube_nonexist"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "estimate"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
 
-    final Response response = target.request()
+    final Response response = target.request(mt)
       .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
 
 
@@ -1480,36 +1490,37 @@ public class TestQueryService extends LensJerseyTest {
     }
   }
 
-  @Test
-  public void testNonSelectQueriesWithPersistResult() throws InterruptedException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testNonSelectQueriesWithPersistResult(MediaType mt) throws InterruptedException {
     LensConf conf = new LensConf();
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "true");
     String tblName = "testNonSelectQueriesWithPersistResult";
-    LensServerTestUtil.dropTableWithConf(tblName, target(), lensSessionId, conf);
+    LensServerTestUtil.dropTableWithConf(tblName, target(), lensSessionId, conf, mt);
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_SET, "true");
-    LensServerTestUtil.dropTableWithConf(tblName, target(), lensSessionId, conf);
+    LensServerTestUtil.dropTableWithConf(tblName, target(), lensSessionId, conf, mt);
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
-    LensServerTestUtil.dropTableWithConf(tblName, target(), lensSessionId, conf);
+    LensServerTestUtil.dropTableWithConf(tblName, target(), lensSessionId, conf, mt);
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_SET, "false");
-    LensServerTestUtil.dropTableWithConf(tblName, target(), lensSessionId, conf);
+    LensServerTestUtil.dropTableWithConf(tblName, target(), lensSessionId, conf, mt);
   }
 
-  @Test
-  public void testEstimateGauges() {
+  @Test(dataProvider = "mediaTypeData")
+  public void testEstimateGauges(MediaType mt) {
     final WebTarget target = target().path("queryapi/queries");
 
     LensConf conf = new LensConf();
-    conf.addProperty(LensConfConstants.QUERY_METRIC_UNIQUE_ID_CONF_KEY, "TestQueryService-testEstimateGauges");
+    String gaugeKey = "TestQueryService-testEstimateGauges" + mt.getSubtype();
+    conf.addProperty(LensConfConstants.QUERY_METRIC_UNIQUE_ID_CONF_KEY, gaugeKey);
     // estimate native query
     final FormDataMultiPart mp = new FormDataMultiPart();
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "select ID from " + TEST_TABLE));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "estimate"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
 
-    final QueryCostTO queryCostTO = target.request()
+    final QueryCostTO queryCostTO = target.request(mt)
       .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
         new GenericType<LensAPIResult<QueryCostTO>>() {
         }).getData();
@@ -1518,34 +1529,34 @@ public class TestQueryService extends LensJerseyTest {
     MetricRegistry reg = LensMetricsRegistry.getStaticRegistry();
 
     assertTrue(reg.getGauges().keySet().containsAll(Arrays.asList(
-      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-DRIVER_SELECTION",
-      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive1-CUBE_REWRITE",
-      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive1-DRIVER_ESTIMATE",
-      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive1-RewriteUtil-rewriteQuery",
-      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive2-CUBE_REWRITE",
-      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive2-DRIVER_ESTIMATE",
-      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive2-RewriteUtil-rewriteQuery",
-      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-jdbc/jdbc1-CUBE_REWRITE",
-      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-jdbc/jdbc1-DRIVER_ESTIMATE",
-      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-jdbc/jdbc1-RewriteUtil-rewriteQuery",
-      "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-PARALLEL_ESTIMATE")),
+      "lens.MethodMetricGauge." + gaugeKey + "-DRIVER_SELECTION",
+      "lens.MethodMetricGauge." + gaugeKey + "-hive/hive1-CUBE_REWRITE",
+      "lens.MethodMetricGauge." + gaugeKey + "-hive/hive1-DRIVER_ESTIMATE",
+      "lens.MethodMetricGauge." + gaugeKey + "-hive/hive1-RewriteUtil-rewriteQuery",
+      "lens.MethodMetricGauge." + gaugeKey + "-hive/hive2-CUBE_REWRITE",
+      "lens.MethodMetricGauge." + gaugeKey + "-hive/hive2-DRIVER_ESTIMATE",
+      "lens.MethodMetricGauge." + gaugeKey + "-hive/hive2-RewriteUtil-rewriteQuery",
+      "lens.MethodMetricGauge." + gaugeKey + "-jdbc/jdbc1-CUBE_REWRITE",
+      "lens.MethodMetricGauge." + gaugeKey + "-jdbc/jdbc1-DRIVER_ESTIMATE",
+      "lens.MethodMetricGauge." + gaugeKey + "-jdbc/jdbc1-RewriteUtil-rewriteQuery",
+      "lens.MethodMetricGauge." + gaugeKey + "-PARALLEL_ESTIMATE")),
       reg.getGauges().keySet().toString());
   }
 
-  @Test
-  public void testQueryRejection() throws InterruptedException, IOException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testQueryRejection(MediaType mt) throws InterruptedException, IOException {
     final WebTarget target = target().path("queryapi/queries");
 
     final FormDataMultiPart mp = new FormDataMultiPart();
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), "blah select ID from "
       + TEST_TABLE));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
-      MediaType.APPLICATION_XML_TYPE));
+      mt));
 
-    Response response = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
+    Response response = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
     assertEquals(response.getStatus(), 400);
   }
 
@@ -1555,29 +1566,29 @@ public class TestQueryService extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    * @throws IOException          Signals that an I/O exception has occurred.
    */
-  @Test
-  public void testQueryPurger() throws InterruptedException, IOException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testQueryPurger(MediaType mt) throws InterruptedException, IOException {
     waitForPurge();
     LensConf conf = getLensConf(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
     // test post execute op
     LensQuery ctx1 = executeAndWaitForQueryToFinish(target(), lensSessionId,
       "select ID, IDSTR from " + TEST_TABLE,
-      Optional.of(conf), Optional.of(Status.SUCCESSFUL));
+      Optional.of(conf), Optional.of(Status.SUCCESSFUL), mt);
     LensQuery ctx2 = executeAndWaitForQueryToFinish(target(), lensSessionId,
       "select ID, IDSTR from " + TEST_TABLE,
-      Optional.of(conf), Optional.of(Status.SUCCESSFUL));
+      Optional.of(conf), Optional.of(Status.SUCCESSFUL), mt);
     LensQuery ctx3 = executeAndWaitForQueryToFinish(target(), lensSessionId,
       "select ID, IDSTR from " + TEST_TABLE,
-      Optional.of(conf), Optional.of(Status.SUCCESSFUL));
+      Optional.of(conf), Optional.of(Status.SUCCESSFUL), mt);
     waitForPurge(3, queryService.finishedQueries);
     assertEquals(queryService.finishedQueries.size(), 3);
-    getLensQueryResult(target(), lensSessionId, ctx3.getQueryHandle());
+    getLensQueryResultAsString(target(), lensSessionId, ctx3.getQueryHandle(), mt);
     waitForPurge(2, queryService.finishedQueries);
     assertTrue(queryService.finishedQueries.size() == 2);
-    getLensQueryResult(target(), lensSessionId, ctx2.getQueryHandle());
+    getLensQueryResultAsString(target(), lensSessionId, ctx2.getQueryHandle(), mt);
     waitForPurge(1, queryService.finishedQueries);
     assertTrue(queryService.finishedQueries.size() == 1);
-    getLensQueryResult(target(), lensSessionId, ctx1.getQueryHandle());
+    getLensQueryResultAsString(target(), lensSessionId, ctx1.getQueryHandle(), mt);
   }
 
   /**
@@ -1585,19 +1596,19 @@ public class TestQueryService extends LensJerseyTest {
    *
    * @throws Exception
    */
-  @Test
-  public void testSessionClose() throws Exception {
+  @Test(dataProvider = "mediaTypeData")
+  public void testSessionClose(MediaType mt) throws Exception {
     // Query with group by, will run long enough to close the session before finish
     String query = "select ID, IDSTR, count(*) from " + TEST_TABLE + " group by ID, IDSTR";
     SessionService sessionService = LensServices.get().getService(HiveSessionService.NAME);
-    Map<String, String> sessionconf = new HashMap<String, String>();
+    Map<String, String> sessionconf = new HashMap<>();
     LensSessionHandle sessionHandle = sessionService.openSession("foo", "bar", "default", sessionconf);
     LensConf conf = getLensConf(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "true");
     QueryHandle qHandle =
-      executeAndGetHandle(target(), Optional.of(sessionHandle), Optional.of(query), Optional.of(conf));
+      executeAndGetHandle(target(), Optional.of(sessionHandle), Optional.of(query), Optional.of(conf), mt);
     sessionService.closeSession(sessionHandle);
     sessionHandle = sessionService.openSession("foo", "bar", "default", sessionconf);
-    waitForQueryToFinish(target(), sessionHandle, qHandle, Status.SUCCESSFUL);
+    waitForQueryToFinish(target(), sessionHandle, qHandle, Status.SUCCESSFUL, mt);
   }
 
   @AfterMethod

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java b/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java
index 30d1e34..6db990e 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java
@@ -49,11 +49,9 @@ import org.apache.lens.server.common.TestResourceFile;
 
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 
-import org.glassfish.jersey.client.ClientConfig;
 import org.glassfish.jersey.media.multipart.FormDataBodyPart;
 import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
 import org.glassfish.jersey.media.multipart.FormDataMultiPart;
-import org.glassfish.jersey.media.multipart.MultiPartFeature;
 import org.testng.annotations.AfterTest;
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
@@ -84,8 +82,8 @@ public class TestResultFormatting extends LensJerseyTest {
     queryService = LensServices.get().getService(QueryExecutionService.NAME);
     lensSessionId = queryService.openSession("foo", "bar", new HashMap<String, String>());
     createTable(testTable, target(), lensSessionId,
-      "(ID INT, IDSTR STRING, IDARR ARRAY<INT>, IDSTRARR ARRAY<STRING>)");
-    loadDataFromClasspath(testTable, TestResourceFile.TEST_DATA2_FILE.getValue(), target(), lensSessionId);
+      "(ID INT, IDSTR STRING, IDARR ARRAY<INT>, IDSTRARR ARRAY<STRING>)", defaultMT);
+    loadDataFromClasspath(testTable, TestResourceFile.TEST_DATA2_FILE.getValue(), target(), lensSessionId, defaultMT);
   }
 
   /*
@@ -95,7 +93,7 @@ public class TestResultFormatting extends LensJerseyTest {
    */
   @AfterTest
   public void tearDown() throws Exception {
-    dropTable(testTable, target(), lensSessionId);
+    dropTable(testTable, target(), lensSessionId, defaultMT);
     queryService.closeSession(lensSessionId);
     super.tearDown();
   }
@@ -110,16 +108,6 @@ public class TestResultFormatting extends LensJerseyTest {
     return new QueryApp();
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.glassfish.jersey.test.JerseyTest#configureClient(org.glassfish.jersey.client.ClientConfig)
-   */
-  @Override
-  protected void configureClient(ClientConfig config) {
-    config.register(MultiPartFeature.class);
-  }
-
   /** The test table. */
   private static String testTable = "RESULT_TEST_TABLE";
 
@@ -131,15 +119,15 @@ public class TestResultFormatting extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    * @throws IOException          Signals that an I/O exception has occurred.
    */
-  @Test
-  public void testResultFormatterInMemoryResult() throws InterruptedException, IOException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testResultFormatterInMemoryResult(MediaType mt) throws InterruptedException, IOException {
     LensConf conf = new LensConf();
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
     conf.addProperty(LensConfConstants.QUERY_OUTPUT_SERDE, LazySimpleSerDe.class.getCanonicalName());
-    testResultFormatter(conf, QueryStatus.Status.SUCCESSFUL, false, null);
+    testResultFormatter(conf, QueryStatus.Status.SUCCESSFUL, false, null, mt);
 
     queryService.conf.set(LensConfConstants.RESULT_FS_READ_URL, "filereadurl://");
-    testResultFormatter(conf, QueryStatus.Status.SUCCESSFUL, false, "filereadurl://");
+    testResultFormatter(conf, QueryStatus.Status.SUCCESSFUL, false, "filereadurl://", mt);
     queryService.conf.unset(LensConfConstants.RESULT_FS_READ_URL);
   }
 
@@ -151,14 +139,14 @@ public class TestResultFormatting extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    * @throws IOException          Signals that an I/O exception has occurred.
    */
-  @Test
-  public void testResultFormatterHDFSpersistentResult() throws InterruptedException, IOException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testResultFormatterHDFSpersistentResult(MediaType mt) throws InterruptedException, IOException {
     LensConf conf = new LensConf();
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "true");
-    testResultFormatter(conf, QueryStatus.Status.SUCCESSFUL, false, null);
+    testResultFormatter(conf, QueryStatus.Status.SUCCESSFUL, false, null, mt);
 
     queryService.conf.set(LensConfConstants.RESULT_FS_READ_URL, "filereadurl://");
-    testResultFormatter(conf, QueryStatus.Status.SUCCESSFUL, false, "filereadurl://");
+    testResultFormatter(conf, QueryStatus.Status.SUCCESSFUL, false, "filereadurl://", mt);
     queryService.conf.unset(LensConfConstants.RESULT_FS_READ_URL);
   }
 
@@ -168,12 +156,12 @@ public class TestResultFormatting extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    * @throws IOException          Signals that an I/O exception has occurred.
    */
-  @Test
-  public void testPersistentResultWithMaxSize() throws InterruptedException, IOException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testPersistentResultWithMaxSize(MediaType mt) throws InterruptedException, IOException {
     LensConf conf = new LensConf();
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "true");
     conf.addProperty(LensConfConstants.RESULT_FORMAT_SIZE_THRESHOLD, "1");
-    testResultFormatter(conf, QueryStatus.Status.SUCCESSFUL, true, null);
+    testResultFormatter(conf, QueryStatus.Status.SUCCESSFUL, true, null, mt);
   }
 
   /**
@@ -182,12 +170,12 @@ public class TestResultFormatting extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    * @throws IOException          Signals that an I/O exception has occurred.
    */
-  @Test
-  public void testResultFormatterFailure() throws InterruptedException, IOException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testResultFormatterFailure(MediaType mt) throws InterruptedException, IOException {
     LensConf conf = new LensConf();
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
     conf.addProperty(LensConfConstants.QUERY_OUTPUT_SERDE, "NonexistentSerde.class");
-    testResultFormatter(conf, QueryStatus.Status.FAILED, false, null);
+    testResultFormatter(conf, QueryStatus.Status.FAILED, false, null, mt);
   }
 
   // test with execute async post with result formatter, get query, get results
@@ -202,33 +190,31 @@ public class TestResultFormatting extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    * @throws IOException          Signals that an I/O exception has occurred.
    */
-  private void testResultFormatter(LensConf conf, Status status, boolean isDir, String reDirectUrl)
+  private void testResultFormatter(LensConf conf, Status status, boolean isDir, String reDirectUrl, MediaType mt)
     throws InterruptedException, IOException {
     // test post execute op
     final WebTarget target = target().path("queryapi/queries");
 
     final FormDataMultiPart mp = new FormDataMultiPart();
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_SET, "true");
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId, mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(),
       "select ID, IDSTR, IDARR, IDSTRARR from " + testTable));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-      MediaType.APPLICATION_XML_TYPE));
-    QueryHandle handle = target.request()
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf, mt));
+    QueryHandle handle = target.request(mt)
       .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
         new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
 
     assertNotNull(handle);
 
     // Get query
-    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
+    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(mt)
       .get(LensQuery.class);
     // wait till the query finishes
     QueryStatus stat = ctx.getStatus();
     while (!stat.finished()) {
-      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
+      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request(mt).get(LensQuery.class);
       stat = ctx.getStatus();
       Thread.sleep(1000);
     }
@@ -257,7 +243,7 @@ public class TestResultFormatting extends LensJerseyTest {
       // fetch results
       TestQueryService.validatePersistedResult(handle, target(), lensSessionId, new String[][]{
         {"ID", "INT"}, {"IDSTR", "STRING"}, {"IDARR", "ARRAY"}, {"IDSTRARR", "ARRAY"},
-      }, isDir);
+      }, isDir, mt);
       if (!isDir) {
         TestQueryService.validateHttpEndPoint(target(), lensSessionId, handle, reDirectUrl);
       }

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/query/save/TestSavedQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/save/TestSavedQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/save/TestSavedQueryService.java
index 7c6ce49..70e2dbc 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/save/TestSavedQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/save/TestSavedQueryService.java
@@ -42,8 +42,6 @@ import org.apache.lens.server.api.query.save.SavedQueryService;
 import org.apache.lens.server.error.LensExceptionMapper;
 import org.apache.lens.server.query.QueryExecutionServiceImpl;
 
-import org.glassfish.jersey.client.ClientConfig;
-import org.glassfish.jersey.media.multipart.MultiPartFeature;
 import org.testng.annotations.AfterTest;
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
@@ -112,12 +110,6 @@ public class TestSavedQueryService extends LensJerseyTest {
     return new SavedQueryTestApp();
   }
 
-  @Override
-  protected void configureClient(ClientConfig config) {
-    config.register(MultiPartFeature.class);
-    config.register(LensJAXBContextResolver.class);
-  }
-
   @Test
   public void testResource() throws InterruptedException {
     assertEquals(


[05/51] [abbrv] lens git commit: LENS-760 : Session close should not result in running query failures.

Posted by de...@apache.org.
LENS-760 : Session close should not result in running query failures.


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/ff891e2c
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/ff891e2c
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/ff891e2c

Branch: refs/heads/current-release-line
Commit: ff891e2cf2a77fd28a7476ad6a6af814bb013661
Parents: 7c7c86d
Author: Deepak Barr <de...@apache.org>
Authored: Sat Dec 12 00:17:47 2015 +0530
Committer: Deepak Kumar Barr <de...@apache.org>
Committed: Sat Dec 12 00:17:47 2015 +0530

----------------------------------------------------------------------
 .../org/apache/lens/driver/hive/HiveDriver.java | 95 +++++++++++++++-----
 .../lens/driver/hive/TestRemoteHiveDriver.java  |  4 +-
 .../lens/server/query/TestQueryService.java     | 20 +++++
 3 files changed, 98 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/ff891e2c/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
index a84c679..253cfc4 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
@@ -112,6 +112,12 @@ public class HiveDriver extends AbstractLensDriver {
   /** The hive handles. */
   private Map<QueryHandle, OperationHandle> hiveHandles = new ConcurrentHashMap<QueryHandle, OperationHandle>();
 
+  /** The orphaned hive sessions. */
+  private ConcurrentLinkedQueue<SessionHandle> orphanedHiveSessions;
+
+  /** The opHandle to hive session map. */
+  private Map<OperationHandle, SessionHandle> opHandleToSession;
+
   /** The session lock. */
   private final Lock sessionLock;
 
@@ -314,6 +320,8 @@ public class HiveDriver extends AbstractLensDriver {
   public HiveDriver() throws LensException {
     this.sessionLock = new ReentrantLock();
     lensToHiveSession = new HashMap<String, SessionHandle>();
+    opHandleToSession = new ConcurrentHashMap<OperationHandle, SessionHandle>();
+    orphanedHiveSessions = new ConcurrentLinkedQueue<SessionHandle>();
     resourcesAddedForSession = new HashMap<SessionHandle, Boolean>();
     connectionExpiryThread.setDaemon(true);
     connectionExpiryThread.setName("HiveDriver-ConnectionExpiryThread");
@@ -491,15 +499,18 @@ public class HiveDriver extends AbstractLensDriver {
    */
   // assuming this is only called for executing explain/insert/set/delete/etc... queries which don't ask to fetch data.
   public LensResultSet execute(QueryContext ctx) throws LensException {
+    OperationHandle op = null;
     try {
       addPersistentPath(ctx);
       Configuration qdconf = ctx.getDriverConf(this);
       qdconf.set("mapred.job.name", ctx.getQueryHandle().toString());
-      OperationHandle op = getClient().executeStatement(getSession(ctx), ctx.getSelectedDriverQuery(),
+      SessionHandle sessionHandle = getSession(ctx);
+      op = getClient().executeStatement(sessionHandle, ctx.getSelectedDriverQuery(),
         qdconf.getValByRegex(".*"));
       log.info("The hive operation handle: {}", op);
       ctx.setDriverOpHandle(op.toString());
       hiveHandles.put(ctx.getQueryHandle(), op);
+      opHandleToSession.put(op, sessionHandle);
       updateStatus(ctx);
       OperationStatus status = getClient().getOperationStatus(op);
 
@@ -519,6 +530,10 @@ public class HiveDriver extends AbstractLensDriver {
     } catch (HiveSQLException hiveErr) {
       handleHiveServerError(ctx, hiveErr);
       throw new LensException("Error executing query", hiveErr);
+    } finally {
+      if (null != op) {
+        opHandleToSession.remove(op);
+      }
     }
   }
 
@@ -550,11 +565,13 @@ public class HiveDriver extends AbstractLensDriver {
         }
       }
       queryHook.preLaunch(ctx);
-      OperationHandle op = getClient().executeStatementAsync(getSession(ctx), ctx.getSelectedDriverQuery(),
+      SessionHandle sessionHandle = getSession(ctx);
+      OperationHandle op = getClient().executeStatementAsync(sessionHandle, ctx.getSelectedDriverQuery(),
         qdconf.getValByRegex(".*"));
       ctx.setDriverOpHandle(op.toString());
       log.info("QueryHandle: {} HiveHandle:{}", ctx.getQueryHandle(), op);
       hiveHandles.put(ctx.getQueryHandle(), op);
+      opHandleToSession.put(op, sessionHandle);
     } catch (IOException e) {
       throw new LensException("Error adding persistent path", e);
     } catch (HiveSQLException e) {
@@ -726,6 +743,18 @@ public class HiveDriver extends AbstractLensDriver {
       } catch (HiveSQLException e) {
         checkInvalidOperation(handle, e);
         throw new LensException("Unable to close query", e);
+      } finally {
+        SessionHandle hiveSession = opHandleToSession.remove(opHandle);
+        if (null != hiveSession && !opHandleToSession.containsValue(hiveSession)
+          && orphanedHiveSessions.contains(hiveSession)) {
+          orphanedHiveSessions.remove(hiveSession);
+          try {
+            getClient().closeSession(hiveSession);
+            log.info("Closed orphaned hive session : {}", hiveSession.getHandleIdentifier());
+          } catch (HiveSQLException e) {
+            log.warn("Error closing orphan hive session : {} ", hiveSession.getHandleIdentifier(), e);
+          }
+        }
       }
     }
   }
@@ -739,6 +768,7 @@ public class HiveDriver extends AbstractLensDriver {
   public boolean cancelQuery(QueryHandle handle) throws LensException {
     log.info("CancelQuery: {}", handle);
     OperationHandle hiveHandle = getHiveHandle(handle);
+    opHandleToSession.remove(hiveHandle);
     try {
       log.info("CancelQuery hiveHandle: {}", hiveHandle);
       getClient().cancelOperation(hiveHandle);
@@ -757,22 +787,11 @@ public class HiveDriver extends AbstractLensDriver {
   @Override
   public void close() {
     log.info("CloseDriver {}", getFullyQualifiedName());
-    // Close this driver and release all resources
+    // Close this driver
     sessionLock.lock();
-    try {
-      for (String lensSessionDbKey : lensToHiveSession.keySet()) {
-        try {
-          getClient().closeSession(lensToHiveSession.get(lensSessionDbKey));
-        } catch (Exception e) {
-          checkInvalidSession(e);
-          log.warn("Error closing session for lens session: {}, hive session: ", lensSessionDbKey,
-            lensToHiveSession.get(lensSessionDbKey), e);
-        }
-      }
-      lensToHiveSession.clear();
-    } finally {
-      sessionLock.unlock();
-    }
+    lensToHiveSession.clear();
+    orphanedHiveSessions.clear();
+    sessionLock.unlock();
   }
 
   /**
@@ -1087,6 +1106,21 @@ public class HiveDriver extends AbstractLensDriver {
       }
       log.info("Hive driver {} recovered {} sessions", getFullyQualifiedName(), lensToHiveSession.size());
     }
+    int numOpHandles = in.readInt();
+    for (int i = 0; i < numOpHandles; i++) {
+      OperationHandle opHandle = new OperationHandle((TOperationHandle) in.readObject());
+      SessionHandle sHandle = new SessionHandle((TSessionHandle) in.readObject(),
+        TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6);
+      opHandleToSession.put(opHandle, sHandle);
+    }
+    log.info("Hive driver {} recovered {} operation handles", getFullyQualifiedName(), opHandleToSession.size());
+    int numOrphanedSessions = in.readInt();
+    for (int i = 0; i < numOrphanedSessions; i++) {
+      SessionHandle sHandle = new SessionHandle((TSessionHandle) in.readObject(),
+        TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6);
+      orphanedHiveSessions.add(sHandle);
+    }
+    log.info("Hive driver {} recovered {} orphaned sessions", getFullyQualifiedName(), orphanedHiveSessions.size());
   }
 
   /*
@@ -1111,6 +1145,17 @@ public class HiveDriver extends AbstractLensDriver {
         out.writeObject(entry.getValue().toTSessionHandle());
       }
       log.info("Hive driver {} persisted {} sessions", getFullyQualifiedName(), lensToHiveSession.size());
+      out.writeInt(opHandleToSession.size());
+      for (Map.Entry<OperationHandle, SessionHandle> entry : opHandleToSession.entrySet()) {
+        out.writeObject(entry.getKey().toTOperationHandle());
+        out.writeObject(entry.getValue().toTSessionHandle());
+      }
+      log.info("Hive driver {} persisted {} operation handles", getFullyQualifiedName(), opHandleToSession.size());
+      out.writeInt(orphanedHiveSessions.size());
+      for (SessionHandle sessionHandle : orphanedHiveSessions) {
+        out.writeObject(sessionHandle.toTSessionHandle());
+      }
+      log.info("Hive driver {} persisted {} orphaned sessions", getFullyQualifiedName(), orphanedHiveSessions.size());
     }
   }
 
@@ -1243,9 +1288,15 @@ public class HiveDriver extends AbstractLensDriver {
           SessionHandle hiveSession = lensToHiveSession.remove(sessionDbKey);
           if (hiveSession != null) {
             try {
-              getClient().closeSession(hiveSession);
-              log.info("Closed Hive session {} for lens session {}", hiveSession.getHandleIdentifier(),
-                sessionDbKey);
+              if (isSessionClosable(hiveSession)) {
+                getClient().closeSession(hiveSession);
+                log.info("Closed Hive session {} for lens session {}", hiveSession.getHandleIdentifier(),
+                  sessionDbKey);
+              } else {
+                log.info("Skipped closing hive session {} for lens session {} due to active operations",
+                  hiveSession.getHandleIdentifier(), sessionDbKey);
+                orphanedHiveSessions.add(hiveSession);
+              }
             } catch (Exception e) {
               log.error("Error closing hive session {} for lens session {}", hiveSession.getHandleIdentifier(),
                 sessionDbKey, e);
@@ -1259,6 +1310,10 @@ public class HiveDriver extends AbstractLensDriver {
     }
   }
 
+  private boolean isSessionClosable(SessionHandle hiveSession) {
+    return !opHandleToSession.containsValue(hiveSession);
+  }
+
   /**
    * Close all connections.
    */

http://git-wip-us.apache.org/repos/asf/lens/blob/ff891e2c/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
index ab5ada9..4f18c24 100644
--- a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
+++ b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
@@ -274,9 +274,11 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
 
     // Write driver to stream
     ByteArrayOutputStream driverBytes = new ByteArrayOutputStream();
+    ObjectOutputStream out = new ObjectOutputStream(driverBytes);
     try {
-      oldDriver.writeExternal(new ObjectOutputStream(driverBytes));
+      oldDriver.writeExternal(out);
     } finally {
+      out.close();
       driverBytes.close();
     }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/ff891e2c/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index f6693aa..efef358 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -1472,6 +1472,26 @@ public class TestQueryService extends LensJerseyTest {
     getLensQueryResult(target(), lensSessionId, ctx1.getQueryHandle());
   }
 
+  /**
+   * Test session close when a query is active on the session
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testSessionClose() throws Exception {
+    // Query with group by, will run long enough to close the session before finish
+    String query = "select ID, IDSTR, count(*) from " + TEST_TABLE + " group by ID, IDSTR";
+    SessionService sessionService = LensServices.get().getService(HiveSessionService.NAME);
+    Map<String, String> sessionconf = new HashMap<String, String>();
+    LensSessionHandle sessionHandle = sessionService.openSession("foo", "bar", "default", sessionconf);
+    LensConf conf = getLensConf(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "true");
+    QueryHandle qHandle =
+      executeAndGetHandle(target(), Optional.of(sessionHandle), Optional.of(query), Optional.of(conf));
+    sessionService.closeSession(sessionHandle);
+    sessionHandle = sessionService.openSession("foo", "bar", "default", sessionconf);
+    waitForQueryToFinish(target(), sessionHandle, qHandle, Status.SUCCESSFUL);
+  }
+
   @AfterMethod
   private void waitForPurge() throws InterruptedException {
     waitForPurge(0, queryService.finishedQueries);


[30/51] [abbrv] lens git commit: LENS-917 : Fixes table pruning for multiple chains for the same destination table

Posted by de...@apache.org.
LENS-917 : Fixes table pruning for multiple chains for the same destination table


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/c7451f8e
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/c7451f8e
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/c7451f8e

Branch: refs/heads/current-release-line
Commit: c7451f8e8e8f429fc55458b03dbb10a2b7428be9
Parents: 9c03c76
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Tue Jan 12 11:09:24 2016 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Jan 12 11:09:24 2016 +0530

----------------------------------------------------------------------
 .../org/apache/lens/cube/parse/Aliased.java     |   4 +
 .../lens/cube/parse/CandidateTableResolver.java | 150 +++++++++----------
 .../lens/cube/parse/CubeQueryContext.java       |  43 +++---
 .../lens/cube/parse/ExpressionResolver.java     |   3 +
 .../lens/cube/parse/StorageTableResolver.java   |   4 +-
 .../lens/cube/parse/join/AutoJoinContext.java   |  12 +-
 .../apache/lens/cube/parse/CubeTestSetup.java   |  28 ++++
 .../cube/parse/TestDenormalizationResolver.java |  16 +-
 8 files changed, 152 insertions(+), 108 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/c7451f8e/lens-cube/src/main/java/org/apache/lens/cube/parse/Aliased.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/Aliased.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/Aliased.java
index 56fe9fc..160a9c6 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/Aliased.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/Aliased.java
@@ -36,4 +36,8 @@ public class Aliased<T extends Named> {
   public static <K extends Named> Aliased<K> create(K obj, String alias) {
     return new Aliased<K>(obj, alias);
   }
+
+  public String getName() {
+    return object.getName();
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/c7451f8e/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
index 38ff5a4..00ccf36 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
@@ -67,9 +67,9 @@ class CandidateTableResolver implements ContextRewriter {
       checkForQueriedColumns = false;
     } else {
       // populate optional tables
-      for (Dimension dim : cubeql.getOptionalDimensions()) {
+      for (Aliased<Dimension> dim : cubeql.getOptionalDimensions()) {
         log.info("Populating optional dim:{}", dim);
-        populateDimTables(dim, cubeql, true);
+        populateDimTables(dim.getObject(), cubeql, true);
       }
       if (cubeql.getAutoJoinCtx() != null) {
         // Before checking for candidate table columns, prune join paths containing non existing columns
@@ -117,7 +117,7 @@ class CandidateTableResolver implements ContextRewriter {
       return;
     }
     try {
-      Set<CandidateDim> candidates = new HashSet<CandidateDim>();
+      Set<CandidateDim> candidates = new HashSet<>();
       cubeql.getCandidateDimTables().put(dim, candidates);
       List<CubeDimensionTable> dimtables = cubeql.getMetastoreClient().getAllDimensionTables(dim);
       if (dimtables.isEmpty()) {
@@ -126,7 +126,7 @@ class CandidateTableResolver implements ContextRewriter {
             "Dimension tables do not exist");
         } else {
           log.info("Not considering optional dimension {}  as, No dimension tables exist", dim);
-          removeOptionalDim(cubeql, dim);
+          removeOptionalDimWithoutAlias(cubeql, dim);
         }
       }
       for (CubeDimensionTable dimtable : dimtables) {
@@ -139,44 +139,30 @@ class CandidateTableResolver implements ContextRewriter {
     }
   }
 
-  private void pruneOptionalDims(CubeQueryContext cubeql) {
-    Set<Dimension> tobeRemoved = new HashSet<Dimension>();
-    Set<CandidateTable> allCandidates = new HashSet<CandidateTable>();
-    allCandidates.addAll(cubeql.getCandidateFacts());
-    for (Set<CandidateDim> cdims : cubeql.getCandidateDimTables().values()) {
-      allCandidates.addAll(cdims);
-    }
-    Set<CandidateTable> removedCandidates = new HashSet<CandidateTable>();
-    for (Map.Entry<Dimension, OptionalDimCtx> optdimEntry : cubeql.getOptionalDimensionMap().entrySet()) {
-      Dimension dim = optdimEntry.getKey();
-      OptionalDimCtx optdim = optdimEntry.getValue();
-      Iterator<CandidateTable> iter = optdim.requiredForCandidates.iterator();
-      while (iter.hasNext()) {
-        CandidateTable candidate = iter.next();
-        if (!allCandidates.contains(candidate)) {
-          log.info("Removing candidate {} from requiredForCandidates of {}, as it is no more candidate", candidate,
-            dim);
-          iter.remove();
-          removedCandidates.add(candidate);
-        }
+  private void removeOptionalDimWithoutAlias(CubeQueryContext cubeql, Dimension dim) {
+    for (Aliased<Dimension> aDim : cubeql.getOptionalDimensions()) {
+      if (aDim.getName().equals(dim.getName())) {
+        removeOptionalDim(cubeql, aDim);
       }
     }
-    Set<CandidateTable> candidatesReachableThroughRefs = new HashSet<CandidateTable>();
-    for (Map.Entry<Dimension, OptionalDimCtx> optdimEntry : cubeql.getOptionalDimensionMap().entrySet()) {
-      Dimension dim = optdimEntry.getKey();
+  }
+
+  private void pruneOptionalDims(CubeQueryContext cubeql) {
+    Set<Aliased<Dimension>> tobeRemoved = new HashSet<>();
+    for (Map.Entry<Aliased<Dimension>, OptionalDimCtx> optdimEntry : cubeql.getOptionalDimensionMap().entrySet()) {
+      Aliased<Dimension> dim = optdimEntry.getKey();
       OptionalDimCtx optdim = optdimEntry.getValue();
-      candidatesReachableThroughRefs.addAll(optdim.requiredForCandidates);
       if ((!optdim.colQueried.isEmpty() && optdim.requiredForCandidates.isEmpty()) && !optdim.isRequiredInJoinChain) {
         log.info("Not considering optional dimension {} as all requiredForCandidates are removed", dim);
         tobeRemoved.add(dim);
       }
     }
-    for (Dimension dim : tobeRemoved) {
+    for (Aliased<Dimension> dim : tobeRemoved) {
       removeOptionalDim(cubeql, dim);
     }
   }
 
-  private void removeOptionalDim(CubeQueryContext cubeql, Dimension dim) {
+  private void removeOptionalDim(CubeQueryContext cubeql, Aliased<Dimension> dim) {
     OptionalDimCtx optdim = cubeql.getOptionalDimensionMap().remove(dim);
     // remove all the depending candidate table as well
     for (CandidateTable candidate : optdim.requiredForCandidates) {
@@ -240,7 +226,8 @@ class CandidateTableResolver implements ContextRewriter {
 
         // go over join chains and prune facts that dont have any of the columns in each chain
         for (JoinChain chain : cubeql.getJoinchains().values()) {
-          OptionalDimCtx optdim = cubeql.getOptionalDimensionMap().get(cubeql.getCubeTbls().get(chain.getName()));
+          OptionalDimCtx optdim = cubeql.getOptionalDimensionMap().get(Aliased.create((Dimension)cubeql.getCubeTbls()
+            .get(chain.getName()), chain.getName()));
           if (!checkForColumnExists(cfact, chain.getSourceColumns())) {
             // check if chain is optional or not
             if (optdim == null) {
@@ -282,7 +269,7 @@ class CandidateTableResolver implements ContextRewriter {
           i.remove();
         }
       }
-      Set<String> dimExprs = new HashSet<String>(cubeql.getQueriedExprs());
+      Set<String> dimExprs = new HashSet<>(cubeql.getQueriedExprs());
       dimExprs.removeAll(cubeql.getQueriedExprsWithMeasures());
       if (cubeql.getCandidateFacts().size() == 0) {
         throw new LensException(LensCubeErrorCode.NO_FACT_HAS_COLUMN.getLensErrorInfo(),
@@ -292,9 +279,9 @@ class CandidateTableResolver implements ContextRewriter {
       Set<Set<CandidateFact>> cfactset;
       if (queriedMsrs.isEmpty() && cubeql.getQueriedExprsWithMeasures().isEmpty()) {
         // if no measures are queried, add all facts individually as single covering sets
-        cfactset = new HashSet<Set<CandidateFact>>();
+        cfactset = new HashSet<>();
         for (CandidateFact cfact : cubeql.getCandidateFacts()) {
-          Set<CandidateFact> one = new LinkedHashSet<CandidateFact>();
+          Set<CandidateFact> one = new LinkedHashSet<>();
           one.add(cfact);
           cfactset.add(one);
         }
@@ -302,7 +289,7 @@ class CandidateTableResolver implements ContextRewriter {
       } else {
         // Find out candidate fact table sets which contain all the measures
         // queried
-        List<CandidateFact> cfacts = new ArrayList<CandidateFact>(cubeql.getCandidateFacts());
+        List<CandidateFact> cfacts = new ArrayList<>(cubeql.getCandidateFacts());
         cfactset = findCoveringSets(cubeql, cfacts, queriedMsrs,
           cubeql.getQueriedExprsWithMeasures());
         log.info("Measure covering fact sets :{}", cfactset);
@@ -324,8 +311,8 @@ class CandidateTableResolver implements ContextRewriter {
 
   static Set<Set<CandidateFact>> findCoveringSets(CubeQueryContext cubeql, List<CandidateFact> cfactsPassed,
     Set<String> msrs, Set<String> exprsWithMeasures) {
-    Set<Set<CandidateFact>> cfactset = new HashSet<Set<CandidateFact>>();
-    List<CandidateFact> cfacts = new ArrayList<CandidateFact>(cfactsPassed);
+    Set<Set<CandidateFact>> cfactset = new HashSet<>();
+    List<CandidateFact> cfacts = new ArrayList<>(cfactsPassed);
     for (Iterator<CandidateFact> i = cfacts.iterator(); i.hasNext();) {
       CandidateFact cfact = i.next();
       i.remove();
@@ -336,14 +323,14 @@ class CandidateTableResolver implements ContextRewriter {
         continue;
       } else if (cfact.getColumns().containsAll(msrs) && cubeql.getExprCtx().allEvaluable(cfact, exprsWithMeasures)) {
         // return single set
-        Set<CandidateFact> one = new LinkedHashSet<CandidateFact>();
+        Set<CandidateFact> one = new LinkedHashSet<>();
         one.add(cfact);
         cfactset.add(one);
       } else {
         // find the remaining measures in other facts
         if (i.hasNext()) {
-          Set<String> remainingMsrs = new HashSet<String>(msrs);
-          Set<String> remainingExprs = new HashSet<String>(exprsWithMeasures);
+          Set<String> remainingMsrs = new HashSet<>(msrs);
+          Set<String> remainingExprs = new HashSet<>(exprsWithMeasures);
           remainingMsrs.removeAll(cfact.getColumns());
           remainingExprs.removeAll(cubeql.getExprCtx().coveringExpressions(exprsWithMeasures, cfact));
           Set<Set<CandidateFact>> coveringSets = findCoveringSets(cubeql, cfacts, remainingMsrs, remainingExprs);
@@ -366,9 +353,13 @@ class CandidateTableResolver implements ContextRewriter {
     if (cubeql.getAutoJoinCtx() == null) {
       return;
     }
-    Set<Dimension> allDims = new HashSet<Dimension>(cubeql.getDimensions());
+    Set<Aliased<Dimension>> allDims = new HashSet<>();
+    for (Dimension dim : cubeql.getDimensions()) {
+      allDims.add(Aliased.create(dim));
+    }
     allDims.addAll(cubeql.getOptionalDimensions());
-    for (Dimension dim : allDims) {
+    for (Aliased<Dimension> aliasedDim : allDims) {
+      Dimension dim = aliasedDim.getObject();
       if (cubeql.getCandidateDimTables().get(dim) != null && !cubeql.getCandidateDimTables().get(dim).isEmpty()) {
         for (Iterator<CandidateDim> i = cubeql.getCandidateDimTables().get(dim).iterator(); i.hasNext();) {
           CandidateDim cdim = i.next();
@@ -377,11 +368,11 @@ class CandidateTableResolver implements ContextRewriter {
           // can participate in join
           // for each join path check for columns involved in path
           boolean removed = false;
-          for (Map.Entry<Dimension, Map<AbstractCubeTable, List<String>>> joincolumnsEntry : cubeql.getAutoJoinCtx()
-            .getJoinPathFromColumns().entrySet()) {
-            Dimension reachableDim = joincolumnsEntry.getKey();
+          for (Map.Entry<Aliased<Dimension>, Map<AbstractCubeTable, List<String>>> joincolumnsEntry : cubeql
+            .getAutoJoinCtx().getJoinPathFromColumns().entrySet()) {
+            Aliased<Dimension> reachableDim = joincolumnsEntry.getKey();
             OptionalDimCtx optdim = cubeql.getOptionalDimensionMap().get(reachableDim);
-            Collection<String> colSet = joincolumnsEntry.getValue().get((AbstractCubeTable) dim);
+            Collection<String> colSet = joincolumnsEntry.getValue().get(dim);
 
             if (!checkForColumnExists(cdim, colSet)) {
               if (optdim == null || optdim.isRequiredInJoinChain
@@ -397,11 +388,11 @@ class CandidateTableResolver implements ContextRewriter {
           }
           if (!removed) {
             // check for to columns
-            for (Map.Entry<Dimension, Map<AbstractCubeTable, List<String>>> joincolumnsEntry : cubeql.getAutoJoinCtx()
-              .getJoinPathToColumns().entrySet()) {
-              Dimension reachableDim = joincolumnsEntry.getKey();
+            for (Map.Entry<Aliased<Dimension>, Map<AbstractCubeTable, List<String>>> joincolumnsEntry : cubeql
+              .getAutoJoinCtx().getJoinPathToColumns().entrySet()) {
+              Aliased<Dimension> reachableDim = joincolumnsEntry.getKey();
               OptionalDimCtx optdim = cubeql.getOptionalDimensionMap().get(reachableDim);
-              Collection<String> colSet = joincolumnsEntry.getValue().get((AbstractCubeTable) dim);
+              Collection<String> colSet = joincolumnsEntry.getValue().get(dim);
 
               if (!checkForColumnExists(cdim, colSet)) {
                 if (optdim == null || optdim.isRequiredInJoinChain
@@ -418,18 +409,18 @@ class CandidateTableResolver implements ContextRewriter {
           }
           if (!removed) {
             // go over the referenced columns accessed in the query and find out which tables can participate
-            if (cubeql.getOptionalDimensionMap().get(dim) != null
-              && !checkForColumnExists(cdim, cubeql.getOptionalDimensionMap().get(dim).colQueried)) {
+            if (cubeql.getOptionalDimensionMap().get(aliasedDim) != null
+              && !checkForColumnExists(cdim, cubeql.getOptionalDimensionMap().get(aliasedDim).colQueried)) {
               i.remove();
               log.info("Not considering optional dimtable:{} as its denorm fields do not exist. Denorm fields:{}",
-                dimtable, cubeql.getOptionalDimensionMap().get(dim).colQueried);
-              cubeql.addDimPruningMsgs(dim, dimtable,
-                CandidateTablePruneCause.noColumnPartOfAJoinPath(cubeql.getOptionalDimensionMap().get(dim).colQueried));
+                dimtable, cubeql.getOptionalDimensionMap().get(aliasedDim).colQueried);
+              cubeql.addDimPruningMsgs(dim, dimtable, CandidateTablePruneCause
+                .noColumnPartOfAJoinPath(cubeql.getOptionalDimensionMap().get(aliasedDim).colQueried));
             }
           }
         }
         if (cubeql.getCandidateDimTables().get(dim).size() == 0) {
-          OptionalDimCtx optdim = cubeql.getOptionalDimensionMap().get(dim);
+          OptionalDimCtx optdim = cubeql.getOptionalDimensionMap().get(aliasedDim);
           if ((cubeql.getDimensions() != null && cubeql.getDimensions().contains(dim))
             || (optdim != null && optdim.isRequiredInJoinChain)) {
             throw new LensException(LensCubeErrorCode.NO_DIM_HAS_COLUMN.getLensErrorInfo(), dim.getName(),
@@ -438,7 +429,7 @@ class CandidateTableResolver implements ContextRewriter {
             // remove it from optional tables
             log.info("Not considering optional dimension {} as, No dimension table has the queried columns:{}"
               + " Clearing the required for candidates:{}", dim, optdim.colQueried, optdim.requiredForCandidates);
-            removeOptionalDim(cubeql, dim);
+            removeOptionalDim(cubeql, aliasedDim);
           }
         }
       }
@@ -456,11 +447,12 @@ class CandidateTableResolver implements ContextRewriter {
         CubeFactTable fact = cfact.fact;
 
         // for each join path check for columns involved in path
-        for (Map.Entry<Dimension, Map<AbstractCubeTable, List<String>>> joincolumnsEntry : cubeql.getAutoJoinCtx()
+        for (Map.Entry<Aliased<Dimension>, Map<AbstractCubeTable, List<String>>> joincolumnsEntry : cubeql
+          .getAutoJoinCtx()
           .getJoinPathFromColumns().entrySet()) {
-          Dimension reachableDim = joincolumnsEntry.getKey();
+          Aliased<Dimension> reachableDim = joincolumnsEntry.getKey();
           OptionalDimCtx optdim = cubeql.getOptionalDimensionMap().get(reachableDim);
-          colSet = joincolumnsEntry.getValue().get((AbstractCubeTable) cubeql.getCube());
+          colSet = joincolumnsEntry.getValue().get(cubeql.getCube());
 
           if (!checkForColumnExists(cfact, colSet)) {
             if (optdim == null || optdim.isRequiredInJoinChain
@@ -486,21 +478,21 @@ class CandidateTableResolver implements ContextRewriter {
    * available in candidate tables that want to use references
    */
   private void checkForSourceReachabilityForDenormCandidates(CubeQueryContext cubeql) {
-    if (cubeql.getOptionalDimensionMap().isEmpty()) {
+    if (cubeql.getOptionalDimensions().isEmpty()) {
       return;
     }
     if (cubeql.getAutoJoinCtx() == null) {
-      Set<Dimension> optionaldims = new HashSet<Dimension>(cubeql.getOptionalDimensions());
-      for (Dimension dim : optionaldims) {
+      Set<Aliased<Dimension>> optionaldims = new HashSet<>(cubeql.getOptionalDimensions());
+      for (Aliased<Dimension> dim : optionaldims) {
         log.info("Not considering optional dimension {} as, automatic join resolver is disbled ", dim);
         removeOptionalDim(cubeql, dim);
       }
       return;
     }
     // check for source columns for denorm columns
-    Map<Dimension, Set<CandidateTable>> removedCandidates = new HashMap<Dimension, Set<CandidateTable>>();
-    for (Map.Entry<Dimension, OptionalDimCtx> optdimEntry : cubeql.getOptionalDimensionMap().entrySet()) {
-      Dimension dim = optdimEntry.getKey();
+    Map<Aliased<Dimension>, Set<CandidateTable>> removedCandidates = new HashMap<>();
+    for (Map.Entry<Aliased<Dimension>, OptionalDimCtx> optdimEntry : cubeql.getOptionalDimensionMap().entrySet()) {
+      Aliased<Dimension> dim = optdimEntry.getKey();
       OptionalDimCtx optdim = optdimEntry.getValue();
       Iterator<CandidateTable> iter = optdim.requiredForCandidates.iterator();
       // remove candidates from each optional dim if the dimension is not reachable from candidate
@@ -537,17 +529,17 @@ class CandidateTableResolver implements ContextRewriter {
     // F5 | Directly available | Directly available
     // F6 | Directly available | Not reachable
     // F3 and F4 will get pruned while iterating over col1 and F1, F6 will get pruned while iterating over col2.
-    for (Map.Entry<String, Set<Dimension>> dimColEntry : cubeql.getRefColToDim().entrySet()) {
-      Set<CandidateTable> candidatesReachableThroughRefs = new HashSet<CandidateTable>();
+    for (Map.Entry<String, Set<Aliased<Dimension>>> dimColEntry : cubeql.getRefColToDim().entrySet()) {
+      Set<CandidateTable> candidatesReachableThroughRefs = new HashSet<>();
       String col = dimColEntry.getKey();
-      Set<Dimension> dimSet = dimColEntry.getValue();
-      for (Dimension dim : dimSet) {
+      Set<Aliased<Dimension>> dimSet = dimColEntry.getValue();
+      for (Aliased<Dimension> dim : dimSet) {
         OptionalDimCtx optdim = cubeql.getOptionalDimensionMap().get(dim);
         if (optdim != null) {
           candidatesReachableThroughRefs.addAll(optdim.requiredForCandidates);
         }
       }
-      for (Dimension dim : dimSet) {
+      for (Aliased<Dimension> dim : dimSet) {
         if (removedCandidates.get(dim) != null) {
           for (CandidateTable candidate : removedCandidates.get(dim)) {
             if (!candidatesReachableThroughRefs.contains(candidate)) {
@@ -582,11 +574,11 @@ class CandidateTableResolver implements ContextRewriter {
     // F4 | Not evaluable | evaluable through D6
     // F5 | Directly available | Directly available
     // F6 | Directly available | Not evaluable
-    for (Map.Entry<QueriedExprColumn, Set<Dimension>> exprColEntry : cubeql.getExprColToDim().entrySet()) {
+    for (Map.Entry<QueriedExprColumn, Set<Aliased<Dimension>>> exprColEntry : cubeql.getExprColToDim().entrySet()) {
       QueriedExprColumn col = exprColEntry.getKey();
-      Set<Dimension> dimSet = exprColEntry.getValue();
+      Set<Aliased<Dimension>> dimSet = exprColEntry.getValue();
       ExpressionContext ec = cubeql.getExprCtx().getExpressionContext(col.getExprCol(), col.getAlias());
-      for (Dimension dim : dimSet) {
+      for (Aliased<Dimension> dim : dimSet) {
         if (removedCandidates.get(dim) != null) {
           for (CandidateTable candidate : removedCandidates.get(dim)) {
             // check if evaluable expressions of this candidate are no more evaluable because dimension is not reachable
@@ -595,7 +587,7 @@ class CandidateTableResolver implements ContextRewriter {
               Iterator<ExprSpecContext> escIter = ec.getEvaluableExpressions().get(candidate).iterator();
               while (escIter.hasNext()) {
                 ExprSpecContext esc = escIter.next();
-                if (esc.getExprDims().contains(dim)) {
+                if (esc.getExprDims().contains(dim.getObject())) {
                   escIter.remove();
                 }
               }
@@ -623,16 +615,16 @@ class CandidateTableResolver implements ContextRewriter {
     }
 
     // remove optional dims which are not required any more.
-    Set<Dimension> tobeRemoved = new HashSet<Dimension>();
-    for (Map.Entry<Dimension, OptionalDimCtx> optdimEntry : cubeql.getOptionalDimensionMap().entrySet()) {
-      Dimension dim = optdimEntry.getKey();
+    Set<Aliased<Dimension>> tobeRemoved = new HashSet<>();
+    for (Map.Entry<Aliased<Dimension>, OptionalDimCtx> optdimEntry : cubeql.getOptionalDimensionMap().entrySet()) {
+      Aliased<Dimension> dim = optdimEntry.getKey();
       OptionalDimCtx optdim = optdimEntry.getValue();
       if ((!optdim.colQueried.isEmpty() && optdim.requiredForCandidates.isEmpty()) && !optdim.isRequiredInJoinChain) {
         log.info("Not considering optional dimension {} as all requiredForCandidates are removed", dim);
         tobeRemoved.add(dim);
       }
     }
-    for (Dimension dim : tobeRemoved) {
+    for (Aliased<Dimension> dim : tobeRemoved) {
       removeOptionalDim(cubeql, dim);
     }
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/c7451f8e/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index 06c2a0b..3e930de 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -96,13 +96,14 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
 
   @Getter
   // Mapping of a qualified column name to its table alias
-  private final Map<String, String> colToTableAlias = new HashMap<String, String>();
+  private final Map<String, String> colToTableAlias = new HashMap<>();
 
-  @Getter()
-  private final Set<Set<CandidateFact>> candidateFactSets = new HashSet<Set<CandidateFact>>();
+  @Getter
+  private final Set<Set<CandidateFact>> candidateFactSets = new HashSet<>();
 
+  @Getter
   // would be added through join chains and de-normalized resolver
-  protected Map<Dimension, OptionalDimCtx> optionalDimensions = new HashMap<Dimension, OptionalDimCtx>();
+  protected Map<Aliased<Dimension>, OptionalDimCtx> optionalDimensionMap = new HashMap<>();
 
   // Alias to table object mapping of tables accessed in this query
   @Getter
@@ -351,10 +352,10 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
 
   // map of ref column in query to set of Dimension that have the column - which are added as optional dims
   @Getter
-  private Map<String, Set<Dimension>>  refColToDim = Maps.newHashMap();
+  private Map<String, Set<Aliased<Dimension>>>  refColToDim = Maps.newHashMap();
 
-  public void updateRefColDim(String col, Dimension dim) {
-    Set<Dimension> refDims = refColToDim.get(col.toLowerCase());
+  public void updateRefColDim(String col, Aliased<Dimension> dim) {
+    Set<Aliased<Dimension>> refDims = refColToDim.get(col.toLowerCase());
     if (refDims == null) {
       refDims = Sets.newHashSet();
       refColToDim.put(col.toLowerCase(), refDims);
@@ -371,12 +372,12 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
   // map of expression column in query to set of Dimension that are accessed in the expression column - which are added
   // as optional dims
   @Getter
-  private Map<QueriedExprColumn, Set<Dimension>>  exprColToDim = Maps.newHashMap();
+  private Map<QueriedExprColumn, Set<Aliased<Dimension>>>  exprColToDim = Maps.newHashMap();
 
-  public void updateExprColDim(String tblAlias, String col, Dimension dim) {
+  public void updateExprColDim(String tblAlias, String col, Aliased<Dimension> dim) {
 
     QueriedExprColumn qexpr = new QueriedExprColumn(col, tblAlias);
-    Set<Dimension> exprDims = exprColToDim.get(qexpr);
+    Set<Aliased<Dimension>> exprDims = exprColToDim.get(qexpr);
     if (exprDims == null) {
       exprDims = Sets.newHashSet();
       exprColToDim.put(qexpr, exprDims);
@@ -419,10 +420,11 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
       throw new LensException(LensCubeErrorCode.QUERIED_TABLE_NOT_FOUND.getLensErrorInfo(), alias);
     }
     Dimension dim = (Dimension) cubeTbls.get(alias);
-    OptionalDimCtx optDim = optionalDimensions.get(dim);
+    Aliased<Dimension> aliasedDim = Aliased.create(dim, alias);
+    OptionalDimCtx optDim = optionalDimensionMap.get(aliasedDim);
     if (optDim == null) {
       optDim = new OptionalDimCtx();
-      optionalDimensions.put(dim, optDim);
+      optionalDimensionMap.put(aliasedDim, optDim);
     }
     if (cols != null && candidate != null) {
       for (String col : cols) {
@@ -432,16 +434,16 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
     }
     if (cubeCol != null) {
       if (isRef) {
-        updateRefColDim(cubeCol, dim);
+        updateRefColDim(cubeCol, aliasedDim);
       } else {
-        updateExprColDim(tableAlias, cubeCol, dim);
+        updateExprColDim(tableAlias, cubeCol, aliasedDim);
       }
     }
     if (!optDim.isRequiredInJoinChain) {
       optDim.isRequiredInJoinChain = isRequiredInJoin;
     }
     if (log.isDebugEnabled()) {
-      log.debug("Adding optional dimension:{} optDim:{} {} isRef:{}", dim, optDim,
+      log.debug("Adding optional dimension:{} optDim:{} {} isRef:{}", aliasedDim, optDim,
         (cubeCol == null ? "" : " for column:" + cubeCol), isRef);
     }
   }
@@ -872,6 +874,7 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
   public String toHQL() throws LensException {
     Set<CandidateFact> cfacts = pickCandidateFactToQuery();
     Map<Dimension, CandidateDim> dimsToQuery = pickCandidateDimsToQuery(dimensions);
+    log.info("facts:{}, dimsToQuery: {}", cfacts, dimsToQuery);
     if (autoJoinCtx != null) {
       // prune join paths for picked fact and dimensions
       autoJoinCtx.pruneAllPaths(cube, cfacts, dimsToQuery);
@@ -908,6 +911,7 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
       exprDimensions.addAll(exprCtx.rewriteExprCtx(null, dimsToQuery, this));
     }
     dimsToQuery.putAll(pickCandidateDimsToQuery(exprDimensions));
+    log.info("facts:{}, dimsToQuery: {}", cfacts, dimsToQuery);
 
     // pick denorm tables for the picked fact and dimensions
     Set<Dimension> denormTables = new HashSet<Dimension>();
@@ -923,6 +927,7 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
       denormTables.addAll(deNormCtx.rewriteDenormctx(null, dimsToQuery, false));
     }
     dimsToQuery.putAll(pickCandidateDimsToQuery(denormTables));
+    log.info("facts:{}, dimsToQuery: {}", cfacts, dimsToQuery);
     // Prune join paths once denorm tables are picked
     if (autoJoinCtx != null) {
       // prune join paths for picked fact and dimensions
@@ -1129,12 +1134,8 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
     }
   }
 
-  public Set<Dimension> getOptionalDimensions() {
-    return optionalDimensions.keySet();
-  }
-
-  public Map<Dimension, OptionalDimCtx> getOptionalDimensionMap() {
-    return optionalDimensions;
+  public Set<Aliased<Dimension>> getOptionalDimensions() {
+    return optionalDimensionMap.keySet();
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/lens/blob/c7451f8e/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
index 26514d8..5ff265d 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
@@ -151,6 +151,7 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     void addDirectlyAvailable(CandidateTable cTable) {
+      log.debug("Directly available in {}", cTable);
       directlyAvailableIn.add(cTable);
     }
 
@@ -447,6 +448,7 @@ class ExpressionResolver implements ContextRewriter {
         }
         // Replace picked expressions in all the base trees
         replacePickedExpressions(queryAST);
+        log.debug("Picked expressions: {}", pickedExpressions);
         for (Set<PickedExpression> peSet : pickedExpressions.values()) {
           for (PickedExpression pe : peSet) {
             exprDims.addAll(pe.pickedCtx.exprDims);
@@ -518,6 +520,7 @@ class ExpressionResolver implements ContextRewriter {
         for (ExpressionContext ec : ecSet) {
           if (ec.getSrcTable().getName().equals(cTable.getBaseTable().getName())) {
             if (!ec.directlyAvailableIn.contains(cTable)) {
+              log.debug("{} is not directly evaluable in {}", ec, cTable);
               if (ec.evaluableExpressions.get(cTable) != null && !ec.evaluableExpressions.get(cTable).isEmpty()) {
                 // pick first evaluable expression
                 Set<PickedExpression> peSet = pickedExpressions.get(ecEntry.getKey());

http://git-wip-us.apache.org/repos/asf/lens/blob/c7451f8e/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index de5f95e..46b6bb7 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -156,7 +156,9 @@ class StorageTableResolver implements ContextRewriter {
 
   private void resolveDimStorageTablesAndPartitions(CubeQueryContext cubeql) throws LensException {
     Set<Dimension> allDims = new HashSet<Dimension>(cubeql.getDimensions());
-    allDims.addAll(cubeql.getOptionalDimensions());
+    for (Aliased<Dimension> dim : cubeql.getOptionalDimensions()) {
+      allDims.add(dim.getObject());
+    }
     for (Dimension dim : allDims) {
       Set<CandidateDim> dimTables = cubeql.getCandidateDimTables().get(dim);
       if (dimTables == null || dimTables.isEmpty()) {

http://git-wip-us.apache.org/repos/asf/lens/blob/c7451f8e/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
index 993955a..4c30d3f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
@@ -57,10 +57,10 @@ public class AutoJoinContext {
   // Map of a joined table to its columns which are part of any of the join
   // paths. This is used in candidate table resolver
   @Getter
-  private Map<Dimension, Map<AbstractCubeTable, List<String>>> joinPathFromColumns = new HashMap<>();
+  private Map<Aliased<Dimension>, Map<AbstractCubeTable, List<String>>> joinPathFromColumns = new HashMap<>();
 
   @Getter
-  private Map<Dimension, Map<AbstractCubeTable, List<String>>> joinPathToColumns = new HashMap<>();
+  private Map<Aliased<Dimension>, Map<AbstractCubeTable, List<String>>> joinPathToColumns = new HashMap<>();
 
   // there can be separate join clause for each fact in-case of multi fact queries
   @Getter
@@ -122,12 +122,12 @@ public class AutoJoinContext {
       Map<AbstractCubeTable, List<String>> toColPaths = joinPathToColumns.get(joinPathEntry.getKey().getObject());
       if (fromColPaths == null) {
         fromColPaths = new HashMap<>();
-        joinPathFromColumns.put(joinPathEntry.getKey().getObject(), fromColPaths);
+        joinPathFromColumns.put(joinPathEntry.getKey(), fromColPaths);
       }
 
       if (toColPaths == null) {
         toColPaths = new HashMap<>();
-        joinPathToColumns.put(joinPathEntry.getKey().getObject(), toColPaths);
+        joinPathToColumns.put(joinPathEntry.getKey(), toColPaths);
       }
       populateJoinPathCols(joinPaths, fromColPaths, toColPaths);
     }
@@ -159,8 +159,8 @@ public class AutoJoinContext {
     }
   }
 
-  public void removeJoinedTable(Dimension dim) {
-    allPaths.remove(Aliased.create(dim));
+  public void removeJoinedTable(Aliased<Dimension> dim) {
+    allPaths.remove(dim);
     joinPathFromColumns.remove(dim);
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/c7451f8e/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 4366938..caea3af 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -563,6 +563,10 @@ public class CubeTestSetup {
     chainRefs.add(new ChainRefCol("timedatechain2", "full_date"));
     cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("test_time_dim2", "date", "chained dim"),
       "Timedim full date", chainRefs, null, null, null, null));
+    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("cityid1", "int", "id to city"),
+      "City1", null, null, null));
+    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("cityid2", "int", "id to city"),
+      "City2", null, null, null));
 
     Map<String, JoinChain> joinChains = new HashMap<>();
     addCubeChains(joinChains, TEST_CUBE_NAME);
@@ -717,6 +721,26 @@ public class CubeTestSetup {
         });
       }
     });
+    joinChains.put("cubeCity1", new JoinChain("cubeCity1", "cube-city", "city thru cube") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "cityid1"));
+            add(new TableReference("citydim", "id"));
+          }
+        });
+      }
+    });
+    joinChains.put("cubeCity2", new JoinChain("cubeCity2", "cube-city", "city thru cube") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference(cubeName, "cityid2"));
+            add(new TableReference("citydim", "id"));
+          }
+        });
+      }
+    });
     joinChains.put("cubeState",  new JoinChain("cubeState", "cube-state", "state thru cube") {
       {
         addPath(new ArrayList<TableReference>() {
@@ -1345,6 +1369,7 @@ public class CubeTestSetup {
     // add dimensions of the cube
     factColumns.add(new FieldSchema("zipcode", "int", "zip"));
     factColumns.add(new FieldSchema("cityid", "int", "city id"));
+    factColumns.add(new FieldSchema("cityid1", "int", "city id"));
     factColumns.add(new FieldSchema("stateid", "int", "city id"));
     factColumns.add(new FieldSchema("test_time_dim_day_id", "int", "time id"));
     factColumns.add(new FieldSchema("test_time_dim_day_id2", "int", "time id"));
@@ -1580,6 +1605,7 @@ public class CubeTestSetup {
     // add dimensions of the cube
     factColumns.add(new FieldSchema("zipcode", "int", "zip"));
     factColumns.add(new FieldSchema("cityid", "int", "city id"));
+    factColumns.add(new FieldSchema("cityid2", "int", "city id"));
     factColumns.add(new FieldSchema("test_time_dim_hour_id", "int", "time id"));
     factColumns.add(new FieldSchema("test_time_dim_hour_id2", "int", "time id"));
     factColumns.add(new FieldSchema("cdim2", "int", "cycledim id"));
@@ -1714,6 +1740,8 @@ public class CubeTestSetup {
     // add dimensions of the cube
     factColumns.add(new FieldSchema("zipcode", "int", "zip"));
     factColumns.add(new FieldSchema("cityid", "int", "city id"));
+    factColumns.add(new FieldSchema("cityid1", "int", "city id"));
+    factColumns.add(new FieldSchema("cityid2", "int", "city id"));
     factColumns.add(new FieldSchema("stateid", "int", "state id"));
     factColumns.add(new FieldSchema("countryid", "int", "country id"));
     factColumns.add(new FieldSchema("dim1", "string", "dim1"));

http://git-wip-us.apache.org/repos/asf/lens/blob/c7451f8e/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
index a8390ef..d7707a9 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
@@ -267,7 +267,7 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
     // candidate
     Assert.assertEquals(getLensExceptionErrorMessageInRewrite(
         "select citydim.name, citydim.statename, citydim.nocandidatecol " + "from citydim", conf),
-        "No dimension table has the queried columns " + "for citydim, columns: [name, statename, nocandidatecol]");
+      "No dimension table has the queried columns " + "for citydim, columns: [name, statename, nocandidatecol]");
   }
 
   @Test
@@ -330,6 +330,20 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
   }
 
   @Test
+  public void testTwoFieldsFromDifferentChainButSameTable() throws Exception {
+    String hqlQuery = rewrite("select cubecity1.name, cubecity2.name, msr2 from testcube where " + TWO_DAYS_RANGE,
+      conf);
+    String joinExpr = " join " + getDbName()
+      + "c1_citytable cubecity1 on testcube.cityid1 = cubecity1.id and (cubecity1.dt = 'latest') "
+      + " join " + getDbName()
+      + "c1_citytable cubecity2 on testcube.cityid2 = cubecity2.id and (cubecity2.dt = 'latest')";
+    String expected =
+      getExpectedQuery("testcube", "select cubecity1.name, cubecity2.name, sum(testcube.msr2) FROM ",
+        joinExpr, null, " group by cubecity1.name, cubecity2.name", null,
+        getWhereForHourly2days("testcube", "c1_testfact2_raw"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
+  @Test
   public void testDimensionQueryWithTwoRefCols() throws Exception {
     Configuration tConf = new Configuration(conf);
     tConf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "");


[09/51] [abbrv] lens git commit: LENS-890 : Adds per-queue and per-priority driver max launched queries constraints

Posted by de...@apache.org.
LENS-890 : Adds per-queue and per-priority driver max launched queries constraints


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/4d3d2f82
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/4d3d2f82
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/4d3d2f82

Branch: refs/heads/current-release-line
Commit: 4d3d2f82fb93ee4d5c52dc3b4910573953094c0a
Parents: 73f9243
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Tue Dec 15 18:45:08 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Dec 15 18:45:08 2015 +0530

----------------------------------------------------------------------
 .../org/apache/lens/api/util/CommonUtils.java   |  27 ++-
 .../FactPartitionBasedQueryCostCalculator.java  |   8 +-
 .../org/apache/lens/driver/hive/HiveDriver.java |  68 +++---
 .../apache/lens/driver/hive/TestHiveDriver.java | 233 ++++++++++++-------
 .../src/test/resources/priority_tests.data      |   1 +
 .../server/api/driver/AbstractLensDriver.java   |  13 +-
 .../lens/server/api/driver/LensDriver.java      |  13 +-
 .../server/api/query/AbstractQueryContext.java  |   9 +-
 .../lens/server/api/query/QueryContext.java     |  12 +-
 .../MaxConcurrentDriverQueriesConstraint.java   |  54 ++++-
 ...oncurrentDriverQueriesConstraintFactory.java |  49 +++-
 .../api/query/TestAbstractQueryContext.java     |   4 +-
 ...axConcurrentDriverQueriesConstraintTest.java | 181 +++++++++++++-
 .../server/query/QueryExecutionServiceImpl.java |   1 +
 .../ThreadSafeEstimatedQueryCollectionTest.java |   3 +-
 15 files changed, 527 insertions(+), 149 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-api/src/main/java/org/apache/lens/api/util/CommonUtils.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/util/CommonUtils.java b/lens-api/src/main/java/org/apache/lens/api/util/CommonUtils.java
index 38d58c7..119c924 100644
--- a/lens-api/src/main/java/org/apache/lens/api/util/CommonUtils.java
+++ b/lens-api/src/main/java/org/apache/lens/api/util/CommonUtils.java
@@ -27,6 +27,25 @@ public class CommonUtils {
 
   }
 
+  public interface EntryParser<K, V> {
+    K parseKey(String str);
+
+    V parseValue(String str);
+  }
+
+  private static EntryParser<String, String> defaultEntryParser = new EntryParser<String, String>() {
+    @Override
+    public String parseKey(String str) {
+      return str;
+    }
+
+    @Override
+    public String parseValue(String str) {
+      return str;
+    }
+  };
+
+
   /**
    * Splits given String str around non-escaped commas. Then parses each of the split element
    * as map entries in the format `key=value`. Constructs a map of such entries.
@@ -36,7 +55,11 @@ public class CommonUtils {
    * @return parsed map
    */
   public static Map<String, String> parseMapFromString(String str) {
-    Map<String, String> map = new HashMap<>();
+    return parseMapFromString(str, defaultEntryParser);
+  }
+
+  public static <K, V> Map<K, V> parseMapFromString(String str, EntryParser<K, V> parser) {
+    Map<K, V> map = new HashMap<>();
     if (str != null) {
       for (String kv : str.split("(?<!\\\\),")) {
         if (!kv.isEmpty()) {
@@ -49,7 +72,7 @@ public class CommonUtils {
           if (kvArray.length > 1) {
             value = kvArray[1].replaceAll("\\\\,", ",").trim();
           }
-          map.put(key, value);
+          map.put(parser.parseKey(key), parser.parseValue(value));
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-cube/src/main/java/org/apache/lens/cube/query/cost/FactPartitionBasedQueryCostCalculator.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/query/cost/FactPartitionBasedQueryCostCalculator.java b/lens-cube/src/main/java/org/apache/lens/cube/query/cost/FactPartitionBasedQueryCostCalculator.java
index d56e1c7..9fecdbc 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/query/cost/FactPartitionBasedQueryCostCalculator.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/query/cost/FactPartitionBasedQueryCostCalculator.java
@@ -47,8 +47,11 @@ public class FactPartitionBasedQueryCostCalculator implements QueryCostCalculato
    */
 
   @SuppressWarnings("unchecked") // required for (Set<FactPartition>) casting
-  private double getTotalPartitionCost(final AbstractQueryContext queryContext, LensDriver driver)
+  private Double getTotalPartitionCost(final AbstractQueryContext queryContext, LensDriver driver)
     throws LensException {
+    if (queryContext.getDriverRewriterPlan(driver) == null) {
+      return null;
+    }
     double cost = 0;
     for (Map.Entry<String, Set<?>> entry : getAllPartitions(queryContext, driver).entrySet()) {
       // Have to do instanceof check, since it can't be handled by polymorphism.
@@ -86,7 +89,8 @@ public class FactPartitionBasedQueryCostCalculator implements QueryCostCalculato
 
   @Override
   public QueryCost calculateCost(final AbstractQueryContext queryContext, LensDriver driver) throws LensException {
-    return new FactPartitionBasedQueryCost(getTotalPartitionCost(queryContext, driver));
+    Double cost = getTotalPartitionCost(queryContext, driver);
+    return cost == null ? null : new FactPartitionBasedQueryCost(cost);
   }
 
   public Map<String, Set<?>> getAllPartitions(AbstractQueryContext queryContext, LensDriver driver) {

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
index 253cfc4..7391f47 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
@@ -20,8 +20,14 @@ package org.apache.lens.driver.hive;
 
 import static org.apache.lens.server.api.util.LensUtil.getImplementations;
 
-import java.io.*;
-import java.util.*;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import java.util.concurrent.*;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.locks.Lock;
@@ -29,6 +35,7 @@ import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.lens.api.LensConf;
 import org.apache.lens.api.LensSessionHandle;
+import org.apache.lens.api.Priority;
 import org.apache.lens.api.query.QueryHandle;
 import org.apache.lens.api.query.QueryPrepareHandle;
 import org.apache.lens.cube.query.cost.FactPartitionBasedQueryCostCalculator;
@@ -50,7 +57,6 @@ import org.apache.lens.server.api.query.priority.CostToPriorityRangeConf;
 import org.apache.lens.server.api.query.priority.QueryPriorityDecider;
 
 import org.apache.commons.lang.StringUtils;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -66,7 +72,6 @@ import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.type.TypeReference;
 
 import com.google.common.collect.ImmutableSet;
-
 import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
 
@@ -110,6 +115,7 @@ public class HiveDriver extends AbstractLensDriver {
   private HiveConf hiveConf;
 
   /** The hive handles. */
+  @Getter
   private Map<QueryHandle, OperationHandle> hiveHandles = new ConcurrentHashMap<QueryHandle, OperationHandle>();
 
   /** The orphaned hive sessions. */
@@ -383,10 +389,12 @@ public class HiveDriver extends AbstractLensDriver {
 
   private QueryCost calculateQueryCost(AbstractQueryContext qctx) throws LensException {
     if (qctx.isOlapQuery()) {
-      return queryCostCalculator.calculateCost(qctx, this);
-    } else {
-      return new FactPartitionBasedQueryCost(Double.MAX_VALUE);
+      QueryCost cost = queryCostCalculator.calculateCost(qctx, this);
+      if (cost != null) {
+        return cost;
+      }
     }
+    return new FactPartitionBasedQueryCost(Double.MAX_VALUE);
   }
 
   @Override
@@ -548,22 +556,7 @@ public class HiveDriver extends AbstractLensDriver {
       addPersistentPath(ctx);
       Configuration qdconf = ctx.getDriverConf(this);
       qdconf.set("mapred.job.name", ctx.getQueryHandle().toString());
-      //Query is already explained.
-      log.info("whetherCalculatePriority: {}", whetherCalculatePriority);
-      if (whetherCalculatePriority) {
-        try {
-          // Inside try since non-data fetching queries can also be executed by async method.
-          String priority = ctx.calculateCostAndDecidePriority(this, queryCostCalculator, queryPriorityDecider)
-            .toString();
-          qdconf.set("mapred.job.priority", priority);
-          log.info("set priority to {}", priority);
-        } catch (Exception e) {
-          // not failing query launch when setting priority fails
-          // priority will be set to usually NORMAL - the default in underlying system.
-          log.error("could not set priority for lens session id:{} User query: {}", ctx.getLensSessionIdentifier(),
-            ctx.getUserQuery(), e);
-        }
-      }
+      decidePriority(ctx);
       queryHook.preLaunch(ctx);
       SessionHandle sessionHandle = getSession(ctx);
       OperationHandle op = getClient().executeStatementAsync(sessionHandle, ctx.getSelectedDriverQuery(),
@@ -809,6 +802,27 @@ public class HiveDriver extends AbstractLensDriver {
     return selectionPolicies;
   }
 
+  @Override
+  public Priority decidePriority(QueryContext ctx) {
+    if (whetherCalculatePriority && ctx.getDriverConf(this).get("mapred.job.priority") == null) {
+      try {
+        // Inside try since non-data fetching queries can also be executed by async method.
+        Priority priority = ctx.decidePriority(this, queryPriorityDecider);
+        String priorityStr = priority.toString();
+        ctx.getDriverConf(this).set("mapred.job.priority", priorityStr);
+        log.info("set priority to {}", priority);
+        return priority;
+      } catch (Exception e) {
+        // not failing query launch when setting priority fails
+        // priority will be set to usually NORMAL - the default in underlying system.
+        log.error("could not set priority for lens session id:{} User query: {}", ctx.getLensSessionIdentifier(),
+          ctx.getUserQuery(), e);
+        return null;
+      }
+    }
+    return null;
+  }
+
   protected CLIServiceClient getClient() throws LensException {
     if (isEmbedded) {
       if (embeddedConnection == null) {
@@ -837,7 +851,7 @@ public class HiveDriver extends AbstractLensDriver {
           thriftConnExpiryQueue.offer(connection);
           threadConnections.put(connectionKey, connection);
           log.info("New thrift connection {} for thread: {} for user: {} connection ID={} on driver:{}",
-              connectionClass, Thread.currentThread().getId(), user, connection.getConnId(), getFullyQualifiedName());
+            connectionClass, Thread.currentThread().getId(), user, connection.getConnId(), getFullyQualifiedName());
         } catch (Exception e) {
           throw new LensException(e);
         }
@@ -939,14 +953,14 @@ public class HiveDriver extends AbstractLensDriver {
           hiveSession = getClient().openSession(ctx.getClusterUser(), "");
           lensToHiveSession.put(sessionDbKey, hiveSession);
           log.info("New hive session for user: {} , lens session: {} , hive session handle: {} , driver : {}",
-              ctx.getClusterUser(), sessionDbKey, hiveSession.getHandleIdentifier(), getFullyQualifiedName());
+            ctx.getClusterUser(), sessionDbKey, hiveSession.getHandleIdentifier(), getFullyQualifiedName());
           for (LensEventListener<DriverEvent> eventListener : driverListeners) {
             try {
               eventListener.onEvent(new DriverSessionStarted(System.currentTimeMillis(), this, lensSession, hiveSession
                 .getSessionId().toString()));
             } catch (Exception exc) {
               log.error("Error sending driver {} start event to listener {}", getFullyQualifiedName(), eventListener,
-                 exc);
+                exc);
             }
           }
         } catch (Exception e) {
@@ -1218,7 +1232,7 @@ public class HiveDriver extends AbstractLensDriver {
       if (isSessionInvalid(exc, session)) {
         // We have to expire previous session
         log.info("{} Hive server session {} for lens session {} has become invalid", getFullyQualifiedName(), session,
-            lensSession);
+          lensSession);
         sessionLock.lock();
         try {
           // We should close all connections and clear the session map since

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
index 11efd3c..06552ea 100644
--- a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
+++ b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
@@ -33,7 +33,9 @@ import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.driver.*;
 import org.apache.lens.server.api.driver.DriverQueryStatus.DriverQueryState;
 import org.apache.lens.server.api.error.LensException;
-import org.apache.lens.server.api.query.*;
+import org.apache.lens.server.api.query.ExplainQueryContext;
+import org.apache.lens.server.api.query.PreparedQueryContext;
+import org.apache.lens.server.api.query.QueryContext;
 import org.apache.lens.server.api.query.cost.QueryCost;
 import org.apache.lens.server.api.query.priority.CostRangePriorityDecider;
 import org.apache.lens.server.api.query.priority.CostToPriorityRangeConf;
@@ -41,8 +43,10 @@ import org.apache.lens.server.api.user.MockDriverQueryHook;
 import org.apache.lens.server.api.util.LensUtil;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.HiveDriverRunHook;
@@ -53,6 +57,7 @@ import org.apache.hive.service.cli.ColumnDescriptor;
 
 import org.testng.annotations.*;
 
+import com.beust.jcommander.internal.Maps;
 import com.google.common.collect.Lists;
 
 
@@ -81,6 +86,8 @@ public class TestHiveDriver {
 
   protected String sessionid;
   protected SessionState ss;
+  private CostRangePriorityDecider alwaysNormalPriorityDecider
+    = new CostRangePriorityDecider(new CostToPriorityRangeConf(""));
 
   /**
    * Before test.
@@ -173,6 +180,7 @@ public class TestHiveDriver {
    * @throws Exception the exception
    */
   protected void createTestTable(String tableName) throws Exception {
+    int handleSize = getHandleSize();
     System.out.println("Hadoop Location: " + System.getProperty("hadoop.bin.path"));
     String createTable = "CREATE TABLE IF NOT EXISTS " + tableName + "(ID STRING)" + " TBLPROPERTIES ('"
       + LensConfConstants.STORAGE_COST + "'='500')";
@@ -186,7 +194,7 @@ public class TestHiveDriver {
     context = createContext(dataLoad, conf);
     resultSet = driver.execute(context);
     assertNull(resultSet);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
   }
 
   /**
@@ -196,6 +204,7 @@ public class TestHiveDriver {
    * @throws Exception the exception
    */
   protected void createPartitionedTable(String tableName) throws Exception {
+    int handleSize = getHandleSize();
     System.out.println("Hadoop Location: " + System.getProperty("hadoop.bin.path"));
     String createTable = "CREATE TABLE IF NOT EXISTS " + tableName + "(ID STRING)"
       + " PARTITIONED BY (dt string) TBLPROPERTIES ('"
@@ -212,7 +221,7 @@ public class TestHiveDriver {
     context = createContext(dataLoad, conf);
     resultSet = driver.execute(context);
     assertNull(resultSet);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
   }
 
   // Tests
@@ -241,6 +250,7 @@ public class TestHiveDriver {
    */
   @Test
   public void testTemptable() throws Exception {
+    int handleSize = getHandleSize();
     createTestTable("test_temp");
     conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
     Hive.get(conf).dropTable("test_temp_output");
@@ -248,15 +258,15 @@ public class TestHiveDriver {
     QueryContext context = createContext(query, conf);
     LensResultSet resultSet = driver.execute(context);
     assertNull(resultSet);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
 
     // fetch results from temp table
     String select = "SELECT * FROM test_temp_output";
     context = createContext(select, conf);
     resultSet = driver.execute(context);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
     validateInMemoryResult(resultSet, "test_temp_output");
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
   }
 
   /**
@@ -266,6 +276,7 @@ public class TestHiveDriver {
    */
   @Test
   public void testExecuteQuery() throws Exception {
+    int handleSize = getHandleSize();
     createTestTable("test_execute");
     LensResultSet resultSet = null;
     // Execute a select query
@@ -287,7 +298,7 @@ public class TestHiveDriver {
     context = createContext(select, conf);
     resultSet = driver.execute(context);
     validatePersistentResult(resultSet, TEST_DATA_FILE, context.getHDFSResultDir(), true);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
   }
 
   /**
@@ -383,6 +394,7 @@ public class TestHiveDriver {
    */
   @Test
   public void testExecuteQueryAsync() throws Exception {
+    int handleSize = getHandleSize();
     createTestTable("test_execute_sync");
 
     // Now run a command that would fail
@@ -392,11 +404,11 @@ public class TestHiveDriver {
     failConf.set("hive.exec.driver.run.hooks", FailHook.class.getCanonicalName());
     QueryContext context = createContext(expectFail, failConf);
     driver.executeAsync(context);
-    assertEquals(1, driver.getHiveHandleSize());
+    assertHandleSize(handleSize + 1);
     validateExecuteAsync(context, DriverQueryState.FAILED, true, false);
-    assertEquals(1, driver.getHiveHandleSize());
+    assertHandleSize(handleSize + 1);
     driver.closeQuery(context.getQueryHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
     // Async select query
     String select = "SELECT ID FROM test_execute_sync";
     conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
@@ -404,18 +416,18 @@ public class TestHiveDriver {
     driver.executeAsync(context);
     assertNotNull(context.getDriverConf(driver).get("mapred.job.name"));
     assertNotNull(context.getDriverConf(driver).get("mapred.job.priority"));
-    assertEquals(1, driver.getHiveHandleSize());
+    assertHandleSize(handleSize + 1);
     validateExecuteAsync(context, DriverQueryState.SUCCESSFUL, false, false);
     driver.closeQuery(context.getQueryHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
 
     conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
     context = createContext(select, conf);
     driver.executeAsync(context);
-    assertEquals(1, driver.getHiveHandleSize());
+    assertHandleSize(handleSize + 1);
     validateExecuteAsync(context, DriverQueryState.SUCCESSFUL, true, false);
     driver.closeQuery(context.getQueryHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
 
     conf.set(LensConfConstants.QUERY_OUTPUT_DIRECTORY_FORMAT,
       "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'"
@@ -424,10 +436,10 @@ public class TestHiveDriver {
     select = "SELECT ID, null, ID FROM test_execute_sync";
     context = createContext(select, conf);
     driver.executeAsync(context);
-    assertEquals(1, driver.getHiveHandleSize());
+    assertHandleSize(handleSize + 1);
     validateExecuteAsync(context, DriverQueryState.SUCCESSFUL, true, true);
     driver.closeQuery(context.getQueryHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
   }
 
   /**
@@ -483,6 +495,7 @@ public class TestHiveDriver {
    */
   @Test
   public void testCancelAsyncQuery() throws Exception {
+    int handleSize = getHandleSize();
     createTestTable("test_cancel_async");
     conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
     QueryContext context = createContext("SELECT ID FROM test_cancel_async", conf);
@@ -491,7 +504,7 @@ public class TestHiveDriver {
     driver.updateStatus(context);
     assertEquals(context.getDriverStatus().getState(), DriverQueryState.CANCELED, "Expecting query to be cancelled");
     driver.closeQuery(context.getQueryHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
 
     try {
       driver.cancelQuery(context.getQueryHandle());
@@ -512,7 +525,7 @@ public class TestHiveDriver {
    */
   private void validatePersistentResult(LensResultSet resultSet, String dataFile, Path outptuDir, boolean formatNulls)
     throws Exception {
-    assertTrue(resultSet instanceof HivePersistentResultSet);
+    assertTrue(resultSet instanceof HivePersistentResultSet, "resultset class: " + resultSet.getClass().getName());
     HivePersistentResultSet persistentResultSet = (HivePersistentResultSet) resultSet;
     String path = persistentResultSet.getOutputPath();
 
@@ -567,6 +580,7 @@ public class TestHiveDriver {
    */
   @Test
   public void testPersistentResultSet() throws Exception {
+    int handleSize = getHandleSize();
     createTestTable("test_persistent_result_set");
     conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
     conf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, true);
@@ -574,14 +588,14 @@ public class TestHiveDriver {
     QueryContext ctx = createContext("SELECT ID FROM test_persistent_result_set", conf);
     LensResultSet resultSet = driver.execute(ctx);
     validatePersistentResult(resultSet, TEST_DATA_FILE, ctx.getHDFSResultDir(), false);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
 
     ctx = createContext("SELECT ID FROM test_persistent_result_set", conf);
     driver.executeAsync(ctx);
-    assertEquals(1, driver.getHiveHandleSize());
+    assertHandleSize(handleSize + 1);
     validateExecuteAsync(ctx, DriverQueryState.SUCCESSFUL, true, false);
     driver.closeQuery(ctx.getQueryHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
 
     conf.set(LensConfConstants.QUERY_OUTPUT_DIRECTORY_FORMAT,
       "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'"
@@ -589,17 +603,17 @@ public class TestHiveDriver {
         + " 'field.delim'=','  ) STORED AS TEXTFILE ");
     ctx = createContext("SELECT ID, null, ID FROM test_persistent_result_set", conf);
     resultSet = driver.execute(ctx);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
     validatePersistentResult(resultSet, TEST_DATA_FILE, ctx.getHDFSResultDir(), true);
     driver.closeQuery(ctx.getQueryHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
 
     ctx = createContext("SELECT ID, null, ID FROM test_persistent_result_set", conf);
     driver.executeAsync(ctx);
-    assertEquals(1, driver.getHiveHandleSize());
+    assertHandleSize(handleSize + 1);
     validateExecuteAsync(ctx, DriverQueryState.SUCCESSFUL, true, true);
     driver.closeQuery(ctx.getQueryHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
   }
 
   /**
@@ -640,6 +654,22 @@ public class TestHiveDriver {
     SessionState.setCurrentSessionState(ss);
     ExplainQueryContext ctx = createExplainContext("cube SELECT ID FROM test_cube", conf);
     ctx.setOlapQuery(true);
+    ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() {
+      @Override
+      public String getPlan() {
+        return null;
+      }
+
+      @Override
+      public QueryCost getCost() {
+        return null;
+      }
+
+      @Override
+      public Map<String, Set<?>> getPartitions() {
+        return Maps.newHashMap();
+      }
+    });
     QueryCost cost = driver.estimate(ctx);
     assertEquals(cost.getEstimatedResourceUsage(), 0.0);
     cost.getEstimatedExecTimeMillis();
@@ -666,14 +696,14 @@ public class TestHiveDriver {
    */
   @Test
   public void testExplain() throws Exception {
+    int handleSize = getHandleSize();
     SessionState.setCurrentSessionState(ss);
     SessionState.get().setCurrentDatabase(dataBase);
     createTestTable("test_explain");
-
     DriverQueryPlan plan = driver.explain(createExplainContext("SELECT ID FROM test_explain", conf));
     assertTrue(plan instanceof HiveQueryPlan);
     assertEquals(plan.getTableWeight(dataBase + ".test_explain"), 500.0);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
 
     // test execute prepare
     PreparedQueryContext pctx = new PreparedQueryContext("SELECT ID FROM test_explain", null, conf, drivers);
@@ -686,36 +716,37 @@ public class TestHiveDriver {
     plan = driver.explainAndPrepare(pctx);
     QueryContext qctx = createContext(pctx, inConf);
     LensResultSet result = driver.execute(qctx);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
     validateInMemoryResult(result);
 
     // test execute prepare async
+    conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
     qctx = createContext(pctx, conf);
     driver.executeAsync(qctx);
     assertNotNull(qctx.getDriverOpHandle());
     validateExecuteAsync(qctx, DriverQueryState.SUCCESSFUL, true, false);
-    assertEquals(1, driver.getHiveHandleSize());
+    assertHandleSize(handleSize + 1);
 
     driver.closeQuery(qctx.getQueryHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
 
     // for backward compatibility
     qctx = createContext(pctx, inConf);
     qctx.setQueryHandle(new QueryHandle(pctx.getPrepareHandle().getPrepareHandleId()));
     result = driver.execute(qctx);
     assertNotNull(qctx.getDriverOpHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
     validateInMemoryResult(result);
     // test execute prepare async
     qctx = createContext(pctx, conf);
     qctx.setQueryHandle(new QueryHandle(pctx.getPrepareHandle().getPrepareHandleId()));
     driver.executeAsync(qctx);
-    assertEquals(1, driver.getHiveHandleSize());
+    assertHandleSize(handleSize + 1);
     validateExecuteAsync(qctx, DriverQueryState.SUCCESSFUL, true, false);
 
     driver.closeQuery(qctx.getQueryHandle());
     driver.closePreparedQuery(pctx.getPrepareHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
   }
 
   /**
@@ -725,11 +756,12 @@ public class TestHiveDriver {
    */
   @Test
   public void testExplainPartitionedTable() throws Exception {
+    int handleSize = getHandleSize();
     createPartitionedTable("test_part_table");
     // acquire
     SessionState.setCurrentSessionState(ss);
     DriverQueryPlan plan = driver.explain(createExplainContext("SELECT ID FROM test_part_table", conf));
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
     assertTrue(plan instanceof HiveQueryPlan);
     assertNotNull(plan.getTablesQueried());
     assertEquals(plan.getTablesQueried().size(), 1);
@@ -749,15 +781,15 @@ public class TestHiveDriver {
    */
   @Test
   public void testExplainOutput() throws Exception {
+    int handleSize = getHandleSize();
     createTestTable("explain_test_1");
     createTestTable("explain_test_2");
-
     SessionState.setCurrentSessionState(ss);
     DriverQueryPlan plan = driver.explain(createExplainContext("SELECT explain_test_1.ID, count(1) FROM "
       + " explain_test_1  join explain_test_2 on explain_test_1.ID = explain_test_2.ID"
       + " WHERE explain_test_1.ID = 'foo' or explain_test_2.ID = 'bar'" + " GROUP BY explain_test_1.ID", conf));
 
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
     assertTrue(plan instanceof HiveQueryPlan);
     assertNotNull(plan.getTablesQueried());
     assertEquals(plan.getTablesQueried().size(), 2);
@@ -775,6 +807,7 @@ public class TestHiveDriver {
    */
   @Test
   public void testExplainOutputPersistent() throws Exception {
+    int handleSize = getHandleSize();
     createTestTable("explain_test_1");
     conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
     SessionState.setCurrentSessionState(ss);
@@ -784,19 +817,36 @@ public class TestHiveDriver {
     pctx.setLensSessionIdentifier(sessionid);
     DriverQueryPlan plan2 = driver.explainAndPrepare(pctx);
     // assertNotNull(plan2.getResultDestination());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
     assertNotNull(plan2.getTablesQueried());
     assertEquals(plan2.getTablesQueried().size(), 1);
     assertTrue(plan2.getTableWeights().containsKey(dataBase + ".explain_test_1"));
     QueryContext ctx = createContext(pctx, conf);
     LensResultSet resultSet = driver.execute(ctx);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
     HivePersistentResultSet persistentResultSet = (HivePersistentResultSet) resultSet;
     String path = persistentResultSet.getOutputPath();
     assertEquals(ctx.getDriverResultPath(), path);
     driver.closeQuery(plan2.getHandle());
   }
 
+  @DataProvider
+  public Object[][] priorityDataProvider() throws IOException, ParseException {
+    BufferedReader br = new BufferedReader(new InputStreamReader(
+      TestHiveDriver.class.getResourceAsStream("/priority_tests.data")));
+    String line;
+    int numTests = Integer.parseInt(br.readLine());
+    Object[][] data = new Object[numTests][2];
+    for (int i = 0; i < numTests; i++) {
+      String[] kv = br.readLine().split("\\s*:\\s*");
+      final Set<FactPartition> partitions = getFactParts(Arrays.asList(kv[0].trim().split("\\s*,\\s*")));
+      final Priority expected = Priority.valueOf(kv[1]);
+      data[i] = new Object[]{partitions, expected};
+    }
+    return data;
+  }
+
+
   /**
    * Testing Duration Based Priority Logic by mocking everything except partitions.
    *
@@ -804,57 +854,47 @@ public class TestHiveDriver {
    * @throws LensException
    * @throws ParseException
    */
-  @Test
-  public void testPriority() throws IOException, LensException, ParseException {
+  @Test(dataProvider = "priorityDataProvider")
+  public void testPriority(final Set<FactPartition> partitions, Priority expected) throws Exception {
     Configuration conf = new Configuration();
-    CostRangePriorityDecider alwaysNormalPriorityDecider =
-      new CostRangePriorityDecider(new CostToPriorityRangeConf(""));
-    BufferedReader br = new BufferedReader(new InputStreamReader(
-      TestHiveDriver.class.getResourceAsStream("/priority_tests.data")));
-    String line;
-    int i = 0;
-    while ((line = br.readLine()) != null) {
-      String[] kv = line.split("\\s*:\\s*");
+    QueryContext ctx = createContext("test priority query", conf);
+    ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() {
 
-      final Set<FactPartition> partitions = getFactParts(Arrays.asList(kv[0].trim().split("\\s*,\\s*")));
-      final Priority expected = Priority.valueOf(kv[1]);
-      QueryContext ctx = createContext("test priority query", conf);
-      ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() {
+      @Override
+      public String getPlan() {
+        return null;
+      }
 
-        @Override
-        public String getPlan() {
-          return null;
-        }
+      @Override
+      public QueryCost getCost() {
+        return null;
+      }
+    });
 
-        @Override
-        public QueryCost getCost() {
-          return null;
+    ctx.getDriverContext().getDriverRewriterPlan(driver).getPartitions().putAll(
+      new HashMap<String, Set<FactPartition>>() {
+        {
+          put("table1", partitions);
         }
       });
+    // table weights only for first calculation
+    ctx.getDriverContext().getDriverRewriterPlan(driver).getTableWeights().putAll(
+      new HashMap<String, Double>() {
+        {
+          put("table1", 1.0);
+        }
+      });
+    ctx.setOlapQuery(true);
+    Priority priority = driver.decidePriority(ctx);
+    assertEquals(priority, expected, "cost: " + ctx.getDriverQueryCost(driver) + ", priority: " + priority);
+    assertEquals(ctx.decidePriority(driver,
+      alwaysNormalPriorityDecider), Priority.NORMAL);
+  }
 
-      ctx.getDriverContext().getDriverRewriterPlan(driver).getPartitions().putAll(
-        new HashMap<String, Set<FactPartition>>() {
-          {
-            put("table1", partitions);
-          }
-        });
-      if (i < 1) {
-        // table weights only for first calculation
-        ctx.getDriverContext().getDriverRewriterPlan(driver).getTableWeights().putAll(
-          new HashMap<String, Double>() {
-            {
-              put("table1", 1.0);
-            }
-          });
-      }
-      assertEquals(ctx.calculateCostAndDecidePriority(driver, driver.queryCostCalculator,
-        driver.queryPriorityDecider), expected);
-      assertEquals(ctx.calculateCostAndDecidePriority(driver, driver.queryCostCalculator,
-        alwaysNormalPriorityDecider), Priority.NORMAL);
-      i++;
-    }
+  @Test
+  public void testPriorityWithoutFactPartitions() throws LensException {
     // test priority without fact partitions
-    AbstractQueryContext ctx = createContext("test priority query", conf);
+    QueryContext ctx = createContext("test priority query", conf);
     ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() {
 
       @Override
@@ -881,15 +921,25 @@ public class TestHiveDriver {
         }
       });
     ctx.setDriverCost(driver, driver.queryCostCalculator.calculateCost(ctx, driver));
-    assertEquals(Priority.VERY_HIGH, driver.queryPriorityDecider.decidePriority(ctx.getDriverQueryCost(driver)));
-    assertEquals(Priority.NORMAL, alwaysNormalPriorityDecider.decidePriority(ctx.getDriverQueryCost(driver)));
+    assertEquals(driver.decidePriority(ctx), Priority.VERY_HIGH);
+    assertEquals(alwaysNormalPriorityDecider.decidePriority(ctx.getDriverQueryCost(driver)), Priority.NORMAL);
 
     // test priority without rewriter plan
     ctx = createContext("test priority query", conf);
-    ctx.setDriverCost(driver, driver.queryCostCalculator.calculateCost(ctx, driver));
-    assertEquals(Priority.VERY_HIGH, driver.queryPriorityDecider.decidePriority(ctx.getDriverQueryCost(driver)));
-    assertEquals(Priority.NORMAL, alwaysNormalPriorityDecider.decidePriority(ctx.getDriverQueryCost(driver)));
+    ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() {
+      @Override
+      public String getPlan() {
+        return null;
+      }
 
+      @Override
+      public QueryCost getCost() {
+        return null;
+      }
+    });
+    ctx.setDriverCost(driver, driver.queryCostCalculator.calculateCost(ctx, driver));
+    assertEquals(driver.decidePriority(ctx), Priority.VERY_HIGH);
+    assertEquals(alwaysNormalPriorityDecider.decidePriority(ctx.getDriverQueryCost(driver)), Priority.NORMAL);
   }
 
   private Set<FactPartition> getFactParts(List<String> partStrings) throws ParseException {
@@ -915,4 +965,13 @@ public class TestHiveDriver {
     }
     return factParts;
   }
+
+  private int getHandleSize() {
+    return driver.getHiveHandleSize();
+  }
+
+  private void assertHandleSize(int handleSize) {
+    assertEquals(getHandleSize(), handleSize, "Unexpected handle size, all handles: "
+      + driver.getHiveHandles());
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-driver-hive/src/test/resources/priority_tests.data
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/resources/priority_tests.data b/lens-driver-hive/src/test/resources/priority_tests.data
index 98b82ef..177743e 100644
--- a/lens-driver-hive/src/test/resources/priority_tests.data
+++ b/lens-driver-hive/src/test/resources/priority_tests.data
@@ -1,3 +1,4 @@
+4
 dt 2014-01-02-01: VERY_HIGH
 dt 2013-12,dt 2014-01-01, dt 2014-01-02-00, dt 2014-01-02-01: HIGH
 dt 2013-12,dt 2014-01, dt 2014-02, dt 2014-02-01-00: NORMAL

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server-api/src/main/java/org/apache/lens/server/api/driver/AbstractLensDriver.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/AbstractLensDriver.java b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/AbstractLensDriver.java
index 55f1535..ed1fc43 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/AbstractLensDriver.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/AbstractLensDriver.java
@@ -19,11 +19,12 @@
 package org.apache.lens.server.api.driver;
 
 
+import org.apache.lens.api.Priority;
 import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.error.LensException;
+import org.apache.lens.server.api.query.QueryContext;
 
 import org.apache.commons.lang.StringUtils;
-
 import org.apache.hadoop.conf.Configuration;
 
 import lombok.Getter;
@@ -49,7 +50,7 @@ public abstract class AbstractLensDriver implements LensDriver {
     if (StringUtils.isBlank(driverType) || StringUtils.isBlank(driverName)) {
       throw new LensException("Driver Type and Name can not be null or empty");
     }
-    fullyQualifiedName =  new StringBuilder(driverType).append(SEPARATOR).append(driverName).toString();
+    fullyQualifiedName = new StringBuilder(driverType).append(SEPARATOR).append(driverName).toString();
   }
 
   /**
@@ -61,7 +62,13 @@ public abstract class AbstractLensDriver implements LensDriver {
    */
   protected String getDriverResourcePath(String resourceName) {
     return new StringBuilder(LensConfConstants.DRIVERS_BASE_DIR).append(SEPARATOR).append(getFullyQualifiedName())
-        .append(SEPARATOR).append(resourceName).toString();
+      .append(SEPARATOR).append(resourceName).toString();
+  }
+
+  @Override
+  public Priority decidePriority(QueryContext queryContext) {
+    // no-op by default
+    return null;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java
index c220884..3d38ddd 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java
@@ -20,11 +20,14 @@ package org.apache.lens.server.api.driver;
 
 import java.io.Externalizable;
 
+import org.apache.lens.api.Priority;
 import org.apache.lens.api.query.QueryHandle;
 import org.apache.lens.api.query.QueryPrepareHandle;
 import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.events.LensEventListener;
-import org.apache.lens.server.api.query.*;
+import org.apache.lens.server.api.query.AbstractQueryContext;
+import org.apache.lens.server.api.query.PreparedQueryContext;
+import org.apache.lens.server.api.query.QueryContext;
 import org.apache.lens.server.api.query.collect.WaitingQueriesSelectionPolicy;
 import org.apache.lens.server.api.query.constraint.QueryLaunchingConstraint;
 import org.apache.lens.server.api.query.cost.QueryCost;
@@ -208,4 +211,12 @@ public interface LensDriver extends Externalizable {
    * (Examples: hive/hive1, jdbc/mysql1 )
    */
   String getFullyQualifiedName();
+
+  /**
+   * decide priority based on query's cost. The cost should be already computed by estimate call, but it's
+   * not guaranteed to be pre-computed. It's up to the driver to do an on-demand computation of cost.
+   * @see QueryContext#decidePriority(LensDriver, QueryPriorityDecider) that handles this on-demand computation.
+   * @param queryContext the query context whose (possibly pre-computed) cost is used to decide the priority
+   */
+  Priority decidePriority(QueryContext queryContext);
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
index 2f20113..62ed293 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
@@ -19,7 +19,10 @@
 package org.apache.lens.server.api.query;
 
 import java.io.Serializable;
-import java.util.*;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
@@ -206,6 +209,10 @@ public abstract class AbstractQueryContext implements Serializable {
     return getDriverContext().getDriverRewriterPlan(driver);
   }
 
+  public String getQueue() {
+    return getConf().get(LensConfConstants.MAPRED_JOB_QUEUE_NAME);
+  }
+
   /**
    * Runnable to wrap estimate computation for a driver. Failure cause and success status
    * are stored as field members

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java
index b637665..1269e45 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java
@@ -34,7 +34,6 @@ import org.apache.lens.server.api.driver.LensDriver;
 import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.query.collect.WaitingQueriesSelectionPolicy;
 import org.apache.lens.server.api.query.constraint.QueryLaunchingConstraint;
-import org.apache.lens.server.api.query.cost.QueryCostCalculator;
 import org.apache.lens.server.api.query.priority.QueryPriorityDecider;
 
 import org.apache.hadoop.conf.Configuration;
@@ -226,7 +225,6 @@ public class QueryContext extends AbstractQueryContext {
     this.submissionTime = submissionTime;
     this.queryHandle = new QueryHandle(UUID.randomUUID());
     this.status = new QueryStatus(0.0f, null, Status.NEW, "Query just got created", false, null, null, null);
-    this.priority = Priority.NORMAL;
     this.lensConf = qconf;
     this.conf = conf;
     this.isPersistent = conf.getBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_SET,
@@ -429,15 +427,17 @@ public class QueryContext extends AbstractQueryContext {
   }
 
   public Priority decidePriority(LensDriver driver, QueryPriorityDecider queryPriorityDecider) throws LensException {
+    // On-demand re-computation of cost, in case it's not already set by a previous estimate call.
+    // In driver test cases, estimate doesn't happen. Hence this code path ensures cost is computed and
+    // priority is set based on correct cost.
+    calculateCost(driver);
     priority = queryPriorityDecider.decidePriority(getDriverQueryCost(driver));
     return priority;
   }
 
-  public Priority calculateCostAndDecidePriority(LensDriver driver, QueryCostCalculator queryCostCalculator,
-    QueryPriorityDecider queryPriorityDecider) throws LensException {
+  private void calculateCost(LensDriver driver) throws LensException {
     if (getDriverQueryCost(driver) == null) {
-      setDriverCost(driver, queryCostCalculator.calculateCost(this, driver));
+      setDriverCost(driver, driver.estimate(this));
     }
-    return decidePriority(driver, queryPriorityDecider);
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraint.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraint.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraint.java
index bae2e64..58ebd9a 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraint.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraint.java
@@ -19,29 +19,71 @@
 
 package org.apache.lens.server.api.query.constraint;
 
+import java.util.Map;
+
+import org.apache.lens.api.Priority;
 import org.apache.lens.server.api.driver.LensDriver;
 import org.apache.lens.server.api.query.QueryContext;
 import org.apache.lens.server.api.query.collect.EstimatedImmutableQueryCollection;
 
+import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
+@RequiredArgsConstructor
 public class MaxConcurrentDriverQueriesConstraint implements QueryLaunchingConstraint {
 
   private final int maxConcurrentQueries;
-
-  public MaxConcurrentDriverQueriesConstraint(final int maxConcurrentQueries) {
-    this.maxConcurrentQueries = maxConcurrentQueries;
-  }
+  private final Map<String, Integer> maxConcurrentQueriesPerQueue;
+  private final Map<Priority, Integer> maxConcurrentQueriesPerPriority;
 
   @Override
   public boolean allowsLaunchOf(
     final QueryContext candidateQuery, final EstimatedImmutableQueryCollection launchedQueries) {
 
     final LensDriver selectedDriver = candidateQuery.getSelectedDriver();
-    final boolean canLaunch = (launchedQueries.getQueriesCount(selectedDriver) < maxConcurrentQueries);
-
+    final boolean canLaunch = (launchedQueries.getQueriesCount(selectedDriver) < maxConcurrentQueries)
+      && canLaunchWithQueueConstraint(candidateQuery, launchedQueries)
+      && canLaunchWithPriorityConstraint(candidateQuery, launchedQueries);
     log.debug("canLaunch:{}", canLaunch);
     return canLaunch;
   }
+
+  private boolean canLaunchWithQueueConstraint(QueryContext candidateQuery, EstimatedImmutableQueryCollection
+    launchedQueries) {
+    if (maxConcurrentQueriesPerQueue == null) {
+      return true;
+    }
+    String queue = candidateQuery.getQueue();
+    Integer limit = maxConcurrentQueriesPerQueue.get(queue);
+    if (limit == null) {
+      return true;
+    }
+    int launchedOnQueue = 0;
+    for (QueryContext context : launchedQueries.getQueries(candidateQuery.getSelectedDriver())) {
+      if (context.getQueue().equals(queue)) {
+        launchedOnQueue++;
+      }
+    }
+    return launchedOnQueue < limit;
+  }
+
+  private boolean canLaunchWithPriorityConstraint(QueryContext candidateQuery, EstimatedImmutableQueryCollection
+    launchedQueries) {
+    if (maxConcurrentQueriesPerPriority == null) {
+      return true;
+    }
+    Priority priority = candidateQuery.getPriority();
+    Integer limit = maxConcurrentQueriesPerPriority.get(priority);
+    if (limit == null) {
+      return true;
+    }
+    int launchedOnPriority = 0;
+    for (QueryContext context : launchedQueries.getQueries(candidateQuery.getSelectedDriver())) {
+      if (context.getPriority().equals(priority)) {
+        launchedOnPriority++;
+      }
+    }
+    return launchedOnPriority < limit;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintFactory.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintFactory.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintFactory.java
index b6e6c2f..6db7da7 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintFactory.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintFactory.java
@@ -19,21 +19,60 @@
 
 package org.apache.lens.server.api.query.constraint;
 
-import static java.lang.Integer.parseInt;
+import static org.apache.lens.api.util.CommonUtils.parseMapFromString;
 
+import java.util.Map;
+
+import org.apache.lens.api.Priority;
+import org.apache.lens.api.util.CommonUtils.EntryParser;
 import org.apache.lens.server.api.common.ConfigBasedObjectCreationFactory;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 
 public class MaxConcurrentDriverQueriesConstraintFactory
-    implements ConfigBasedObjectCreationFactory<QueryLaunchingConstraint> {
+  implements ConfigBasedObjectCreationFactory<MaxConcurrentDriverQueriesConstraint> {
 
   public static final String MAX_CONCURRENT_QUERIES_KEY = "driver.max.concurrent.launched.queries";
+  private static final String PREFIX = MAX_CONCURRENT_QUERIES_KEY + ".per.";
+  public static final String MAX_CONCURRENT_QUERIES_PER_QUEUE_KEY = PREFIX + "queue";
+  public static final String MAX_CONCURRENT_QUERIES_PER_PRIORITY_KEY = PREFIX + "priority";
+  private static final EntryParser<String, Integer> STRING_INT_PARSER = new EntryParser<String, Integer>() {
+    @Override
+    public String parseKey(String str) {
+      return str;
+    }
+
+    @Override
+    public Integer parseValue(String str) {
+      return Integer.valueOf(str);
+    }
+  };
+  private static final EntryParser<Priority, Integer> PRIORITY_INT_PARSER = new EntryParser<Priority, Integer>() {
+    @Override
+    public Priority parseKey(String str) {
+      return Priority.valueOf(str.toUpperCase());
+    }
+
+    @Override
+    public Integer parseValue(String str) {
+      return Integer.valueOf(str);
+    }
+  };
 
   @Override
-  public QueryLaunchingConstraint create(final Configuration conf) {
+  public MaxConcurrentDriverQueriesConstraint create(final Configuration conf) {
+    String maxConcurrentQueriesValue = conf.get(MAX_CONCURRENT_QUERIES_KEY);
+    Map<String, Integer> maxConcurrentQueriesPerQueue = parseMapFromString(
+      conf.get(MAX_CONCURRENT_QUERIES_PER_QUEUE_KEY), STRING_INT_PARSER);
+    Map<Priority, Integer> maxConcurrentQueriesPerPriority = parseMapFromString(
+      conf.get(MAX_CONCURRENT_QUERIES_PER_PRIORITY_KEY), PRIORITY_INT_PARSER);
+    int maxConcurrentQueries = Integer.MAX_VALUE;
+    if (!StringUtils.isBlank(maxConcurrentQueriesValue)) {
+      maxConcurrentQueries = Integer.parseInt(maxConcurrentQueriesValue);
+    }
+    return new MaxConcurrentDriverQueriesConstraint(maxConcurrentQueries, maxConcurrentQueriesPerQueue,
+      maxConcurrentQueriesPerPriority);
 
-    int maxConcurrentQueries = parseInt(conf.get(MAX_CONCURRENT_QUERIES_KEY));
-    return new MaxConcurrentDriverQueriesConstraint(maxConcurrentQueries);
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java b/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java
index a37a4c8..5af45ed 100644
--- a/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java
+++ b/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java
@@ -33,7 +33,6 @@ import org.apache.lens.server.api.driver.LensDriver;
 import org.apache.lens.server.api.driver.MockDriver;
 import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.metrics.LensMetricsRegistry;
-import org.apache.lens.server.api.query.cost.MockQueryCostCalculator;
 import org.apache.lens.server.api.query.priority.MockQueryPriorityDecider;
 
 import org.apache.hadoop.conf.Configuration;
@@ -101,8 +100,7 @@ public class TestAbstractQueryContext {
   @Test
   public void testPrioritySetting() throws LensException {
     MockQueryContext ctx = new MockQueryContext();
-    Priority p = ctx.calculateCostAndDecidePriority(ctx.getSelectedDriver(), new
-      MockQueryCostCalculator(), new MockQueryPriorityDecider());
+    Priority p = ctx.decidePriority(ctx.getSelectedDriver(), new MockQueryPriorityDecider());
     assertEquals(p, HIGH);
     assertEquals(ctx.getPriority(), HIGH);
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server-api/src/test/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintTest.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/test/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintTest.java b/lens-server-api/src/test/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintTest.java
index 55a2eea..4031122 100644
--- a/lens-server-api/src/test/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintTest.java
+++ b/lens-server-api/src/test/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintTest.java
@@ -19,10 +19,17 @@
 
 package org.apache.lens.server.api.query.constraint;
 
+import static org.apache.lens.api.Priority.*;
+import static org.apache.lens.server.api.LensServerAPITestUtil.getConfiguration;
+
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 import static org.testng.Assert.assertEquals;
 
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.lens.api.Priority;
 import org.apache.lens.server.api.driver.LensDriver;
 import org.apache.lens.server.api.query.QueryContext;
 import org.apache.lens.server.api.query.collect.EstimatedImmutableQueryCollection;
@@ -30,18 +37,117 @@ import org.apache.lens.server.api.query.collect.EstimatedImmutableQueryCollectio
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
+import junit.framework.Assert;
+import lombok.Data;
+
 public class MaxConcurrentDriverQueriesConstraintTest {
 
+  MaxConcurrentDriverQueriesConstraintFactory factory = new MaxConcurrentDriverQueriesConstraintFactory();
+  QueryLaunchingConstraint constraint = factory.create(getConfiguration(
+    "driver.max.concurrent.launched.queries", 10
+  ));
+  QueryLaunchingConstraint perQueueConstraint = factory.create(getConfiguration(
+    "driver.max.concurrent.launched.queries", 4,
+    "driver.max.concurrent.launched.queries.per.queue", "q1=2,q2=3"
+  ));
+
+  QueryLaunchingConstraint perPriorityConstraint = factory.create(getConfiguration(
+    "driver.max.concurrent.launched.queries", 4,
+    "driver.max.concurrent.launched.queries.per.priority", "NORMAL=2,HIGH=3"
+  ));
+
+  QueryLaunchingConstraint perQueueAndPerPriorityConstraint = factory.create(getConfiguration(
+    "driver.max.concurrent.launched.queries.per.queue", "q1=2,q2=3",
+    "driver.max.concurrent.launched.queries.per.priority", "NORMAL=2,HIGH=3"
+  ));
+
   @DataProvider
   public Object[][] dpTestAllowsLaunchOfQuery() {
-    return new Object[][] { {2, true} , {10, false}, {11, false}};
+    return new Object[][]{{2, true}, {10, false}, {11, false}};
+  }
+
+  @DataProvider
+  public Object[][] dpTestPerQueueConstraints() {
+    return new Object[][]{
+      {queues("q1", "q2"), "q1", true},
+      {queues("q1", "q1"), "q2", true},
+      {queues("q1", "q1"), "q3", true},
+      {queues("q1", "q1", "q1"), "q2", true}, // hypothetical
+      {queues("q1", "q1", "q2"), "q1", false},
+      {queues("q1", "q2", "q2"), "q1", true},
+      {queues("q1", "q2", "q2"), "q2", true},
+      {queues("q1", "q2", "q1", "q2"), "q2", false},
+      {queues("q1", "q2", "q1", "q2"), "q1", false},
+      {queues("q1", "q2", "q1", "q2"), "q3", false},
+    };
+  }
+
+  @DataProvider
+  public Object[][] dpTestPerPriorityConstraints() {
+    return new Object[][]{
+      {priorities(NORMAL, HIGH), NORMAL, true},
+      {priorities(NORMAL, NORMAL), HIGH, true},
+      {priorities(NORMAL, NORMAL), LOW, true},
+      {priorities(NORMAL, NORMAL, NORMAL), HIGH, true}, // hypothetical
+      {priorities(NORMAL, NORMAL, HIGH), NORMAL, false},
+      {priorities(NORMAL, HIGH, HIGH), NORMAL, true},
+      {priorities(NORMAL, HIGH, HIGH), HIGH, true},
+      {priorities(NORMAL, HIGH, NORMAL, HIGH), HIGH, false},
+      {priorities(NORMAL, HIGH, NORMAL, HIGH), NORMAL, false},
+      {priorities(NORMAL, HIGH, NORMAL, HIGH), LOW, false},
+    };
+  }
+
+  @DataProvider
+  public Object[][] dpTestPerQueuePerPriorityConstraints() {
+    return new Object[][]{
+      {queuePriorities("q1", NORMAL, "q2", NORMAL), "q2", NORMAL, false}, // can't launch NORMAL
+      {queuePriorities("q1", NORMAL, "q1", HIGH), "q1", NORMAL, false}, // can't launch on q1
+      {queuePriorities("q1", NORMAL, "q1", HIGH, "q2", HIGH), "q2", NORMAL, true}, // can launch NORMAL on q2
+      {queuePriorities("q1", NORMAL, "q1", HIGH, "q2", HIGH, "q2", HIGH), "q2", NORMAL, true},
+      {queuePriorities("q1", NORMAL, "q1", HIGH, "q2", HIGH, "q2", NORMAL), "q2", NORMAL, false}, // hypothetical
+      {queuePriorities("q1", NORMAL, "q1", HIGH, "q2", HIGH, "q2", HIGH, "q2", NORMAL), "q3", NORMAL, false},
+      {queuePriorities("q1", NORMAL, "q1", HIGH, "q2", HIGH, "q2", HIGH, "q2", NORMAL), "q3", HIGH, false},
+      {queuePriorities("q1", NORMAL, "q1", HIGH, "q2", HIGH, "q2", HIGH, "q2", NORMAL), "q1", LOW, false},
+      {queuePriorities("q1", NORMAL, "q1", HIGH, "q2", HIGH, "q2", HIGH, "q2", NORMAL), "q2", LOW, false},
+      {queuePriorities("q1", NORMAL, "q1", HIGH, "q2", HIGH, "q2", HIGH, "q2", NORMAL), "q3", LOW, true},
+    };
+  }
+
+  @Data
+  public static class QueuePriority {
+    private final String queue;
+    private final Priority priority;
+  }
+
+  private static QueuePriority[] queuePriorities(Object... args) {
+    Assert.assertEquals(args.length % 2, 0);
+    QueuePriority[] queuePriorities = new QueuePriority[args.length / 2];
+    for (int i = 0; i < args.length; i += 2) {
+      queuePriorities[i / 2] = new QueuePriority((String) args[i], (Priority) args[i + 1]);
+    }
+    return queuePriorities;
+  }
+
+  private static String[] queues(Object... args) {
+    String[] queues = new String[args.length];
+    for (int i = 0; i < args.length; i++) {
+      queues[i] = (String) args[i];
+    }
+    return queues;
+  }
+
+  private static Priority[] priorities(Object... args) {
+    Priority[] priorities = new Priority[args.length];
+    for (int i = 0; i < args.length; i++) {
+      priorities[i] = (Priority) args[i];
+    }
+    return priorities;
   }
 
   @Test(dataProvider = "dpTestAllowsLaunchOfQuery")
   public void testAllowsLaunchOfQuery(final int currentDriverLaunchedQueries, final boolean expectedCanLaunch) {
 
-    int maxConcurrentQueries = 10;
-
     QueryContext mockCandidateQuery = mock(QueryContext.class);
     EstimatedImmutableQueryCollection mockLaunchedQueries = mock(EstimatedImmutableQueryCollection.class);
     LensDriver mockDriver = mock(LensDriver.class);
@@ -49,9 +155,76 @@ public class MaxConcurrentDriverQueriesConstraintTest {
     when(mockCandidateQuery.getSelectedDriver()).thenReturn(mockDriver);
     when(mockLaunchedQueries.getQueriesCount(mockDriver)).thenReturn(currentDriverLaunchedQueries);
 
-    QueryLaunchingConstraint constraint = new MaxConcurrentDriverQueriesConstraint(maxConcurrentQueries);
     boolean actualCanLaunch = constraint.allowsLaunchOf(mockCandidateQuery, mockLaunchedQueries);
 
     assertEquals(actualCanLaunch, expectedCanLaunch);
   }
+
+  @Test(dataProvider = "dpTestPerQueueConstraints")
+  public void testPerQueueConstraints(final String[] launchedQueues, final String candidateQueue,
+    final boolean expectedCanLaunch) {
+    EstimatedImmutableQueryCollection mockLaunchedQueries = mock(EstimatedImmutableQueryCollection.class);
+    LensDriver mockDriver = mock(LensDriver.class);
+    Set<QueryContext> launchedQueries = new HashSet<>();
+    for (String queue : launchedQueues) {
+      QueryContext context = mock(QueryContext.class);
+      when(context.getQueue()).thenReturn(queue);
+      launchedQueries.add(context);
+    }
+    when(mockLaunchedQueries.getQueries(mockDriver)).thenReturn(launchedQueries);
+    when(mockLaunchedQueries.getQueriesCount(mockDriver)).thenReturn(launchedQueries.size());
+
+    QueryContext mockCandidateQuery = mock(QueryContext.class);
+    when(mockCandidateQuery.getQueue()).thenReturn(candidateQueue);
+    when(mockCandidateQuery.getSelectedDriver()).thenReturn(mockDriver);
+    boolean actualCanLaunch = perQueueConstraint.allowsLaunchOf(mockCandidateQuery, mockLaunchedQueries);
+
+    assertEquals(actualCanLaunch, expectedCanLaunch);
+  }
+
+  @Test(dataProvider = "dpTestPerPriorityConstraints")
+  public void testPerPriorityConstraints(final Priority[] launchedPriorities, final Priority candidatePriority,
+    final boolean expectedCanLaunch) {
+    EstimatedImmutableQueryCollection mockLaunchedQueries = mock(EstimatedImmutableQueryCollection.class);
+    LensDriver mockDriver = mock(LensDriver.class);
+    Set<QueryContext> launchedQueries = new HashSet<>();
+    for (Priority priority : launchedPriorities) {
+      QueryContext context = mock(QueryContext.class);
+      when(context.getPriority()).thenReturn(priority);
+      launchedQueries.add(context);
+    }
+    when(mockLaunchedQueries.getQueries(mockDriver)).thenReturn(launchedQueries);
+    when(mockLaunchedQueries.getQueriesCount(mockDriver)).thenReturn(launchedQueries.size());
+
+    QueryContext mockCandidateQuery = mock(QueryContext.class);
+    when(mockCandidateQuery.getPriority()).thenReturn(candidatePriority);
+    when(mockCandidateQuery.getSelectedDriver()).thenReturn(mockDriver);
+    boolean actualCanLaunch = perPriorityConstraint.allowsLaunchOf(mockCandidateQuery, mockLaunchedQueries);
+
+    assertEquals(actualCanLaunch, expectedCanLaunch);
+  }
+
+  @Test(dataProvider = "dpTestPerQueuePerPriorityConstraints")
+  public void testPerQueuePerPriorityConstraints(final QueuePriority[] launchedQueuePriorities,
+    final String candidateQueue, final Priority candidatePriority, final boolean expectedCanLaunch) {
+    EstimatedImmutableQueryCollection mockLaunchedQueries = mock(EstimatedImmutableQueryCollection.class);
+    LensDriver mockDriver = mock(LensDriver.class);
+    Set<QueryContext> launchedQueries = new HashSet<>();
+    for (QueuePriority queuePriority : launchedQueuePriorities) {
+      QueryContext context = mock(QueryContext.class);
+      when(context.getQueue()).thenReturn(queuePriority.getQueue());
+      when(context.getPriority()).thenReturn(queuePriority.getPriority());
+      launchedQueries.add(context);
+    }
+    when(mockLaunchedQueries.getQueries(mockDriver)).thenReturn(launchedQueries);
+    when(mockLaunchedQueries.getQueriesCount(mockDriver)).thenReturn(launchedQueries.size());
+
+    QueryContext mockCandidateQuery = mock(QueryContext.class);
+    when(mockCandidateQuery.getQueue()).thenReturn(candidateQueue);
+    when(mockCandidateQuery.getPriority()).thenReturn(candidatePriority);
+    when(mockCandidateQuery.getSelectedDriver()).thenReturn(mockDriver);
+    boolean actualCanLaunch = perQueueAndPerPriorityConstraint.allowsLaunchOf(mockCandidateQuery, mockLaunchedQueries);
+
+    assertEquals(actualCanLaunch, expectedCanLaunch);
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
index ffd2d42..2dff9af 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
@@ -1734,6 +1734,7 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
 
     ctx.setLensSessionIdentifier(sessionHandle.getPublicId().toString());
     rewriteAndSelect(ctx);
+    ctx.getSelectedDriver().decidePriority(ctx);
     return submitQuery(ctx);
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server/src/test/java/org/apache/lens/server/query/constraint/ThreadSafeEstimatedQueryCollectionTest.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/constraint/ThreadSafeEstimatedQueryCollectionTest.java b/lens-server/src/test/java/org/apache/lens/server/query/constraint/ThreadSafeEstimatedQueryCollectionTest.java
index 9138f8e..e1bf350 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/constraint/ThreadSafeEstimatedQueryCollectionTest.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/constraint/ThreadSafeEstimatedQueryCollectionTest.java
@@ -21,7 +21,6 @@ package org.apache.lens.server.query.constraint;
 
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
-
 import static org.testng.Assert.assertEquals;
 
 import org.apache.lens.server.api.driver.LensDriver;
@@ -53,7 +52,7 @@ public class ThreadSafeEstimatedQueryCollectionTest {
     LensDriver mockDriver = mock(LensDriver.class);
     LensDriver mockDriver2 = mock(LensDriver.class);
 
-    QueryLaunchingConstraint constraint = new MaxConcurrentDriverQueriesConstraint(maxConcurrentQueries);
+    QueryLaunchingConstraint constraint = new MaxConcurrentDriverQueriesConstraint(maxConcurrentQueries, null, null);
     ThreadSafeEstimatedQueryCollection col = new ThreadSafeEstimatedQueryCollection(new
       DefaultEstimatedQueryCollection(new DefaultQueryCollection()));
 


[38/51] [abbrv] lens git commit: LENS-933 : Allow presubmit hook to throw LensException

Posted by de...@apache.org.
LENS-933 : Allow presubmit hook to throw LensException


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/8a365729
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/8a365729
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/8a365729

Branch: refs/heads/current-release-line
Commit: 8a36572918540e83eae8c16a45c8479879ae7949
Parents: 919936b
Author: Rajat Khandelwal <prongs@apache.org>
Authored: Wed Jan 27 16:49:03 2016 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Wed Jan 27 16:49:03 2016 +0530

----------------------------------------------------------------------
 .../java/org/apache/lens/server/api/driver/DriverQueryHook.java  | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/8a365729/lens-server-api/src/main/java/org/apache/lens/server/api/driver/DriverQueryHook.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/DriverQueryHook.java b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/DriverQueryHook.java
index 70d999a..ecac6be 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/DriverQueryHook.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/DriverQueryHook.java
@@ -21,6 +21,7 @@
  */
 package org.apache.lens.server.api.driver;
 
+import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.query.AbstractQueryContext;
 
 /**
@@ -45,6 +46,7 @@ public interface DriverQueryHook {
   /**
    * Should be Called before launch on the driver
    * @param ctx
+   * @throws LensException
    */
-  void preLaunch(AbstractQueryContext ctx);
+  void preLaunch(AbstractQueryContext ctx) throws LensException;
 }


[46/51] [abbrv] lens git commit: LENS-920 : Fix issues in producing and consuming json for all api

Posted by de...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java b/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java
index 6f8886c..30f7aff 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java
@@ -31,6 +31,7 @@ import static org.testng.Assert.assertTrue;
 
 import javax.ws.rs.client.WebTarget;
 import javax.ws.rs.core.Application;
+import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 import javax.xml.datatype.DatatypeConfigurationException;
 
@@ -41,6 +42,7 @@ import org.apache.lens.api.metastore.*;
 import org.apache.lens.api.query.SupportedQuerySubmitOperations;
 import org.apache.lens.api.result.LensAPIResult;
 import org.apache.lens.api.result.LensErrorTO;
+import org.apache.lens.api.util.MoxyJsonConfigurationContextResolver;
 import org.apache.lens.cube.error.ColUnAvailableInTimeRange;
 import org.apache.lens.server.LensJerseyTest;
 import org.apache.lens.server.LensRequestContextInitFilter;
@@ -50,12 +52,10 @@ import org.apache.lens.server.error.LensJAXBValidationExceptionMapper;
 import org.apache.lens.server.metastore.MetastoreResource;
 import org.apache.lens.server.session.SessionResource;
 
-import org.glassfish.jersey.client.ClientConfig;
 import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.glassfish.jersey.moxy.json.MoxyJsonFeature;
 import org.glassfish.jersey.server.ResourceConfig;
 import org.glassfish.jersey.test.TestProperties;
-import org.glassfish.jersey.test.inmemory.InMemoryTestContainerFactory;
-import org.glassfish.jersey.test.spi.TestContainerFactory;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.format.DateTimeFormat;
@@ -91,24 +91,14 @@ public class QueryAPIErrorResponseTest extends LensJerseyTest {
 
     return new ResourceConfig(LensRequestContextInitFilter.class, SessionResource.class, MetastoreResource.class,
       QueryServiceResource.class, MultiPartFeature.class, LensExceptionMapper.class, LensJAXBContextResolver.class,
-      LensRequestContextInitFilter.class, LensJAXBValidationExceptionMapper.class);
+      LensRequestContextInitFilter.class, LensJAXBValidationExceptionMapper.class,
+      MoxyJsonConfigurationContextResolver.class, MoxyJsonFeature.class);
   }
 
-  @Override
-  protected void configureClient(ClientConfig config) {
-    config.register(MultiPartFeature.class);
-    config.register(LensJAXBContextResolver.class);
-  }
+  @Test(dataProvider = "mediaTypeData")
+  public void testErrorResponseWhenSessionIdIsAbsent(MediaType mt) {
 
-  @Override
-  protected TestContainerFactory getTestContainerFactory() {
-    return new InMemoryTestContainerFactory();
-  }
-
-  @Test
-  public void testErrorResponseWhenSessionIdIsAbsent() {
-
-    Response response = estimate(target(), Optional.<LensSessionHandle>absent(), Optional.of(MOCK_QUERY));
+    Response response = estimate(target(), Optional.<LensSessionHandle>absent(), Optional.of(MOCK_QUERY), mt);
 
     final String expectedErrMsg = "Session id not provided. Please provide a session id.";
     LensErrorTO expectedLensErrorTO = LensErrorTO.composedOf(
@@ -118,12 +108,12 @@ public class QueryAPIErrorResponseTest extends LensJerseyTest {
     expectedData.verify(response);
   }
 
-  @Test
-  public void testErrorResponseWhenQueryIsAbsent() {
+  @Test(dataProvider = "mediaTypeData")
+  public void testErrorResponseWhenQueryIsAbsent(MediaType mt) {
 
-    LensSessionHandle sessionId = openSession(target(), "foo", "bar", new LensConf());
+    LensSessionHandle sessionId = openSession(target(), "foo", "bar", new LensConf(), mt);
     Optional<String> testQuery = Optional.absent();
-    Response response = estimate(target(), Optional.of(sessionId), testQuery);
+    Response response = estimate(target(), Optional.of(sessionId), testQuery, mt);
 
     final String expectedErrMsg = "Query is not provided, or it is empty or blank. Please provide a valid query.";
     LensErrorTO expectedLensErrorTO = LensErrorTO.composedOf(
@@ -133,13 +123,13 @@ public class QueryAPIErrorResponseTest extends LensJerseyTest {
     expectedData.verify(response);
   }
 
-  @Test
-  public void testErrorResponseWhenInvalidOperationIsSubmitted() {
+  @Test(dataProvider = "mediaTypeData")
+  public void testErrorResponseWhenInvalidOperationIsSubmitted(MediaType mt) {
 
-    LensSessionHandle sessionId = openSession(target(), "foo", "bar", new LensConf());
+    LensSessionHandle sessionId = openSession(target(), "foo", "bar", new LensConf(), mt);
 
     Response response = postQuery(target(), Optional.of(sessionId), Optional.of(MOCK_QUERY),
-        Optional.of(INVALID_OPERATION));
+        Optional.of(INVALID_OPERATION), mt);
 
     final String expectedErrMsg = "Provided Operation is not supported. Supported Operations are: "
       + "[estimate, execute, explain, execute_with_timeout]";
@@ -152,17 +142,16 @@ public class QueryAPIErrorResponseTest extends LensJerseyTest {
     expectedData.verify(response);
   }
 
-  @Test
-  public void testErrorResponseWhenLensMultiCauseExceptionOccurs() {
+  @Test(dataProvider = "mediaTypeData")
+  public void testErrorResponseWhenLensMultiCauseExceptionOccurs(MediaType mt) {
 
-    LensSessionHandle sessionId = openSession(target(), "foo", "bar");
+    LensSessionHandle sessionId = openSession(target(), "foo", "bar", mt);
 
     final String testQuery = "select * from non_existing_table";
-    Response response = estimate(target(), Optional.of(sessionId), Optional.of(testQuery));
+    Response response = estimate(target(), Optional.of(sessionId), Optional.of(testQuery), mt);
 
     final String expectedErrMsg1 = "Semantic Error : Error while compiling statement: "
       + "FAILED: SemanticException [Error 10001]: Line 1:31 Table not found 'non_existing_table'";
-
     final String expectedErrMsg2 = "Semantic Error : user lacks privilege or object not found: NON_EXISTING_TABLE";
 
     LensErrorTO expectedLensErrorTO1 = LensErrorTO.composedOf(INTERNAL_SERVER_ERROR.getValue(),
@@ -174,15 +163,16 @@ public class QueryAPIErrorResponseTest extends LensJerseyTest {
     LensErrorTO responseLensErrorTO = response.readEntity(LensAPIResult.class).getLensErrorTO();
 
     assertTrue(expectedLensErrorTO1.getMessage().equals(responseLensErrorTO.getMessage())
-            || expectedLensErrorTO2.getMessage().equals(responseLensErrorTO.getMessage()));
+            || expectedLensErrorTO2.getMessage().equals(responseLensErrorTO.getMessage()),
+      "Message is " + responseLensErrorTO.getMessage());
   }
 
-  @Test
-  public void testErrorResponseWithSyntaxErrorInQuery() {
+  @Test(dataProvider = "mediaTypeData")
+  public void testErrorResponseWithSyntaxErrorInQuery(MediaType mt) {
 
-    LensSessionHandle sessionId = openSession(target(), "foo", "bar", new LensConf());
+    LensSessionHandle sessionId = openSession(target(), "foo", "bar", new LensConf(), mt);
 
-    Response response = estimate(target(), Optional.of(sessionId), Optional.of(MOCK_QUERY));
+    Response response = estimate(target(), Optional.of(sessionId), Optional.of(MOCK_QUERY), mt);
 
     final String expectedErrMsg = "Syntax Error: line 1:0 cannot recognize input near 'mock' '-' 'query'";
     LensErrorTO expectedLensErrorTO = LensErrorTO.composedOf(SYNTAX_ERROR.getLensErrorInfo().getErrorCode(),
@@ -192,8 +182,8 @@ public class QueryAPIErrorResponseTest extends LensJerseyTest {
     expectedData.verify(response);
   }
 
-  @Test
-  public void testQueryColumnWithBothStartDateAndEndDate() throws DatatypeConfigurationException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testQueryColumnWithBothStartDateAndEndDate(MediaType mt) throws DatatypeConfigurationException {
 
     /* This test will have a col which has both start date and end date set */
     /* Col will be queried for a time range which does not fall in start date and end date */
@@ -208,11 +198,11 @@ public class QueryAPIErrorResponseTest extends LensJerseyTest {
       + "before Friday, January 30, 2015 11:00:00 PM UTC. Please adjust the selected time range accordingly.";
 
     testColUnAvailableInTimeRange(Optional.of(startDateOneJan2015),
-      Optional.of(endDateThirtyJan2015), queryFromOneJan2014, queryTillThreeJan2014, expectedErrMsgSuffix);
+      Optional.of(endDateThirtyJan2015), queryFromOneJan2014, queryTillThreeJan2014, expectedErrMsgSuffix, mt);
   }
 
-  @Test
-  public void testQueryColumnWithOnlyStartDate() throws DatatypeConfigurationException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testQueryColumnWithOnlyStartDate(MediaType mt) throws DatatypeConfigurationException {
 
     /* This test will have a col which has only start date set */
     /* Col will be queried for a time range which is before start date */
@@ -226,11 +216,11 @@ public class QueryAPIErrorResponseTest extends LensJerseyTest {
       + "Please adjust the selected time range accordingly.";
 
     testColUnAvailableInTimeRange(Optional.of(startDateOneJan2015),
-      Optional.<DateTime>absent(), queryFromOneJan2014, queryTillThreeJan2014, expectedErrMsgSuffix);
+      Optional.<DateTime>absent(), queryFromOneJan2014, queryTillThreeJan2014, expectedErrMsgSuffix, mt);
   }
 
-  @Test
-  public void testQueryColumnWithOnlyEndDate() throws DatatypeConfigurationException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testQueryColumnWithOnlyEndDate(MediaType mt) throws DatatypeConfigurationException {
 
     /* This test will have a col which has only end date set */
     /* Col will be queried for a time range which is after end date */
@@ -244,12 +234,12 @@ public class QueryAPIErrorResponseTest extends LensJerseyTest {
       + "Please adjust the selected time range accordingly.";
 
     testColUnAvailableInTimeRange(Optional.<DateTime>absent(),
-      Optional.of(endDateThirtyJan2015), queryFromOneJan2016, queryTillThreeJan2016, expectedErrMsgSuffix);
+      Optional.of(endDateThirtyJan2015), queryFromOneJan2016, queryTillThreeJan2016, expectedErrMsgSuffix, mt);
   }
 
   private void testColUnAvailableInTimeRange(@NonNull final Optional<DateTime> colStartDate,
     @NonNull final Optional<DateTime> colEndDate, @NonNull DateTime queryFrom, @NonNull DateTime queryTill,
-    @NonNull final String expectedErrorMsgSuffix) throws DatatypeConfigurationException {
+    @NonNull final String expectedErrorMsgSuffix, @NonNull final MediaType mt) throws DatatypeConfigurationException {
 
     final WebTarget target = target();
     final String testDb = getRandomDbName();
@@ -258,21 +248,21 @@ public class QueryAPIErrorResponseTest extends LensJerseyTest {
     final String testFact = getRandomFactName();
 
     /* Setup: Begin */
-    LensSessionHandle sessionId = openSession(target, "foo", "bar", new LensConf());
+    LensSessionHandle sessionId = openSession(target, "foo", "bar", new LensConf(), mt);
 
     try {
 
-      createAndSetCurrentDbFailFast(target, sessionId, testDb);
+      createAndSetCurrentDbFailFast(target, sessionId, testDb, mt);
 
       /* Create a test cube with test dimension field having a start Date and end Date */
       XDimAttribute testXDim = createXDimAttribute(testDimensionField, colStartDate, colEndDate);
       XCube xcube = createXCubeWithDummyMeasure(testCube, Optional.of("dt"), testXDim);
-      createCubeFailFast(target, sessionId, xcube);
+      createCubeFailFast(target, sessionId, xcube, mt);
 
       /* Create a fact with test dimension field */
       XColumn xColumn = createXColumn(testDimensionField);
       XFactTable xFactTable = createXFactTableWithColumns(testFact, testCube, xColumn);
-      createFactFailFast(target, sessionId, xFactTable);
+      createFactFailFast(target, sessionId, xFactTable, mt);
 
       /* Setup: End */
 
@@ -280,7 +270,7 @@ public class QueryAPIErrorResponseTest extends LensJerseyTest {
       final String testQuery = "cube select " + testDimensionField + " from " + testCube + " where TIME_RANGE_IN(dt, "
         + "\"" + dtf.print(queryFrom) + "\",\"" + dtf.print(queryTill) + "\")";
 
-      Response response = estimate(target, Optional.of(sessionId), Optional.of(testQuery));
+      Response response = estimate(target, Optional.of(sessionId), Optional.of(testQuery), mt);
 
       final String expectedErrMsg = testDimensionField + expectedErrorMsgSuffix;
 
@@ -297,8 +287,8 @@ public class QueryAPIErrorResponseTest extends LensJerseyTest {
 
       expectedData.verify(response);
     } finally {
-      dropDatabaseFailFast(target, sessionId, testDb);
-      closeSessionFailFast(target, sessionId);
+      dropDatabaseFailFast(target, sessionId, testDb, mt);
+      closeSessionFailFast(target, sessionId, mt);
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/query/TestLensDAO.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestLensDAO.java b/lens-server/src/test/java/org/apache/lens/server/query/TestLensDAO.java
index 01e846a..4597f9d 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestLensDAO.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestLensDAO.java
@@ -25,15 +25,12 @@ import java.sql.Statement;
 import java.util.HashMap;
 import java.util.List;
 
-import javax.ws.rs.core.Application;
-
 import org.apache.lens.api.LensConf;
 import org.apache.lens.api.LensSessionHandle;
 import org.apache.lens.api.query.LensQuery;
 import org.apache.lens.api.query.QueryHandle;
 import org.apache.lens.api.query.QueryStatus;
 import org.apache.lens.driver.jdbc.JDBCResultSet;
-import org.apache.lens.server.LensJerseyTest;
 import org.apache.lens.server.LensServices;
 import org.apache.lens.server.api.driver.MockDriver;
 import org.apache.lens.server.api.query.FinishedLensQuery;
@@ -53,7 +50,7 @@ import lombok.extern.slf4j.Slf4j;
  */
 @Test(groups = "unit-test")
 @Slf4j
-public class TestLensDAO extends LensJerseyTest {
+public class TestLensDAO {
 
   /**
    * Test lens server dao.
@@ -146,14 +143,4 @@ public class TestLensDAO extends LensJerseyTest {
     Assert.assertEquals(daoTestQueryHandles.size(), 1);
     Assert.assertEquals(daoTestQueryHandles.get(0).getHandleId().toString(), finishedHandle);
   }
-
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.glassfish.jersey.test.JerseyTest#configure()
-   */
-  @Override
-  protected Application configure() {
-    return new QueryApp();
-  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/query/TestQueryConstraints.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryConstraints.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryConstraints.java
index ab42a3d..8493d85 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryConstraints.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryConstraints.java
@@ -25,9 +25,9 @@ import static org.testng.Assert.*;
 import java.util.*;
 
 import javax.ws.rs.core.Application;
+import javax.ws.rs.core.MediaType;
 
 import org.apache.lens.api.LensSessionHandle;
-import org.apache.lens.api.jaxb.LensJAXBContextResolver;
 import org.apache.lens.api.query.QueryHandle;
 import org.apache.lens.driver.hive.HiveDriver;
 import org.apache.lens.server.LensJerseyTest;
@@ -47,8 +47,6 @@ import org.apache.lens.server.common.TestResourceFile;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 
-import org.glassfish.jersey.client.ClientConfig;
-import org.glassfish.jersey.media.multipart.MultiPartFeature;
 import org.glassfish.jersey.test.TestProperties;
 
 import org.testng.annotations.AfterMethod;
@@ -150,17 +148,6 @@ public class TestQueryConstraints extends LensJerseyTest {
     return new TestQueryService.QueryServiceTestApp();
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.glassfish.jersey.test.JerseyTest#configureClient(org.glassfish.jersey.client.ClientConfig)
-   */
-  @Override
-  protected void configureClient(ClientConfig config) {
-    config.register(MultiPartFeature.class);
-    config.register(LensJAXBContextResolver.class);
-  }
-
   /** The test table. */
   public static final String TEST_TABLE = "TEST_TABLE";
 
@@ -171,7 +158,7 @@ public class TestQueryConstraints extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    */
   private void createTable(String tblName) throws InterruptedException {
-    LensServerTestUtil.createTable(tblName, target(), lensSessionId);
+    LensServerTestUtil.createTable(tblName, target(), lensSessionId, defaultMT);
   }
 
   /**
@@ -182,7 +169,7 @@ public class TestQueryConstraints extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    */
   private void loadData(String tblName, final String testDataFile) throws InterruptedException {
-    LensServerTestUtil.loadDataFromClasspath(tblName, testDataFile, target(), lensSessionId);
+    LensServerTestUtil.loadDataFromClasspath(tblName, testDataFile, target(), lensSessionId, defaultMT);
   }
 
   /**
@@ -192,26 +179,26 @@ public class TestQueryConstraints extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    */
   private void dropTable(String tblName) throws InterruptedException {
-    LensServerTestUtil.dropTable(tblName, target(), lensSessionId);
+    LensServerTestUtil.dropTable(tblName, target(), lensSessionId, defaultMT);
   }
 
-  @Test
-  public void testThrottling() throws InterruptedException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testThrottling(MediaType mt) throws InterruptedException {
     List<QueryHandle> handles = Lists.newArrayList();
     for (int j = 0; j < 5; j++) {
       for (int i = 0; i < 10; i++) {
-        handles.add(launchQuery());
+        handles.add(launchQuery(mt));
         assertValidity();
       }
       // No harm in sleeping, the queries will anyway take time.
       Thread.sleep(1000);
     }
     for (QueryHandle handle : handles) {
-      RestAPITestUtil.waitForQueryToFinish(target(), lensSessionId, handle);
+      RestAPITestUtil.waitForQueryToFinish(target(), lensSessionId, handle, mt);
       assertValidity();
     }
     for (QueryHandle handle : handles) {
-      RestAPITestUtil.getLensQueryResult(target(), lensSessionId, handle);
+      RestAPITestUtil.getLensQueryResultAsString(target(), lensSessionId, handle, mt);
       assertValidity();
     }
   }
@@ -222,10 +209,10 @@ public class TestQueryConstraints extends LensJerseyTest {
       + queryService.getLaunchedQueries());
   }
 
-  private QueryHandle launchQuery() {
+  private QueryHandle launchQuery(MediaType mt) {
     return RestAPITestUtil.executeAndGetHandle(target(), Optional.of(lensSessionId),
       Optional.of("select ID from " + TEST_TABLE),
-      Optional.of(LensServerAPITestUtil.getLensConf(QUERY_METRIC_UNIQUE_ID_CONF_KEY, UUID.randomUUID())));
+      Optional.of(LensServerAPITestUtil.getLensConf(QUERY_METRIC_UNIQUE_ID_CONF_KEY, UUID.randomUUID())), mt);
   }
 
   @AfterMethod

http://git-wip-us.apache.org/repos/asf/lens/blob/d559ef2e/lens-server/src/test/java/org/apache/lens/server/query/TestQueryEndEmailNotifier.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryEndEmailNotifier.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryEndEmailNotifier.java
index 4ac42b2..e97a5a3 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryEndEmailNotifier.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryEndEmailNotifier.java
@@ -31,6 +31,7 @@ import java.util.Map;
 
 import javax.ws.rs.core.Application;
 import javax.ws.rs.core.GenericType;
+import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 
 import org.apache.lens.api.LensConf;
@@ -49,8 +50,7 @@ import org.apache.lens.server.common.TestResourceFile;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 
-import org.glassfish.jersey.client.ClientConfig;
-import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.glassfish.jersey.test.TestProperties;
 import org.subethamail.wiser.Wiser;
 import org.subethamail.wiser.WiserMessage;
 import org.testng.annotations.*;
@@ -64,6 +64,7 @@ import com.google.common.base.Optional;
 public class TestQueryEndEmailNotifier extends LensJerseyTest {
 
   private static final int NUM_ITERS = 30;
+
   /** The query service. */
   QueryExecutionServiceImpl queryService;
 
@@ -121,19 +122,11 @@ public class TestQueryEndEmailNotifier extends LensJerseyTest {
    */
   @Override
   protected Application configure() {
+    enable(TestProperties.LOG_TRAFFIC);
+    enable(TestProperties.DUMP_ENTITY);
     return new QueryApp();
   }
 
-  /*
-   * (non-Javadoc)
-   *
-   * @see org.glassfish.jersey.test.JerseyTest#configureClient(org.glassfish.jersey.client.ClientConfig)
-   */
-  @Override
-  protected void configureClient(ClientConfig config) {
-    config.register(MultiPartFeature.class);
-  }
-
   /** The test table. */
   public static final String TEST_TABLE = "EMAIL_NOTIFIER_TEST_TABLE";
 
@@ -144,7 +137,7 @@ public class TestQueryEndEmailNotifier extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    */
   private void createTable(String tblName) throws InterruptedException {
-    LensServerTestUtil.createTable(tblName, target(), lensSessionId);
+    LensServerTestUtil.createTable(tblName, target(), lensSessionId, defaultMT);
   }
 
   /**
@@ -155,7 +148,7 @@ public class TestQueryEndEmailNotifier extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    */
   private void loadData(String tblName, final String testDataFile) throws InterruptedException {
-    LensServerTestUtil.loadDataFromClasspath(tblName, testDataFile, target(), lensSessionId);
+    LensServerTestUtil.loadDataFromClasspath(tblName, testDataFile, target(), lensSessionId, defaultMT);
   }
 
   /**
@@ -165,13 +158,13 @@ public class TestQueryEndEmailNotifier extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    */
   private void dropTable(String tblName) throws InterruptedException {
-    LensServerTestUtil.dropTable(tblName, target(), lensSessionId);
+    LensServerTestUtil.dropTable(tblName, target(), lensSessionId, defaultMT);
   }
 
-  private QueryHandle launchAndWaitForQuery(LensConf conf, String query, Status expectedStatus)
+  private QueryHandle launchAndWaitForQuery(LensConf conf, String query, Status expectedStatus, MediaType mt)
     throws InterruptedException {
     return executeAndWaitForQueryToFinish(target(), lensSessionId, query, Optional.of(conf),
-      Optional.of(expectedStatus)).getQueryHandle();
+      Optional.of(expectedStatus), mt).getQueryHandle();
   }
 
   private WiserMessage getMessage() throws InterruptedException {
@@ -200,58 +193,64 @@ public class TestQueryEndEmailNotifier extends LensJerseyTest {
    *
    * @throws InterruptedException the interrupted exception
    */
-  @Test
-  public void testLaunchFailure() throws InterruptedException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testLaunchFailure(MediaType mt) throws InterruptedException {
     // launch failure
-    final Response response = execute(target(), Optional.of(lensSessionId), Optional.of("select fail from non_exist"));
+    final Response response = execute(target(), Optional.of(lensSessionId), Optional.of("select fail from non_exist"),
+      mt);
     assertEquals(response.getStatus(), Response.Status.OK.getStatusCode());
     QueryHandle handle = response.readEntity(new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
     assertKeywordsContains(getMessage(), handle, "Launching query failed", "Reason");
   }
 
-  @Test
-  public void testFormattingFailure() throws InterruptedException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testFormattingFailure(MediaType mt) throws InterruptedException {
     // formatting failure
     LensConf conf = getLensConf(
       LensConfConstants.QUERY_PERSISTENT_RESULT_SET, "true",
       LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false",
       LensConfConstants.QUERY_OUTPUT_SERDE, "NonexistentSerde.class");
-    QueryHandle handle = launchAndWaitForQuery(conf, "select ID, IDSTR from " + TEST_TABLE, Status.FAILED);
+    QueryHandle handle = launchAndWaitForQuery(conf, "select ID, IDSTR from " + TEST_TABLE, Status.FAILED, mt);
     assertKeywordsContains(getMessage(), handle, "Result formatting failed!", "Reason");
   }
 
-  @Test
-  public void testExecutionFailure() throws InterruptedException {
+  @Test(dataProvider = "mediaTypeData")
+  public void testExecutionFailure(MediaType mt) throws InterruptedException {
     // execution failure
     LensConf conf = getLensConf(
       LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "true",
       HiveConf.ConfVars.COMPRESSRESULT.name(), "true",
       "mapred.compress.map.output", "true",
       "mapred.map.output.compression.codec", "nonexisting");
-    QueryHandle handle = launchAndWaitForQuery(conf, "select count(ID) from " + TEST_TABLE, Status.FAILED);
+    QueryHandle handle = launchAndWaitForQuery(conf, "select count(ID) from " + TEST_TABLE, Status.FAILED, mt);
     assertKeywordsContains(getMessage(), handle, "Query execution failed!", "Reason");
   }
 
   @DataProvider(name = "success-tests")
   public Object[][] persistenceConfigDataProvider() {
     return new Object[][]{
-      {false, false, },
-      {true, false, },
-      {false, true, },
-      {true, true, },
+      {false, false, MediaType.APPLICATION_XML_TYPE},
+      {true, false, MediaType.APPLICATION_XML_TYPE},
+      {false, true, MediaType.APPLICATION_XML_TYPE},
+      {true, true, MediaType.APPLICATION_XML_TYPE},
+      {false, false, MediaType.APPLICATION_JSON_TYPE},
+      {true, false, MediaType.APPLICATION_JSON_TYPE},
+      {false, true, MediaType.APPLICATION_JSON_TYPE},
+      {true, true, MediaType.APPLICATION_JSON_TYPE},
     };
   }
 
   @Test(dataProvider = "success-tests")
-  public void testSuccessfulQuery(Boolean lensPersistence, Boolean driverPersistence) throws InterruptedException {
+  public void testSuccessfulQuery(Boolean lensPersistence, Boolean driverPersistence, MediaType mt)
+    throws InterruptedException {
     // successful query
     LensConf conf = getLensConf(
       LensConfConstants.QUERY_PERSISTENT_RESULT_SET, lensPersistence,
       LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, driverPersistence);
-    QueryHandle handle = launchAndWaitForQuery(conf, "select ID, IDSTR from " + TEST_TABLE, Status.SUCCESSFUL);
+    QueryHandle handle = launchAndWaitForQuery(conf, "select ID, IDSTR from " + TEST_TABLE, Status.SUCCESSFUL, mt);
     String expectedKeywords;
     if (lensPersistence || driverPersistence) {
-      QueryResult result = getLensQueryResult(target(), lensSessionId, handle);
+      QueryResult result = getLensQueryResult(target(), lensSessionId, handle, mt);
       expectedKeywords = result.toPrettyString();
     } else {
       expectedKeywords = InMemoryQueryResult.DECLARATION;


[16/51] [abbrv] lens git commit: LENS-907 : Subsequent calls to metastore API to fetch native tables throws error after the first call

Posted by de...@apache.org.
LENS-907 : Subsequent calls to the metastore API to fetch native tables throw an error after the first call


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/4d7c8e4d
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/4d7c8e4d
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/4d7c8e4d

Branch: refs/heads/current-release-line
Commit: 4d7c8e4db3e9aa81da042921a31dfd670982ce38
Parents: 7a89db1
Author: Deepak Barr <de...@apache.org>
Authored: Thu Dec 24 17:14:31 2015 +0530
Committer: Deepak Kumar Barr <de...@apache.org>
Committed: Thu Dec 24 17:14:31 2015 +0530

----------------------------------------------------------------------
 .../apache/lens/server/metastore/CubeMetastoreServiceImpl.java   | 4 ----
 1 file changed, 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/4d7c8e4d/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
index 66ed938..cf49a13 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
@@ -1212,10 +1212,6 @@ public class CubeMetastoreServiceImpl extends BaseLensService implements CubeMet
       return result;
     } catch (Exception e) {
       throw new LensException("Error getting native tables from DB", e);
-    } finally {
-      if (null != msc) {
-        msc.close();
-      }
     }
   }
 


[27/51] [abbrv] lens git commit: LENS-735 : Remove accepting TableReferences for ReferenceDimAttribute

Posted by de...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/metadata/join/JoinPath.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/join/JoinPath.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/join/JoinPath.java
new file mode 100644
index 0000000..48f04bb
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/join/JoinPath.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.metadata.join;
+
+import java.util.*;
+
+import org.apache.lens.cube.metadata.AbstractCubeTable;
+
+/**
+ * A list of table relationships that can be combined to get a join clause
+ */
+/**
+ * A list of table relationships (edges) that can be combined to get a join clause.
+ * An instance represents one path through the schema graph from a joined table to
+ * the join target; the column map is built lazily from the edges.
+ */
+public class JoinPath {
+  /** Ordered edges making up this path. */
+  final List<TableRelationship> edges;
+  // Store the map of a table against all columns of that table which are in the path.
+  // Populated lazily by initColumnsForTable(); final since it is never reassigned.
+  private final Map<AbstractCubeTable, List<String>> columnsForTable = new HashMap<>();
+
+  public JoinPath() {
+    edges = new ArrayList<>();
+  }
+
+  /**
+   * Copy constructor. Copies the edge list only; the column map of the copy
+   * starts empty and is rebuilt on the next initColumnsForTable() call.
+   *
+   * @param other path to copy edges from
+   */
+  public JoinPath(JoinPath other) {
+    edges = new ArrayList<>(other.edges);
+  }
+
+  /**
+   * Builds the table-to-columns map from the current edges. Idempotent: if the
+   * map is already populated the call is a no-op.
+   */
+  public void initColumnsForTable() {
+    if (!columnsForTable.isEmpty()) {
+      // already initialized
+      return;
+    }
+    for (TableRelationship edge : edges) {
+      addColumnsForEdge(edge);
+    }
+  }
+
+  public void addEdge(TableRelationship edge) {
+    edges.add(edge);
+  }
+
+  public boolean isEmpty() {
+    return edges.isEmpty();
+  }
+
+  public List<TableRelationship> getEdges() {
+    return edges;
+  }
+
+  /** Records both endpoints of an edge in the column map. */
+  private void addColumnsForEdge(TableRelationship edge) {
+    addColumn(edge.getFromTable(), edge.getFromColumn());
+    addColumn(edge.getToTable(), edge.getToColumn());
+  }
+
+  private void addColumn(AbstractCubeTable table, String column) {
+    if (table == null || column == null) {
+      // edges may have incomplete endpoints; silently skip them
+      return;
+    }
+    List<String> columns = columnsForTable.get(table);
+    if (columns == null) {
+      columns = new ArrayList<>();
+      columnsForTable.put(table, columns);
+    }
+    columns.add(column);
+  }
+
+  /**
+   * @param table table to look up
+   * @return columns of the given table that participate in this path, or null
+   *         if the table does not occur in the path (callers must handle null)
+   */
+  public List<String> getColumnsForTable(AbstractCubeTable table) {
+    return columnsForTable.get(table);
+  }
+
+  public Set<AbstractCubeTable> getAllTables() {
+    return columnsForTable.keySet();
+  }
+
+  /**
+   * @return true if the given table.column appears as either endpoint of any
+   *         edge in this path
+   */
+  public boolean containsColumnOfTable(String column, AbstractCubeTable table) {
+    for (TableRelationship edge : edges) {
+      // parenthesize both disjuncts explicitly; previously the second relied on
+      // && binding tighter than ||, which read ambiguously
+      if ((table.equals(edge.getFromTable()) && column.equals(edge.getFromColumn()))
+        || (table.equals(edge.getToTable()) && column.equals(edge.getToColumn()))) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  @Override
+  public String toString() {
+    return edges.toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/metadata/join/TableRelationship.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/join/TableRelationship.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/join/TableRelationship.java
new file mode 100644
index 0000000..dabb9ef
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/join/TableRelationship.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.metadata.join;
+
+import org.apache.lens.cube.metadata.AbstractCubeTable;
+
+import lombok.AllArgsConstructor;
+import lombok.Data;
+import lombok.RequiredArgsConstructor;
+
+/**
+ * An edge in the schema graph: a join relationship from
+ * {@code fromTable.fromColumn} to {@code toTable.toColumn}.
+ * Accessors, equals/hashCode and constructors are generated by Lombok.
+ */
+@Data
+@AllArgsConstructor
+@RequiredArgsConstructor
+public class TableRelationship {
+  // source column of the join condition
+  final String fromColumn;
+  // table on the "from" side of the edge
+  final AbstractCubeTable fromTable;
+  // destination column of the join condition
+  final String toColumn;
+  // table on the "to" side of the edge
+  final AbstractCubeTable toTable;
+  // true when one row on the "from" side maps to many rows on the "to" side
+  // (a bridge-table relationship); rendered as "[n]" in toString()
+  boolean mapsToMany = false;
+
+  @Override
+  public String toString() {
+    return fromTable.getName() + "." + fromColumn + "->" + toTable.getName() + "." + toColumn
+      + (mapsToMany ? "[n]" : "");
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/parse/AutoJoinContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AutoJoinContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AutoJoinContext.java
deleted file mode 100644
index 7f13c6c..0000000
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AutoJoinContext.java
+++ /dev/null
@@ -1,760 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import java.util.*;
-
-import org.apache.lens.cube.error.LensCubeErrorCode;
-import org.apache.lens.cube.metadata.*;
-import org.apache.lens.server.api.error.LensException;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.ql.parse.JoinType;
-
-import lombok.Getter;
-import lombok.Setter;
-import lombok.extern.slf4j.Slf4j;
-
-/**
- * Store join chain information resolved by join resolver
- */
-@Slf4j
-public class AutoJoinContext {
-  // Map of a joined table to list of all possible paths from that table to
-  // the target
-  private final Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> allPaths;
-  @Getter
-  // User supplied partial join conditions
-  private final Map<AbstractCubeTable, String> partialJoinConditions;
-  // True if the query contains user supplied partial join conditions
-  @Getter
-  private final boolean partialJoinChains;
-  @Getter
-  // Map of joined table to the join type (if provided by user)
-  private final Map<AbstractCubeTable, JoinType> tableJoinTypeMap;
-
-  // True if joins were resolved automatically
-  private boolean joinsResolved;
-  // Target table for the auto join resolver
-  private final AbstractCubeTable autoJoinTarget;
-  // Configuration string to control join type
-  private String joinTypeCfg;
-
-  // Map of a joined table to its columns which are part of any of the join
-  // paths. This is used in candidate table resolver
-  @Getter
-  private Map<Dimension, Map<AbstractCubeTable, List<String>>> joinPathFromColumns =
-    new HashMap<Dimension, Map<AbstractCubeTable, List<String>>>();
-
-  @Getter
-  private Map<Dimension, Map<AbstractCubeTable, List<String>>> joinPathToColumns =
-    new HashMap<Dimension, Map<AbstractCubeTable, List<String>>>();
-
-  // there can be separate join clause for each fact incase of multi fact queries
-  @Getter
-  Map<CandidateFact, JoinClause> factClauses = new HashMap<CandidateFact, JoinClause>();
-  @Getter
-  @Setter
-  JoinClause minCostClause;
-  private final boolean flattenBridgeTables;
-  private final String bridgeTableFieldAggr;
-
-  public AutoJoinContext(Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> allPaths,
-                         Map<Dimension, CubeQueryContext.OptionalDimCtx> optionalDimensions,
-                         Map<AbstractCubeTable, String> partialJoinConditions,
-                         boolean partialJoinChains, Map<AbstractCubeTable, JoinType> tableJoinTypeMap,
-                         AbstractCubeTable autoJoinTarget, String joinTypeCfg, boolean joinsResolved,
-                         boolean flattenBridgeTables, String bridgeTableFieldAggr) {
-    this.allPaths = allPaths;
-    initJoinPathColumns();
-    this.partialJoinConditions = partialJoinConditions;
-    this.partialJoinChains = partialJoinChains;
-    this.tableJoinTypeMap = tableJoinTypeMap;
-    this.autoJoinTarget = autoJoinTarget;
-    this.joinTypeCfg = joinTypeCfg;
-    this.joinsResolved = joinsResolved;
-    this.flattenBridgeTables = flattenBridgeTables;
-    this.bridgeTableFieldAggr = bridgeTableFieldAggr;
-    log.debug("All join paths:{}", allPaths);
-    log.debug("Join path from columns:{}", joinPathFromColumns);
-    log.debug("Join path to columns:{}", joinPathToColumns);
-  }
-
-  public AbstractCubeTable getAutoJoinTarget() {
-    return autoJoinTarget;
-  }
-
-  private JoinClause getJoinClause(CandidateFact fact) {
-    if (fact == null || !factClauses.containsKey(fact)) {
-      return minCostClause;
-    }
-    return factClauses.get(fact);
-  }
-
-  // Populate map of tables to their columns which are present in any of the
-  // join paths
-  private void initJoinPathColumns() {
-    for (List<SchemaGraph.JoinPath> paths : allPaths.values()) {
-      for (int i = 0; i < paths.size(); i++) {
-        SchemaGraph.JoinPath jp = paths.get(i);
-        jp.initColumnsForTable();
-      }
-    }
-    refreshJoinPathColumns();
-  }
-
-  public void refreshJoinPathColumns() {
-    joinPathFromColumns.clear();
-    joinPathToColumns.clear();
-    for (Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>> joinPathEntry : allPaths.entrySet()) {
-      List<SchemaGraph.JoinPath> joinPaths = joinPathEntry.getValue();
-      Map<AbstractCubeTable, List<String>> fromColPaths = joinPathFromColumns.get(joinPathEntry.getKey().getObject());
-      Map<AbstractCubeTable, List<String>> toColPaths = joinPathToColumns.get(joinPathEntry.getKey().getObject());
-      if (fromColPaths == null) {
-        fromColPaths = new HashMap<AbstractCubeTable, List<String>>();
-        joinPathFromColumns.put(joinPathEntry.getKey().getObject(), fromColPaths);
-      }
-
-      if (toColPaths == null) {
-        toColPaths = new HashMap<AbstractCubeTable, List<String>>();
-        joinPathToColumns.put(joinPathEntry.getKey().getObject(), toColPaths);
-      }
-      populateJoinPathCols(joinPaths, fromColPaths, toColPaths);
-    }
-  }
-
-  private void populateJoinPathCols(List<SchemaGraph.JoinPath> joinPaths,
-    Map<AbstractCubeTable, List<String>> fromPathColumns, Map<AbstractCubeTable, List<String>> toPathColumns) {
-    for (SchemaGraph.JoinPath path : joinPaths) {
-      for (SchemaGraph.TableRelationship edge : path.getEdges()) {
-        AbstractCubeTable fromTable = edge.getFromTable();
-        String fromColumn = edge.getFromColumn();
-        List<String> columnsOfFromTable = fromPathColumns.get(fromTable);
-        if (columnsOfFromTable == null) {
-          columnsOfFromTable = new ArrayList<String>();
-          fromPathColumns.put(fromTable, columnsOfFromTable);
-        }
-        columnsOfFromTable.add(fromColumn);
-
-        // Similarly populate for the 'to' table
-        AbstractCubeTable toTable = edge.getToTable();
-        String toColumn = edge.getToColumn();
-        List<String> columnsOfToTable = toPathColumns.get(toTable);
-        if (columnsOfToTable == null) {
-          columnsOfToTable = new ArrayList<String>();
-          toPathColumns.put(toTable, columnsOfToTable);
-        }
-        columnsOfToTable.add(toColumn);
-      }
-    }
-  }
-
-  public void removeJoinedTable(Dimension dim) {
-    allPaths.remove(Aliased.create(dim));
-    joinPathFromColumns.remove(dim);
-  }
-
-  public Map<AbstractCubeTable, String> getPartialJoinConditions() {
-    return partialJoinConditions;
-  }
-
-  public String getFromString(String fromTable, CandidateFact fact, Set<Dimension> qdims,
-    Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext cubeql) throws LensException {
-    String fromString = fromTable;
-    log.info("All paths dump:{}", cubeql.getAutoJoinCtx().getAllPaths());
-    if (qdims == null || qdims.isEmpty()) {
-      return fromString;
-    }
-    // Compute the merged join clause string for the min cost joinclause
-    String clause = getMergedJoinClause(cubeql, cubeql.getAutoJoinCtx().getJoinClause(fact), dimsToQuery);
-
-    fromString += clause;
-    return fromString;
-  }
-
-  // Some refactoring needed to account for multiple join paths
-  public String getMergedJoinClause(CubeQueryContext cubeql, JoinClause joinClause,
-                                    Map<Dimension, CandidateDim> dimsToQuery) {
-    Set<String> clauses = new LinkedHashSet<String>();
-    String joinTypeStr = "";
-    JoinType joinType = JoinType.INNER;
-
-    // this flag is set to true if user has specified a partial join chain
-    if (!partialJoinChains) {
-      // User has not specified any join conditions. In this case, we rely on
-      // configuration for the join type
-      if (StringUtils.isNotBlank(joinTypeCfg)) {
-        joinType = JoinType.valueOf(joinTypeCfg.toUpperCase());
-        joinTypeStr = JoinResolver.getJoinTypeStr(joinType);
-      }
-    }
-
-    Iterator<JoinTree> iter = joinClause.getJoinTree().dft();
-    boolean hasBridgeTable = false;
-    boolean initedBridgeClauses = false;
-    StringBuilder bridgeSelectClause = new StringBuilder();
-    StringBuilder bridgeFromClause = new StringBuilder();
-    StringBuilder bridgeFilterClause = new StringBuilder();
-    StringBuilder bridgeJoinClause = new StringBuilder();
-    StringBuilder bridgeGroupbyClause = new StringBuilder();
-
-    while (iter.hasNext()) {
-      JoinTree cur = iter.next();
-      if (partialJoinChains) {
-        joinType = cur.getJoinType();
-        joinTypeStr = JoinResolver.getJoinTypeStr(joinType);
-      }
-      SchemaGraph.TableRelationship rel = cur.parentRelationship;
-      String toAlias, fromAlias;
-      fromAlias = cur.parent.getAlias();
-      toAlias = cur.getAlias();
-      hasBridgeTable = flattenBridgeTables && (hasBridgeTable || rel.isMapsToMany());
-      // We have to push user specified filters for the joined tables
-      String userFilter = null;
-      // Partition condition on the tables also needs to be pushed depending
-      // on the join
-      String storageFilter = null;
-
-      if (JoinType.INNER == joinType || JoinType.LEFTOUTER == joinType || JoinType.LEFTSEMI == joinType) {
-        // For inner and left joins push filter of right table
-        userFilter = partialJoinConditions.get(rel.getToTable());
-        if (partialJoinConditions.containsKey(rel.getFromTable())) {
-          if (StringUtils.isNotBlank(userFilter)) {
-            userFilter += (" AND " + partialJoinConditions.get(rel.getFromTable()));
-          } else {
-            userFilter = partialJoinConditions.get(rel.getFromTable());
-          }
-        }
-        storageFilter = getStorageFilter(dimsToQuery, rel.getToTable(), toAlias);
-        dimsToQuery.get(rel.getToTable()).setWhereClauseAdded();
-      } else if (JoinType.RIGHTOUTER == joinType) {
-        // For right outer joins, push filters of left table
-        userFilter = partialJoinConditions.get(rel.getFromTable());
-        if (partialJoinConditions.containsKey(rel.getToTable())) {
-          if (StringUtils.isNotBlank(userFilter)) {
-            userFilter += (" AND " + partialJoinConditions.get(rel.getToTable()));
-          } else {
-            userFilter = partialJoinConditions.get(rel.getToTable());
-          }
-        }
-        if (rel.getFromTable() instanceof Dimension) {
-          storageFilter = getStorageFilter(dimsToQuery, rel.getFromTable(), fromAlias);
-          dimsToQuery.get(rel.getFromTable()).setWhereClauseAdded();
-        }
-      } else if (JoinType.FULLOUTER == joinType) {
-        // For full outer we need to push filters of both left and right
-        // tables in the join clause
-        String leftFilter = null, rightFilter = null;
-        String leftStorageFilter = null, rightStorgeFilter = null;
-
-        if (StringUtils.isNotBlank(partialJoinConditions.get(rel.getFromTable()))) {
-          leftFilter = partialJoinConditions.get(rel.getFromTable()) + " and ";
-        }
-
-        if (rel.getFromTable() instanceof Dimension) {
-          leftStorageFilter = getStorageFilter(dimsToQuery, rel.getFromTable(), fromAlias);
-          if (StringUtils.isNotBlank((leftStorageFilter))) {
-            dimsToQuery.get(rel.getFromTable()).setWhereClauseAdded();
-          }
-        }
-
-        if (StringUtils.isNotBlank(partialJoinConditions.get(rel.getToTable()))) {
-          rightFilter = partialJoinConditions.get(rel.getToTable());
-        }
-
-        rightStorgeFilter = getStorageFilter(dimsToQuery, rel.getToTable(), toAlias);
-        if (StringUtils.isNotBlank(rightStorgeFilter)) {
-          if (StringUtils.isNotBlank((leftStorageFilter))) {
-            leftStorageFilter += " and ";
-          }
-          dimsToQuery.get(rel.getToTable()).setWhereClauseAdded();
-        }
-
-        userFilter = (leftFilter == null ? "" : leftFilter) + (rightFilter == null ? "" : rightFilter);
-        storageFilter =
-          (leftStorageFilter == null ? "" : leftStorageFilter)
-            + (rightStorgeFilter == null ? "" : rightStorgeFilter);
-      }
-      StringBuilder clause = new StringBuilder();
-
-      // if a bridge table is present in the path
-      if (hasBridgeTable) {
-        // if any relation has bridge table, the clause becomes the following :
-        // join (" select " + joinkey + " aggr over fields from bridge table + from bridgeTable + [where user/storage
-        // filters] + groupby joinkey) on joincond"
-        // Or
-        // " join (select " + joinkey + " aggr over fields from table reached through bridge table + from bridge table
-        // join <next tables> on join condition + [and user/storage filters] + groupby joinkey) on joincond
-        if (!initedBridgeClauses) {
-          // we just found a bridge table in the path we need to initialize the clauses for subquery required for
-          // aggregating fields of bridge table
-          // initiliaze select clause with join key
-          bridgeSelectClause.append(" (select ").append(toAlias).append(".").append(rel.getToColumn()).append(" as ")
-          .append(rel.getToColumn());
-          // group by join key
-          bridgeGroupbyClause.append(" group by ").append(toAlias).append(".").append(rel.getToColumn());
-          // from clause with bridge table
-          bridgeFromClause.append(" from ").append(dimsToQuery.get(rel.getToTable()).getStorageString(toAlias));
-          // we need to initialize filter clause with user filter clause or storgae filter if applicable
-          if (StringUtils.isNotBlank(userFilter)) {
-            bridgeFilterClause.append(userFilter);
-          }
-          if (StringUtils.isNotBlank(storageFilter)) {
-            if (StringUtils.isNotBlank(bridgeFilterClause.toString())) {
-              bridgeFilterClause.append(" and ");
-            }
-            bridgeFilterClause.append(storageFilter);
-          }
-          // initialize final join clause
-          bridgeJoinClause.append(" on ").append(fromAlias).append(".")
-            .append(rel.getFromColumn()).append(" = ").append("%s")
-            .append(".").append(rel.getToColumn());
-          initedBridgeClauses = true;
-        } else {
-          // if bridge clauses are already inited, this is a next table getting joined with bridge table
-          // we will append a simple join clause
-          bridgeFromClause.append(joinTypeStr).append(" join ");
-          bridgeFromClause.append(dimsToQuery.get(rel.getToTable()).getStorageString(toAlias));
-          bridgeFromClause.append(" on ").append(fromAlias).append(".")
-            .append(rel.getFromColumn()).append(" = ").append(toAlias)
-            .append(".").append(rel.getToColumn());
-
-          if (StringUtils.isNotBlank(userFilter)) {
-            bridgeFromClause.append(" and ").append(userFilter);
-          }
-          if (StringUtils.isNotBlank(storageFilter)) {
-            bridgeFromClause.append(" and ").append(storageFilter);
-          }
-        }
-        if (cubeql.getTblAliasToColumns().get(toAlias) != null
-          && !cubeql.getTblAliasToColumns().get(toAlias).isEmpty()) {
-          // there are fields selected from this table after seeing bridge table in path
-          // we should make subquery for this selection
-          clause.append(joinTypeStr).append(" join ");
-          clause.append(bridgeSelectClause.toString());
-          for (String col : cubeql.getTblAliasToColumns().get(toAlias)) {
-            clause.append(",").append(bridgeTableFieldAggr).append("(").append(toAlias)
-              .append(".").append(col)
-              .append(")")
-              .append(" as ").append(col);
-          }
-          String bridgeFrom = bridgeFromClause.toString();
-          clause.append(bridgeFrom);
-          String bridgeFilter = bridgeFilterClause.toString();
-          if (StringUtils.isNotBlank(bridgeFilter)) {
-            if (bridgeFrom.contains(" join ")) {
-              clause.append(" and ");
-            } else {
-              clause.append(" where");
-            }
-            clause.append(bridgeFilter.toString());
-          }
-          clause.append(bridgeGroupbyClause.toString());
-          clause.append(") ").append(toAlias);
-          clause.append(String.format(bridgeJoinClause.toString(), toAlias));
-          clauses.add(clause.toString());
-        }
-        if (cur.getSubtrees().isEmpty()) {
-          // clear bridge flags and builders, as there are no more clauses in this tree.
-          hasBridgeTable = false;
-          initedBridgeClauses = false;
-          bridgeSelectClause.setLength(0);
-          bridgeFromClause.setLength(0);
-          bridgeFilterClause.setLength(0);
-          bridgeJoinClause.setLength(0);
-          bridgeGroupbyClause.setLength(0);
-        }
-      } else {
-        // Simple join clause is :
-        // jointype + " join " + destTable + " on " + joincond + [" and" + userfilter] + ["and" + storageFilter]
-        clause.append(joinTypeStr).append(" join ");
-        //Add storage table name followed by alias
-        clause.append(dimsToQuery.get(rel.getToTable()).getStorageString(toAlias));
-        clause.append(" on ").append(fromAlias).append(".")
-          .append(rel.getFromColumn()).append(" = ").append(toAlias)
-          .append(".").append(rel.getToColumn());
-
-        if (StringUtils.isNotBlank(userFilter)) {
-          clause.append(" and ").append(userFilter);
-        }
-        if (StringUtils.isNotBlank(storageFilter)) {
-          clause.append(" and ").append(storageFilter);
-        }
-        clauses.add(clause.toString());
-      }
-    }
-    return StringUtils.join(clauses, "");
-  }
-
-  public Set<Dimension> getDimsOnPath(Map<Aliased<Dimension>, List<SchemaGraph.TableRelationship>> joinChain,
-    Set<Dimension> qdims) {
-    Set<Dimension> dimsOnPath = new HashSet<Dimension>();
-    for (Map.Entry<Aliased<Dimension>, List<SchemaGraph.TableRelationship>> entry : joinChain.entrySet()) {
-      List<SchemaGraph.TableRelationship> chain = entry.getValue();
-      Dimension table = entry.getKey().getObject();
-
-      // check if join with this dimension is required
-      if (!qdims.contains(table)) {
-        continue;
-      }
-
-      for (int i = chain.size() - 1; i >= 0; i--) {
-        SchemaGraph.TableRelationship rel = chain.get(i);
-        dimsOnPath.add((Dimension) rel.getToTable());
-      }
-    }
-    return dimsOnPath;
-  }
-
-  private String getStorageFilter(Map<Dimension, CandidateDim> dimsToQuery, AbstractCubeTable table, String alias) {
-    String whereClause = "";
-    if (dimsToQuery != null && dimsToQuery.get(table) != null) {
-      if (StringUtils.isNotBlank(dimsToQuery.get(table).getWhereClause())) {
-        whereClause = dimsToQuery.get(table).getWhereClause();
-        if (alias != null) {
-          whereClause = StorageUtil.getWhereClause(whereClause, alias);
-        }
-      }
-    }
-    return whereClause;
-  }
-
-  /**
-   * @return the joinsResolved
-   */
-  public boolean isJoinsResolved() {
-    return joinsResolved;
-  }
-
-  // Includes both queried join paths and optional join paths
-  public Set<String> getAllJoinPathColumnsOfTable(AbstractCubeTable table) {
-    Set<String> allPaths = new HashSet<String>();
-    for (Map<AbstractCubeTable, List<String>> optPaths : joinPathFromColumns.values()) {
-      if (optPaths.get(table) != null) {
-        allPaths.addAll(optPaths.get(table));
-      }
-    }
-
-    for (Map<AbstractCubeTable, List<String>> optPaths : joinPathToColumns.values()) {
-      if (optPaths.get(table) != null) {
-        allPaths.addAll(optPaths.get(table));
-      }
-    }
-
-    return allPaths;
-  }
-
-  public void pruneAllPaths(CubeInterface cube, final Set<CandidateFact> cfacts,
-    final Map<Dimension, CandidateDim> dimsToQuery) {
-    // Remove join paths which cannot be satisfied by the resolved candidate
-    // fact and dimension tables
-    if (cfacts != null) {
-      // include columns from all picked facts
-      Set<String> factColumns = new HashSet<String>();
-      for (CandidateFact cfact : cfacts) {
-        factColumns.addAll(cfact.getColumns());
-      }
-
-      for (List<SchemaGraph.JoinPath> paths : allPaths.values()) {
-        for (int i = 0; i < paths.size(); i++) {
-          SchemaGraph.JoinPath jp = paths.get(i);
-          List<String> cubeCols = jp.getColumnsForTable((AbstractCubeTable) cube);
-          if (cubeCols != null && !factColumns.containsAll(cubeCols)) {
-            // This path requires some columns from the cube which are not
-            // present in the candidate fact
-            // Remove this path
-            log.info("Removing join path:{} as columns :{} dont exist", jp, cubeCols);
-            paths.remove(i);
-            i--;
-          }
-        }
-      }
-      pruneEmptyPaths(allPaths);
-    }
-    pruneAllPaths(dimsToQuery);
-  }
-
-  /**
-   * Prunes allPaths by removing paths which contain columns that are not present in any candidate dims.
-   *
-   * @param candidateDims
-   */
-  public void pruneAllPathsForCandidateDims(Map<Dimension, Set<CandidateDim>> candidateDims) {
-    Map<Dimension, Set<String>> dimColumns = new HashMap<Dimension, Set<String>>();
-    // populate all columns present in candidate dims for each dimension
-    for (Map.Entry<Dimension, Set<CandidateDim>> entry : candidateDims.entrySet()) {
-      Dimension dim = entry.getKey();
-      Set<String> allColumns = new HashSet<String>();
-      for (CandidateDim cdim : entry.getValue()) {
-        allColumns.addAll(cdim.getColumns());
-      }
-      dimColumns.put(dim, allColumns);
-    }
-    for (List<SchemaGraph.JoinPath> paths : allPaths.values()) {
-      for (int i = 0; i < paths.size(); i++) {
-        SchemaGraph.JoinPath jp = paths.get(i);
-        for (AbstractCubeTable refTable : jp.getAllTables()) {
-          List<String> cols = jp.getColumnsForTable(refTable);
-          if (refTable instanceof Dimension) {
-            if (cols != null && (dimColumns.get(refTable) == null || !dimColumns.get(refTable).containsAll(cols))) {
-              // This path requires some columns from the cube which are not present in any candidate dim
-              // Remove this path
-              log.info("Removing join path:{} as columns :{} dont exist", jp, cols);
-              paths.remove(i);
-              i--;
-              break;
-            }
-          }
-        }
-      }
-    }
-    pruneEmptyPaths(allPaths);
-  }
-
-  private void pruneEmptyPaths(Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> allPaths) {
-    Iterator<Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>>> iter = allPaths.entrySet().iterator();
-    while (iter.hasNext()) {
-      Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>> entry = iter.next();
-      if (entry.getValue().isEmpty()) {
-        iter.remove();
-      }
-    }
-  }
-
-  private Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> pruneFactPaths(CubeInterface cube,
-    final CandidateFact cfact) {
-    Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> prunedPaths
-      = new HashMap<Aliased<Dimension>, List<SchemaGraph.JoinPath>>();
-    // Remove join paths which cannot be satisfied by the candidate fact
-    for (Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>> ppaths : allPaths.entrySet()) {
-      prunedPaths.put(ppaths.getKey(), new ArrayList<SchemaGraph.JoinPath>(ppaths.getValue()));
-      List<SchemaGraph.JoinPath> paths = prunedPaths.get(ppaths.getKey());
-      for (int i = 0; i < paths.size(); i++) {
-        SchemaGraph.JoinPath jp = paths.get(i);
-        List<String> cubeCols = jp.getColumnsForTable((AbstractCubeTable) cube);
-        if (cubeCols != null && !cfact.getColumns().containsAll(cubeCols)) {
-          // This path requires some columns from the cube which are not
-          // present in the candidate fact
-          // Remove this path
-          log.info("Removing join path:{} as columns :{} dont exist", jp, cubeCols);
-          paths.remove(i);
-          i--;
-        }
-      }
-    }
-    pruneEmptyPaths(prunedPaths);
-    return prunedPaths;
-  }
-
-  private void pruneAllPaths(final Map<Dimension, CandidateDim> dimsToQuery) {
-    // Remove join paths which cannot be satisfied by the resolved dimension
-    // tables
-    if (dimsToQuery != null && !dimsToQuery.isEmpty()) {
-      for (CandidateDim candidateDim : dimsToQuery.values()) {
-        Set<String> dimCols = candidateDim.dimtable.getAllFieldNames();
-        for (List<SchemaGraph.JoinPath> paths : allPaths.values()) {
-          for (int i = 0; i < paths.size(); i++) {
-            SchemaGraph.JoinPath jp = paths.get(i);
-            List<String> candidateDimCols = jp.getColumnsForTable(candidateDim.getBaseTable());
-            if (candidateDimCols != null && !dimCols.containsAll(candidateDimCols)) {
-              // This path requires some columns from the dimension which are
-              // not present in the candidate dim
-              // Remove this path
-              log.info("Removing join path:{} as columns :{} dont exist", jp, candidateDimCols);
-              paths.remove(i);
-              i--;
-            }
-          }
-        }
-      }
-      pruneEmptyPaths(allPaths);
-    }
-  }
-
-  /**
-   * There can be multiple join paths between a dimension and the target. Set of all possible join clauses is the
-   * cartesian product of join paths of all dimensions
-   */
-  private Iterator<JoinClause> getJoinClausesForAllPaths(final CandidateFact fact,
-    final Set<Dimension> qdims, final CubeQueryContext cubeql) {
-    Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> allPaths;
-    // if fact is passed only look at paths possible from fact to dims
-    if (fact != null) {
-      allPaths = pruneFactPaths(cubeql.getCube(), fact);
-    } else {
-      allPaths = new LinkedHashMap<Aliased<Dimension>, List<SchemaGraph.JoinPath>>(this.allPaths);
-    }
-    // prune allPaths with qdims
-    log.info("pruning allPaths before generating all permutations.");
-    log.info("allPaths: {}", allPaths);
-    log.info("qdims: {}", qdims);
-    pruneAllPathsWithQueriedDims(allPaths, qdims);
-
-    // Number of paths in each path set
-    final int[] groupSizes = new int[allPaths.values().size()];
-    // Total number of elements in the cartesian product
-    int numSamples = 1;
-    // All path sets
-    final List<List<SchemaGraph.JoinPath>> pathSets = new ArrayList<List<SchemaGraph.JoinPath>>();
-    // Dimension corresponding to the path sets
-    final List<Aliased<Dimension>> dimensions = new ArrayList<Aliased<Dimension>>(groupSizes.length);
-
-    int i = 0;
-    for (Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>> entry : allPaths.entrySet()) {
-      dimensions.add(entry.getKey());
-      List<SchemaGraph.JoinPath> group = entry.getValue();
-      pathSets.add(group);
-      groupSizes[i] = group.size();
-      numSamples *= groupSizes[i];
-      i++;
-    }
-
-    final int[] selection = new int[groupSizes.length];
-    final int MAX_SAMPLE_COUNT = numSamples;
-
-    // Return a lazy iterator over all possible join chains
-    return new Iterator<JoinClause>() {
-      int sample = 0;
-
-      @Override
-      public boolean hasNext() {
-        return sample < MAX_SAMPLE_COUNT;
-      }
-
-      @Override
-      public JoinClause next() {
-        Map<Aliased<Dimension>, List<SchemaGraph.TableRelationship>> chain
-          = new LinkedHashMap<Aliased<Dimension>, List<SchemaGraph.TableRelationship>>();
-        //generate next permutation.
-        for (int i = groupSizes.length - 1, base = sample; i >= 0; base /= groupSizes[i], i--) {
-          selection[i] = base % groupSizes[i];
-        }
-        for (int i = 0; i < selection.length; i++) {
-          int selectedPath = selection[i];
-          List<SchemaGraph.TableRelationship> path = pathSets.get(i).get(selectedPath).getEdges();
-          chain.put(dimensions.get(i), path);
-        }
-
-        Set<Dimension> dimsOnPath = getDimsOnPath(chain, qdims);
-
-        sample++;
-        // Cost of join = number of tables joined in the clause
-        return new JoinClause(cubeql, chain, dimsOnPath);
-      }
-
-      @Override
-      public void remove() {
-        throw new UnsupportedOperationException("Cannot remove elements!");
-      }
-    };
-  }
-
-  /**
-   * Given allPaths, it will remove entries where key is a non-join chain dimension and not contained in qdims
-   *
-   * @param allPaths
-   * @param qdims
-   */
-  private void pruneAllPathsWithQueriedDims(Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> allPaths,
-    Set<Dimension> qdims) {
-    Iterator<Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>>> iter = allPaths.entrySet().iterator();
-    while (iter.hasNext()) {
-      Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>> cur = iter.next();
-      if (!qdims.contains(cur.getKey().getObject())) {
-        log.info("removing from allPaths: {}", cur);
-        iter.remove();
-      }
-    }
-  }
-
-  public Set<Dimension> pickOptionalTables(final CandidateFact fact,
-    Set<Dimension> qdims, CubeQueryContext cubeql) throws LensException {
-    // Find the min cost join clause and add dimensions in the clause as optional dimensions
-    Set<Dimension> joiningOptionalTables = new HashSet<Dimension>();
-    if (qdims == null) {
-      return joiningOptionalTables;
-    }
-    // find least cost path
-    Iterator<JoinClause> itr = getJoinClausesForAllPaths(fact, qdims, cubeql);
-    JoinClause minCostClause = null;
-    while (itr.hasNext()) {
-      JoinClause clause = itr.next();
-      if (minCostClause == null || minCostClause.getCost() > clause.getCost()) {
-        minCostClause = clause;
-      }
-    }
-
-    if (minCostClause == null) {
-      throw new LensException(LensCubeErrorCode.NO_JOIN_PATH.getLensErrorInfo(),
-          qdims.toString(), autoJoinTarget.getName());
-    }
-
-    log.info("Fact: {} minCostClause:{}", fact, minCostClause);
-    if (fact != null) {
-      cubeql.getAutoJoinCtx().getFactClauses().put(fact, minCostClause);
-    } else {
-      cubeql.getAutoJoinCtx().setMinCostClause(minCostClause);
-    }
-    for (Dimension dim : minCostClause.getDimsInPath()) {
-      if (!qdims.contains(dim)) {
-        joiningOptionalTables.add(dim);
-      }
-    }
-
-    minCostClause.initChainColumns();
-    // prune candidate dims of joiningOptionalTables wrt joinging columns
-    for (Dimension dim : joiningOptionalTables) {
-      for (Iterator<CandidateDim> i = cubeql.getCandidateDimTables().get(dim).iterator(); i.hasNext();) {
-        CandidateDim cdim = i.next();
-        CubeDimensionTable dimtable = cdim.dimtable;
-        if (!cdim.getColumns().containsAll(minCostClause.chainColumns.get(dim))) {
-          i.remove();
-          log.info("Not considering dimtable:{} as its columns are not part of any join paths. Join columns:{}",
-            dimtable, minCostClause.chainColumns.get(dim));
-          cubeql.addDimPruningMsgs(dim, cdim.dimtable,
-            CandidateTablePruneCause.noColumnPartOfAJoinPath(minCostClause.chainColumns.get(dim)));
-        }
-      }
-      if (cubeql.getCandidateDimTables().get(dim).size() == 0) {
-        throw new LensException(LensCubeErrorCode.NO_DIM_HAS_COLUMN.getLensErrorInfo(), dim.getName(),
-          minCostClause.chainColumns.get(dim).toString());
-      }
-    }
-
-    return joiningOptionalTables;
-  }
-
-  public Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> getAllPaths() {
-    return allPaths;
-  }
-
-  public boolean isReachableDim(Dimension dim) {
-    Aliased<Dimension> aliased = Aliased.create(dim);
-    return isReachableDim(aliased);
-  }
-
-  public boolean isReachableDim(Dimension dim, String alias) {
-    Aliased<Dimension> aliased = Aliased.create(dim, alias);
-    return isReachableDim(aliased);
-  }
-
-  private boolean isReachableDim(Aliased<Dimension> aliased) {
-    return allPaths.containsKey(aliased) && !allPaths.get(aliased).isEmpty();
-  }
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java
index 64dff16..4dcdbcf 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java
@@ -18,9 +18,7 @@
  */
 package org.apache.lens.cube.parse;
 
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Set;
+import java.util.*;
 
 import org.apache.lens.cube.metadata.CubeDimensionTable;
 import org.apache.lens.cube.metadata.Dimension;
@@ -45,15 +43,19 @@ public class CandidateDim implements CandidateTable {
   @Setter
   private String whereClause;
   private boolean dbResolved = false;
-  private boolean whereClauseAdded = false;
+  private Map<String, Boolean> whereClauseAdded = new HashMap<>();
   private Dimension baseTable;
 
   public boolean isWhereClauseAdded() {
-    return whereClauseAdded;
+    return !whereClauseAdded.isEmpty();
   }
 
-  public void setWhereClauseAdded() {
-    this.whereClauseAdded = true;
+  public boolean isWhereClauseAdded(String alias) {
+    return whereClauseAdded.get(alias) == null ? false : whereClauseAdded.get(alias);
+  }
+
+  public void setWhereClauseAdded(String alias) {
+    this.whereClauseAdded.put(alias, true);
   }
 
   CandidateDim(CubeDimensionTable dimtable, Dimension dim) {

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index 1fd1d17..06c2a0b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -34,6 +34,8 @@ import org.apache.lens.cube.error.NoCandidateDimAvailableException;
 import org.apache.lens.cube.error.NoCandidateFactAvailableException;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
+import org.apache.lens.cube.parse.join.AutoJoinContext;
+import org.apache.lens.cube.parse.join.JoinUtils;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
@@ -387,7 +389,7 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
   // required by a candidate table to get a denormalized field from reference
   // or required in a join chain
   @ToString
-  static class OptionalDimCtx {
+  public static class OptionalDimCtx {
     OptionalDimCtx() {
     }
 
@@ -407,44 +409,40 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
 
   public void addOptionalDimTable(String alias, CandidateTable candidate, boolean isRequiredInJoin, String cubeCol,
     boolean isRef, String... cols) throws LensException {
-    addOptionalDimTable(alias, candidate, isRequiredInJoin, cubeCol, true, null, cols);
+    addOptionalDimTable(alias, candidate, isRequiredInJoin, cubeCol, isRef, null, cols);
   }
 
   private void addOptionalDimTable(String alias, CandidateTable candidate, boolean isRequiredInJoin, String cubeCol,
     boolean isRef, String tableAlias, String... cols) throws LensException {
     alias = alias.toLowerCase();
-    try {
-      if (!addQueriedTable(alias, true)) {
-        throw new SemanticException("Could not add queried table or chain:" + alias);
-      }
-      Dimension dim = (Dimension) cubeTbls.get(alias);
-      OptionalDimCtx optDim = optionalDimensions.get(dim);
-      if (optDim == null) {
-        optDim = new OptionalDimCtx();
-        optionalDimensions.put(dim, optDim);
-      }
-      if (cols != null && candidate != null) {
-        for (String col : cols) {
-          optDim.colQueried.add(col);
-        }
-        optDim.requiredForCandidates.add(candidate);
-      }
-      if (cubeCol != null) {
-        if (isRef) {
-          updateRefColDim(cubeCol, dim);
-        } else {
-          updateExprColDim(tableAlias, cubeCol, dim);
-        }
-      }
-      if (!optDim.isRequiredInJoinChain) {
-        optDim.isRequiredInJoinChain = isRequiredInJoin;
+    if (!addQueriedTable(alias, true)) {
+      throw new LensException(LensCubeErrorCode.QUERIED_TABLE_NOT_FOUND.getLensErrorInfo(), alias);
+    }
+    Dimension dim = (Dimension) cubeTbls.get(alias);
+    OptionalDimCtx optDim = optionalDimensions.get(dim);
+    if (optDim == null) {
+      optDim = new OptionalDimCtx();
+      optionalDimensions.put(dim, optDim);
+    }
+    if (cols != null && candidate != null) {
+      for (String col : cols) {
+        optDim.colQueried.add(col);
       }
-      if (log.isDebugEnabled()) {
-        log.debug("Adding optional dimension:{} optDim:{} {} isRef:{}", dim , optDim,
-          (cubeCol == null ? "" : " for column:" + cubeCol),  isRef);
+      optDim.requiredForCandidates.add(candidate);
+    }
+    if (cubeCol != null) {
+      if (isRef) {
+        updateRefColDim(cubeCol, dim);
+      } else {
+        updateExprColDim(tableAlias, cubeCol, dim);
       }
-    } catch (HiveException e) {
-      throw new LensException(e);
+    }
+    if (!optDim.isRequiredInJoinChain) {
+      optDim.isRequiredInJoinChain = isRequiredInJoin;
+    }
+    if (log.isDebugEnabled()) {
+      log.debug("Adding optional dimension:{} optDim:{} {} isRef:{}", dim, optDim,
+        (cubeCol == null ? "" : " for column:" + cubeCol), isRef);
     }
   }
 
@@ -684,10 +682,13 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
     String fromString;
     if (getJoinAST() == null) {
       if (cube != null) {
+        if (dimensions.size() > 0) {
+          throw new LensException(LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE.getLensErrorInfo());
+        }
         fromString = fact.getStorageString(getAliasForTableName(cube.getName()));
       } else {
         if (dimensions.size() != 1) {
-          throw new LensException(LensCubeErrorCode.NO_JOIN_CONDITION_AVAIABLE.getLensErrorInfo());
+          throw new LensException(LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE.getLensErrorInfo());
         }
         Dimension dim = dimensions.iterator().next();
         fromString = dimsToQuery.get(dim).getStorageString(getAliasForTableName(dim.getName()));
@@ -702,7 +703,7 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
 
   private void getQLString(QBJoinTree joinTree, StringBuilder builder, CandidateFact fact,
     Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
-    String joiningTable = null;
+    List<String> joiningTables = new ArrayList<>();
     if (joinTree.getBaseSrc()[0] == null) {
       if (joinTree.getJoinSrc() != null) {
         getQLString(joinTree.getJoinSrc(), builder, fact, dimsToQuery);
@@ -710,12 +711,10 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
     } else { // (joinTree.getBaseSrc()[0] != null){
       String alias = joinTree.getBaseSrc()[0].toLowerCase();
       builder.append(getStorageStringWithAlias(fact, dimsToQuery, alias));
-      if (joinTree.getJoinCond()[0].getJoinType().equals(JoinType.RIGHTOUTER)) {
-        joiningTable = alias;
-      }
+      joiningTables.add(alias);
     }
     if (joinTree.getJoinCond() != null) {
-      builder.append(JoinResolver.getJoinTypeStr(joinTree.getJoinCond()[0].getJoinType()));
+      builder.append(JoinUtils.getJoinTypeStr(joinTree.getJoinCond()[0].getJoinType()));
       builder.append(" JOIN ");
     }
     if (joinTree.getBaseSrc()[1] == null) {
@@ -725,22 +724,24 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
     } else { // (joinTree.getBaseSrc()[1] != null){
       String alias = joinTree.getBaseSrc()[1].toLowerCase();
       builder.append(getStorageStringWithAlias(fact, dimsToQuery, alias));
-      if (joinTree.getJoinCond()[0].getJoinType().equals(JoinType.LEFTOUTER)) {
-        joiningTable = alias;
-      }
+      joiningTables.add(alias);
     }
 
     String joinCond = joinConds.get(joinTree);
     if (joinCond != null) {
       builder.append(" ON ");
       builder.append(joinCond);
-      if (joiningTable != null) {
-        // assuming the joining table to be dimension table
-        DimOnlyHQLContext.appendWhereClause(builder, getWhereClauseWithAlias(dimsToQuery, joiningTable), true);
-        dimsToQuery.get(cubeTbls.get(joiningTable)).setWhereClauseAdded();
+      // joining tables will contain all tables involved in joins.
+      // we need to push storage filters for Dimensions into join conditions, thus the following code
+      // takes care of the same.
+      for (String joiningTable : joiningTables) {
+        if (cubeTbls.get(joiningTable) instanceof Dimension) {
+          DimOnlyHQLContext.appendWhereClause(builder, getWhereClauseWithAlias(dimsToQuery, joiningTable), true);
+          dimsToQuery.get(cubeTbls.get(joiningTable)).setWhereClauseAdded(joiningTable);
+        }
       }
     } else {
-      throw new LensException(LensCubeErrorCode.NO_JOIN_CONDITION_AVAIABLE.getLensErrorInfo());
+      throw new LensException(LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE.getLensErrorInfo());
     }
   }
 
@@ -1062,7 +1063,6 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
 
     return isCubeMeasure(msrname);
   }
-
   public boolean isAggregateExpr(String expr) {
     return aggregateExprs.contains(expr == null ? null : expr.toLowerCase());
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
index 5c8bd84..c83b9ac 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
@@ -25,7 +25,7 @@ import java.util.*;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.*;
-import org.apache.lens.cube.metadata.ReferencedDimAtrribute.ChainRefCol;
+import org.apache.lens.cube.metadata.ReferencedDimAttribute.ChainRefCol;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.cube.parse.ExpressionResolver.ExprSpecContext;
 import org.apache.lens.cube.parse.ExpressionResolver.ExpressionContext;
@@ -54,15 +54,13 @@ public class DenormalizationResolver implements ContextRewriter {
 
   @ToString
   public static class ReferencedQueriedColumn {
-    ReferencedDimAtrribute col;
+    ReferencedDimAttribute col;
     AbstractCubeTable srcTable;
-    transient List<TableReference> references = new ArrayList<>();
     transient List<ChainRefCol> chainRefCols = new ArrayList<>();
 
-    ReferencedQueriedColumn(ReferencedDimAtrribute col, AbstractCubeTable srcTable) {
+    ReferencedQueriedColumn(ReferencedDimAttribute col, AbstractCubeTable srcTable) {
       this.col = col;
       this.srcTable = srcTable;
-      references.addAll(col.getReferences());
       chainRefCols.addAll(col.getChainRefColumns());
     }
   }
@@ -151,16 +149,9 @@ public class DenormalizationResolver implements ContextRewriter {
             }
             refCols.add(refer);
             // Add to optional tables
-            if (refer.col.isChainedColumn()) {
-              for (ChainRefCol refCol : refer.col.getChainRefColumns()) {
-                cubeql.addOptionalDimTable(refCol.getChainName(), table, false, refer.col.getName(), true,
-                  refCol.getRefColumn());
-              }
-            } else {
-              for (TableReference reference : refer.col.getReferences()) {
-                cubeql.addOptionalDimTable(reference.getDestTable(), table, false, refer.col.getName(), true,
-                  reference.getDestColumn());
-              }
+            for (ChainRefCol refCol : refer.col.getChainRefColumns()) {
+              cubeql.addOptionalDimTable(refCol.getChainName(), table, false, refer.col.getName(), true,
+                refCol.getRefColumn());
             }
             return true;
           }
@@ -240,42 +231,23 @@ public class DenormalizationResolver implements ContextRewriter {
     private void pickColumnsForTable(String tbl) throws LensException {
       if (tableToRefCols.containsKey(tbl)) {
         for (ReferencedQueriedColumn refered : tableToRefCols.get(tbl)) {
-          if (!refered.col.isChainedColumn()) {
-            Iterator<TableReference> iter = refered.references.iterator();
-            while (iter.hasNext()) {
-              // remove unreachable references
-              TableReference reference = iter.next();
-              if (!cubeql.getAutoJoinCtx().isReachableDim(
-                (Dimension) cubeql.getCubeTableForAlias(reference.getDestTable()))) {
-                iter.remove();
-              }
-            }
-            if (refered.references.isEmpty()) {
-              throw new LensException(LensCubeErrorCode.NO_REF_COL_AVAILABLE.getLensErrorInfo(), refered);
-            }
-            PickedReference picked = new PickedReference(refered.references.iterator().next(),
-              cubeql.getAliasForTableName(refered.srcTable.getName()), tbl);
-            addPickedReference(refered.col.getName(), picked);
-            pickedRefs.add(picked);
-          } else {
-            Iterator<ChainRefCol> iter = refered.chainRefCols.iterator();
-            while (iter.hasNext()) {
-              // remove unreachable references
-              ChainRefCol reference = iter.next();
-              if (!cubeql.getAutoJoinCtx().isReachableDim(
-                (Dimension) cubeql.getCubeTableForAlias(reference.getChainName()), reference.getChainName())) {
-                iter.remove();
-              }
-            }
-            if (refered.chainRefCols.isEmpty()) {
-              throw new LensException("No chain reference column available for " + refered);
+          Iterator<ChainRefCol> iter = refered.chainRefCols.iterator();
+          while (iter.hasNext()) {
+            // remove unreachable references
+            ChainRefCol reference = iter.next();
+            if (!cubeql.getAutoJoinCtx().isReachableDim(
+              (Dimension) cubeql.getCubeTableForAlias(reference.getChainName()), reference.getChainName())) {
+              iter.remove();
             }
-            PickedReference picked =
-              new PickedReference(refered.chainRefCols.iterator().next(),
-                cubeql.getAliasForTableName(refered.srcTable.getName()), tbl);
-            addPickedReference(refered.col.getName(), picked);
-            pickedRefs.add(picked);
           }
+          if (refered.chainRefCols.isEmpty()) {
+            throw new LensException(LensCubeErrorCode.NO_REF_COL_AVAILABLE.getLensErrorInfo(), refered.col.getName());
+          }
+          PickedReference picked =
+            new PickedReference(refered.chainRefCols.iterator().next(),
+              cubeql.getAliasForTableName(refered.srcTable.getName()), tbl);
+          addPickedReference(refered.col.getName(), picked);
+          pickedRefs.add(picked);
         }
       }
     }
@@ -348,9 +320,9 @@ public class DenormalizationResolver implements ContextRewriter {
         } else {
           col = ((Dimension) tbl).getColumnByName(column);
         }
-        if (col instanceof ReferencedDimAtrribute) {
+        if (col instanceof ReferencedDimAttribute) {
           // considering all referenced dimensions to be denormalized columns
-          denormCtx.addReferencedCol(column, new ReferencedQueriedColumn((ReferencedDimAtrribute) col, tbl));
+          denormCtx.addReferencedCol(column, new ReferencedQueriedColumn((ReferencedDimAttribute) col, tbl));
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
index b253b94..318c82a 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
@@ -105,9 +105,9 @@ abstract class DimHQLContext extends SimpleHQLContext {
       boolean added = (originalWhere != null);
       for (Dimension dim : queriedDims) {
         CandidateDim cdim = dimsToQuery.get(dim);
+        String alias = query.getAliasForTableName(dim.getName());
         if (!cdim.isWhereClauseAdded() && !StringUtils.isBlank(cdim.getWhereClause())) {
-          appendWhereClause(whereBuf, StorageUtil.getWhereClause(cdim, query.getAliasForTableName(dim.getName())),
-            added);
+          appendWhereClause(whereBuf, StorageUtil.getWhereClause(cdim, alias), added);
           added = true;
         }
       }

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
index ab7a6d8..36ee9d4 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
@@ -24,8 +24,8 @@ import org.apache.lens.cube.error.ConflictingFields;
 import org.apache.lens.cube.error.FieldsCannotBeQueriedTogetherException;
 import org.apache.lens.cube.metadata.CubeInterface;
 import org.apache.lens.cube.metadata.DerivedCube;
-import org.apache.lens.cube.metadata.ReferencedDimAtrribute;
-import org.apache.lens.cube.metadata.ReferencedDimAtrribute.ChainRefCol;
+import org.apache.lens.cube.metadata.ReferencedDimAttribute;
+import org.apache.lens.cube.metadata.ReferencedDimAttribute.ChainRefCol;
 import org.apache.lens.cube.parse.ExpressionResolver.ExprSpecContext;
 import org.apache.lens.server.api.error.LensException;
 
@@ -166,9 +166,8 @@ public class FieldValidator implements ContextRewriter {
 
               // If this is a referenced dim attribute leading to a chain, then instead of adding this
               // column, we add the source columns of the chain.
-              if (cube.getDimAttributeByName(colName) instanceof ReferencedDimAtrribute
-                && ((ReferencedDimAtrribute) cube.getDimAttributeByName(colName)).isChainedColumn()) {
-                ReferencedDimAtrribute rdim = (ReferencedDimAtrribute) cube.getDimAttributeByName(colName);
+              if (cube.getDimAttributeByName(colName) instanceof ReferencedDimAttribute) {
+                ReferencedDimAttribute rdim = (ReferencedDimAttribute) cube.getDimAttributeByName(colName);
                 for (ChainRefCol refCol : rdim.getChainRefColumns()) {
                   chainSourceColumns.addAll(cube.getChainByName(refCol.getChainName()).getSourceColumns());
                 }

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
index bfb65c7..b1deb07 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
@@ -206,7 +206,7 @@ public final class HQLParser {
     }
 
     System.out.print(node.getText() + " [" + tokenMapping.get(node.getToken().getType()) + "]");
-    System.out.print(" (l" + level + "c" + child + "p" + node.getCharPositionInLine() +")");
+    System.out.print(" (l" + level + "c" + child + "p" + node.getCharPositionInLine() + ")");
 
     if (node.getChildCount() > 0) {
       System.out.println(" {");

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinClause.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinClause.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinClause.java
deleted file mode 100644
index d9a8249..0000000
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinClause.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import java.util.*;
-
-import org.apache.lens.cube.metadata.AbstractCubeTable;
-import org.apache.lens.cube.metadata.Dimension;
-import org.apache.lens.cube.metadata.SchemaGraph;
-
-import org.apache.hadoop.hive.ql.parse.JoinType;
-
-import lombok.Getter;
-import lombok.ToString;
-
-@ToString
-public class JoinClause implements Comparable<JoinClause> {
-  private final int cost;
-  // all dimensions in path except target
-  @Getter
-  private final Set<Dimension> dimsInPath;
-  private CubeQueryContext cubeql;
-  private final Map<Aliased<Dimension>, List<SchemaGraph.TableRelationship>> chain;
-  @Getter
-  private final JoinTree joinTree;
-  transient Map<AbstractCubeTable, Set<String>> chainColumns = new HashMap<AbstractCubeTable, Set<String>>();
-
-  public JoinClause(CubeQueryContext cubeql, Map<Aliased<Dimension>,
-    List<SchemaGraph.TableRelationship>> chain, Set<Dimension> dimsInPath) {
-    this.cubeql = cubeql;
-    this.chain = chain;
-    this.joinTree = mergeJoinChains(chain);
-    this.cost = joinTree.getNumEdges();
-    this.dimsInPath = dimsInPath;
-  }
-
-  void initChainColumns() {
-    for (List<SchemaGraph.TableRelationship> path : chain.values()) {
-      for (SchemaGraph.TableRelationship edge : path) {
-        Set<String> fcols = chainColumns.get(edge.getFromTable());
-        if (fcols == null) {
-          fcols = new HashSet<String>();
-          chainColumns.put(edge.getFromTable(), fcols);
-        }
-        fcols.add(edge.getFromColumn());
-
-        Set<String> tocols = chainColumns.get(edge.getToTable());
-        if (tocols == null) {
-          tocols = new HashSet<String>();
-          chainColumns.put(edge.getToTable(), tocols);
-        }
-        tocols.add(edge.getToColumn());
-      }
-    }
-  }
-
-  public int getCost() {
-    return cost;
-  }
-
-  @Override
-  public int compareTo(JoinClause joinClause) {
-    return cost - joinClause.getCost();
-  }
-
-  /**
-   * Takes chains and merges them in the form of a tree. If two chains have some common path till some table and
-   * bifurcate from there, then in the chain, both paths will have the common path but the resultant tree will have
-   * single path from root(cube) to that table and paths will bifurcate from there.
-   * <p/>
-   * For example, citystate   =   [basecube.cityid=citydim.id], [citydim.stateid=statedim.id]
-   *              cityzip     =   [basecube.cityid=citydim.id], [citydim.zipcode=zipdim.code]
-   * <p/>
-   * Without merging, the behaviour is like this:
-   * <p/>
-   * <p/>
-   *                  (basecube.cityid=citydim.id)          (citydim.stateid=statedim.id)
-   *                  _____________________________citydim____________________________________statedim
-   *                 |
-   *   basecube------|
-   *                 |_____________________________citydim____________________________________zipdim
-   *
-   *                  (basecube.cityid=citydim.id)          (citydim.zipcode=zipdim.code)
-   *
-   * <p/>
-   * Merging will result in a tree like following
-   * <p/>                                                  (citydim.stateid=statedim.id)
-   * <p/>                                                ________________________________ statedim
-   *             (basecube.cityid=citydim.id)           |
-   * basecube-------------------------------citydim---- |
-   *                                                    |________________________________  zipdim
-   *
-   *                                                       (citydim.zipcode=zipdim.code)
-   *
-   * <p/>
-   * Doing this will reduce the number of joins wherever possible.
-   *
-   * @param chain Joins in Linear format.
-   * @return Joins in Tree format
-   */
-  public JoinTree mergeJoinChains(Map<Aliased<Dimension>, List<SchemaGraph.TableRelationship>> chain) {
-    Map<String, Integer> aliasUsage = new HashMap<String, Integer>();
-    JoinTree root = JoinTree.createRoot();
-    for (Map.Entry<Aliased<Dimension>, List<SchemaGraph.TableRelationship>> entry : chain.entrySet()) {
-      JoinTree current = root;
-      // Last element in this list is link from cube to first dimension
-      for (int i = entry.getValue().size() - 1; i >= 0; i--) {
-        // Adds a child if needed, or returns a child already existing corresponding to the given link.
-        current = current.addChild(entry.getValue().get(i), cubeql, aliasUsage);
-        if (cubeql.getAutoJoinCtx().isPartialJoinChains()) {
-          JoinType joinType = cubeql.getAutoJoinCtx().getTableJoinTypeMap().get(entry.getKey().getObject());
-          //This ensures if (sub)paths are same, but join type is not same, merging will not happen.
-          current.setJoinType(joinType);
-        }
-      }
-      // This is a destination table. Decide alias separately. e.g. chainname
-      // nullcheck is necessary because dimensions can be destinations too. In that case getAlias() == null
-      if (entry.getKey().getAlias() != null) {
-        current.setAlias(entry.getKey().getAlias());
-      }
-    }
-    if (root.getSubtrees().size() > 0) {
-      root.setAlias(cubeql.getAliasForTableName(
-        root.getSubtrees().keySet().iterator().next().getFromTable().getName()));
-    }
-    return root;
-  }
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
index de3a16e..b861bb6 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
@@ -24,14 +24,16 @@ import java.util.*;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.*;
-import org.apache.lens.cube.metadata.SchemaGraph.TableRelationship;
-import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
+import org.apache.lens.cube.metadata.join.JoinPath;
+import org.apache.lens.cube.parse.join.AutoJoinContext;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.*;
 
+import com.google.common.collect.Sets;
+
 import lombok.extern.slf4j.Slf4j;
 
 /**
@@ -39,41 +41,15 @@ import lombok.extern.slf4j.Slf4j;
  */
 @Slf4j
 class JoinResolver implements ContextRewriter {
-
-  private Map<AbstractCubeTable, String> partialJoinConditions;
   private Map<AbstractCubeTable, JoinType> tableJoinTypeMap;
-  private boolean partialJoinChain;
   private AbstractCubeTable target;
   private HashMap<Dimension, List<JoinChain>> dimensionInJoinChain = new HashMap<Dimension, List<JoinChain>>();
 
   public JoinResolver(Configuration conf) {
   }
 
-  static String getJoinTypeStr(JoinType joinType) {
-    if (joinType == null) {
-      return "";
-    }
-    switch (joinType) {
-    case FULLOUTER:
-      return " full outer";
-    case INNER:
-      return " inner";
-    case LEFTOUTER:
-      return " left outer";
-    case LEFTSEMI:
-      return " left semi";
-    case UNIQUE:
-      return " unique";
-    case RIGHTOUTER:
-      return " right outer";
-    default:
-      return "";
-    }
-  }
-
   @Override
   public void rewriteContext(CubeQueryContext cubeql) throws LensException {
-    partialJoinConditions = new HashMap<AbstractCubeTable, String>();
     tableJoinTypeMap = new HashMap<AbstractCubeTable, JoinType>();
     try {
       resolveJoins(cubeql);
@@ -86,9 +62,25 @@ class JoinResolver implements ContextRewriter {
     QB cubeQB = cubeql.getQb();
     boolean joinResolverDisabled = cubeql.getConf().getBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS,
         CubeQueryConfUtil.DEFAULT_DISABLE_AUTO_JOINS);
+
+    if (!joinResolverDisabled && (!cubeql.getNonChainedDimensions().isEmpty() && cubeql.hasCubeInQuery())
+      || ((cubeql.getNonChainedDimensions().size() > 1) && !cubeql.hasCubeInQuery())) {
+      log.warn("Disabling auto join resolver as there are direct dimensions queried");
+      joinResolverDisabled = true;
+    }
     if (joinResolverDisabled) {
       if (cubeql.getJoinAST() != null) {
         cubeQB.setQbJoinTree(genJoinTree(cubeql.getJoinAST(), cubeql));
+      } else {
+        if (cubeql.hasCubeInQuery()) {
+          if (!cubeql.getNonChainedDimensions().isEmpty()) {
+            throw new LensException(LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE.getLensErrorInfo());
+          }
+        } else {
+          if (cubeql.getNonChainedDimensions().size() > 1) {
+            throw new LensException(LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE.getLensErrorInfo());
+          }
+        }
       }
     } else {
       autoResolveJoins(cubeql);
@@ -118,37 +110,28 @@ class JoinResolver implements ContextRewriter {
    * @throws HiveException
    */
   private void autoResolveJoins(CubeQueryContext cubeql) throws LensException, HiveException {
-    // Check if this query needs a join -
-    // A join is needed if there is a cube and at least one dimension, or, 0
-    // cubes and more than one
-    // dimensions
+    if (cubeql.getJoinchains().isEmpty()) {
+      // Joins not required
+      log.info("No dimension tables to resolve and no join chains present!");
+      return;
+    }
     processJoinChains(cubeql);
-    Set<Dimension> dimensions = cubeql.getNonChainedDimensions();
-    // Add dimensions specified in the partial join tree
-    ASTNode joinClause = cubeql.getQb().getParseInfo().getJoinExpr();
-    if (joinClause == null) {
+    // Find the target
+    if (cubeql.hasCubeInQuery()) {
       // Only cube in the query
-      if (cubeql.hasCubeInQuery()) {
-        target = (AbstractCubeTable) cubeql.getCube();
-      } else {
-        String targetDimAlias = cubeql.getQb().getTabAliases().iterator().next();
-        String targetDimTable = cubeql.getQb().getTabNameForAlias(targetDimAlias);
-        if (targetDimTable == null) {
-          log.warn("Null table for alias {}", targetDimAlias);
-          return;
-        }
-        target = cubeql.getMetastoreClient().getDimension(targetDimTable);
+      target = (AbstractCubeTable) cubeql.getCube();
+    } else {
+      String targetDimAlias = cubeql.getQb().getTabAliases().iterator().next();
+      String targetDimTable = cubeql.getQb().getTabNameForAlias(targetDimAlias);
+      if (targetDimTable == null) {
+        log.warn("Null table for alias {}", targetDimAlias);
+        throw new LensException(LensCubeErrorCode.JOIN_TARGET_NOT_CUBE_TABLE.getLensErrorInfo(), targetDimAlias);
+      }
+      target = cubeql.getMetastoreClient().getDimension(targetDimTable);
+      if (target == null) {
+        log.warn("Can't resolve joins for null target");
+        throw new LensException(LensCubeErrorCode.JOIN_TARGET_NOT_CUBE_TABLE.getLensErrorInfo(), targetDimTable);
       }
-    }
-    searchDimensionTables(cubeql.getMetastoreClient(), joinClause);
-    if (target == null) {
-      log.warn("Can't resolve joins for null target");
-      return;
-    }
-
-    Set<Dimension> dimTables = new HashSet<Dimension>(dimensions);
-    for (AbstractCubeTable partiallyJoinedTable : partialJoinConditions.keySet()) {
-      dimTables.add((Dimension) partiallyJoinedTable);
     }
 
     for (JoinChain chain : cubeql.getJoinchains().values()) {
@@ -157,84 +140,14 @@ class JoinResolver implements ContextRewriter {
       }
     }
 
-    // Remove target
-    dimTables.remove(target);
-    if (dimTables.isEmpty() && cubeql.getJoinchains().isEmpty()) {
-      // Joins not required
-      log.info("No dimension tables to resolve and no join chains present!");
-      return;
-    }
-
+    Map<Aliased<Dimension>, List<JoinPath>> multipleJoinPaths = new LinkedHashMap<>();
 
-    SchemaGraph graph = cubeql.getMetastoreClient().getSchemaGraph();
-    Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> multipleJoinPaths =
-      new LinkedHashMap<Aliased<Dimension>, List<SchemaGraph.JoinPath>>();
-
-    // Resolve join path for each dimension accessed in the query
-    for (Dimension joinee : dimTables) {
-      if (dimensionInJoinChain.get(joinee) == null) {
-        // Find all possible join paths
-        SchemaGraph.GraphSearch search = new SchemaGraph.GraphSearch(joinee, target, graph);
-        List<SchemaGraph.JoinPath> joinPaths = search.findAllPathsToTarget();
-        if (joinPaths != null && !joinPaths.isEmpty()) {
-          Aliased<Dimension> aliasedJoinee = Aliased.create(joinee);
-          multipleJoinPaths.put(aliasedJoinee, new ArrayList<SchemaGraph.JoinPath>(search.findAllPathsToTarget()));
-          addOptionalTables(cubeql, multipleJoinPaths.get(aliasedJoinee), cubeql.getDimensions().contains(joinee));
-        } else {
-          // No link to cube from this dim, can't proceed with query
-          if (log.isDebugEnabled()) {
-            graph.print();
-          }
-          log.warn("No join path between {} and {}", joinee.getName(), target.getName());
-          if (cubeql.getDimensions().contains(joinee)) {
-            throw new LensException(LensCubeErrorCode.NO_JOIN_PATH.getLensErrorInfo(),
-                joinee.getName(), target.getName());
-          } else {
-            // if joinee is optional dim table, remove those candidate facts
-            Set<CandidateTable> candidates = cubeql.getOptionalDimensionMap().get(joinee).requiredForCandidates;
-            for (CandidateTable candidate : candidates) {
-              if (candidate instanceof CandidateFact) {
-                if (cubeql.getCandidateFacts().contains(candidate)) {
-                  log.info("Not considering fact:{} as there is no join path to {}", candidate, joinee);
-                  cubeql.getCandidateFacts().remove(candidate);
-                  cubeql.addFactPruningMsgs(((CandidateFact) candidate).fact, new CandidateTablePruneCause(
-                    CandidateTablePruneCode.COLUMN_NOT_FOUND));
-                }
-              } else if (cubeql.getCandidateDimTables().containsKey(((CandidateDim) candidate).getBaseTable())) {
-                log.info("Not considering dimtable:{} as there is no join path to {}", candidate, joinee);
-                cubeql.getCandidateDimTables().get(((CandidateDim) candidate).getBaseTable()).remove(candidate);
-                cubeql.addDimPruningMsgs(
-                  (Dimension) candidate.getBaseTable(), (CubeDimensionTable) candidate.getTable(),
-                  new CandidateTablePruneCause(CandidateTablePruneCode.COLUMN_NOT_FOUND)
-                );
-              }
-            }
-          }
-        }
-      } else if (dimensionInJoinChain.get(joinee).size() > 1) {
-        throw new LensException("Table " + joinee.getName() + " has "
-          +dimensionInJoinChain.get(joinee).size() + " different paths through joinchains "
-          +"(" + dimensionInJoinChain.get(joinee) + ")"
-          +" used in query. Couldn't determine which one to use");
-      } else {
-        // the case when dimension is used only once in all joinchains.
-        if (isJoinchainDestination(cubeql, joinee)) {
-          throw new LensException("Table " + joinee.getName() + " is getting accessed via two different names: "
-            + "[" + dimensionInJoinChain.get(joinee).get(0).getName() + ", " + joinee.getName() + "]");
-        }
-        // table is accessed with chain and no chain
-        if (cubeql.getNonChainedDimensions().contains(joinee)) {
-          throw new LensException("Table " + joinee.getName() + " is getting accessed via joinchain: "
-            + dimensionInJoinChain.get(joinee).get(0).getName() + " and no chain at all");
-        }
-      }
-    }
     // populate paths from joinchains
     for (JoinChain chain : cubeql.getJoinchains().values()) {
       Dimension dimension = cubeql.getMetastoreClient().getDimension(chain.getDestTable());
       Aliased<Dimension> aliasedDimension = Aliased.create(dimension, chain.getName());
       if (multipleJoinPaths.get(aliasedDimension) == null) {
-        multipleJoinPaths.put(aliasedDimension, new ArrayList<SchemaGraph.JoinPath>());
+        multipleJoinPaths.put(aliasedDimension, new ArrayList<JoinPath>());
       }
       multipleJoinPaths.get(aliasedDimension).addAll(
         chain.getRelationEdges(cubeql.getMetastoreClient()));
@@ -243,98 +156,15 @@ class JoinResolver implements ContextRewriter {
       CubeQueryConfUtil.DEFAULT_ENABLE_FLATTENING_FOR_BRIDGETABLES);
     String bridgeTableFieldAggr = cubeql.getConf().get(CubeQueryConfUtil.BRIDGE_TABLE_FIELD_AGGREGATOR,
       CubeQueryConfUtil.DEFAULT_BRIDGE_TABLE_FIELD_AGGREGATOR);
+    Set<Dimension> requiredDimensions = Sets.newHashSet(cubeql.getDimensions());
+    requiredDimensions.removeAll(cubeql.getOptionalDimensions());
     AutoJoinContext joinCtx =
-      new AutoJoinContext(multipleJoinPaths, cubeql.optionalDimensions, partialJoinConditions, partialJoinChain,
+      new AutoJoinContext(multipleJoinPaths, requiredDimensions,
         tableJoinTypeMap, target, cubeql.getConf().get(CubeQueryConfUtil.JOIN_TYPE_KEY), true, flattenBridgeTables,
         bridgeTableFieldAggr);
     cubeql.setAutoJoinCtx(joinCtx);
   }
 
-  private boolean isJoinchainDestination(CubeQueryContext cubeql, Dimension dimension) {
-    for (JoinChain chain : cubeql.getJoinchains().values()) {
-      if (chain.getDestTable().equalsIgnoreCase(dimension.getName())) {
-        return true;
-      }
-    }
-    return false;
-  }
-
-  private void addOptionalTables(CubeQueryContext cubeql, List<SchemaGraph.JoinPath> joinPathList, boolean required)
-    throws LensException {
-    for (SchemaGraph.JoinPath joinPath : joinPathList) {
-      for (TableRelationship rel : joinPath.getEdges()) {
-        // Add the joined tables to the queries table sets so that they are
-        // resolved in candidate resolver
-        cubeql.addOptionalJoinDimTable(rel.getToTable().getName(), required);
-      }
-    }
-  }
-
-  private void setTarget(CubeMetastoreClient metastore, ASTNode node) throws  HiveException, LensException  {
-    String targetTableName = HQLParser.getString(HQLParser.findNodeByPath(node, TOK_TABNAME, Identifier));
-    if (metastore.isDimension(targetTableName)) {
-      target = metastore.getDimension(targetTableName);
-    } else if (metastore.isCube(targetTableName)) {
-      target = (AbstractCubeTable) metastore.getCube(targetTableName);
-    } else {
-      throw new LensException(LensCubeErrorCode.JOIN_TARGET_NOT_CUBE_TABLE.getLensErrorInfo(), targetTableName);
-    }
-  }
-
-  private void searchDimensionTables(CubeMetastoreClient metastore, ASTNode node) throws HiveException, LensException {
-    if (node == null) {
-      return;
-    }
-    // User has specified join conditions partially. We need to store join
-    // conditions as well as join types
-    partialJoinChain = true;
-    if (isJoinToken(node)) {
-      ASTNode left = (ASTNode) node.getChild(0);
-      ASTNode right = (ASTNode) node.getChild(1);
-      // Get table name and
-
-      String tableName = HQLParser.getString(HQLParser.findNodeByPath(right, TOK_TABNAME, Identifier));
-
-      Dimension dimension = metastore.getDimension(tableName);
-      String joinCond = "";
-      if (node.getChildCount() > 2) {
-        // User has specified a join condition for filter pushdown.
-        joinCond = HQLParser.getString((ASTNode) node.getChild(2));
-      }
-      partialJoinConditions.put(dimension, joinCond);
-      tableJoinTypeMap.put(dimension, getJoinType(node));
-      if (isJoinToken(left)) {
-        searchDimensionTables(metastore, left);
-      } else {
-        if (left.getToken().getType() == TOK_TABREF) {
-          setTarget(metastore, left);
-        }
-      }
-    } else if (node.getToken().getType() == TOK_TABREF) {
-      setTarget(metastore, node);
-    }
-
-  }
-
-  private JoinType getJoinType(ASTNode node) {
-    switch (node.getToken().getType()) {
-    case TOK_LEFTOUTERJOIN:
-      return JoinType.LEFTOUTER;
-    case TOK_LEFTSEMIJOIN:
-      return JoinType.LEFTSEMI;
-    case TOK_RIGHTOUTERJOIN:
-      return JoinType.RIGHTOUTER;
-    case TOK_FULLOUTERJOIN:
-      return JoinType.FULLOUTER;
-    case TOK_JOIN:
-      return JoinType.INNER;
-    case TOK_UNIQUEJOIN:
-      return JoinType.UNIQUE;
-    default:
-      return JoinType.INNER;
-    }
-  }
-
   // Recursively find out join conditions
   private QBJoinTree genJoinTree(ASTNode joinParseTree, CubeQueryContext cubeql) throws LensException {
     QBJoinTree joinTree = new QBJoinTree();
@@ -418,7 +248,7 @@ class JoinResolver implements ContextRewriter {
       }
       children[1] = alias;
       joinTree.setBaseSrc(children);
-      // remember rhs table for semijoin
+      // remember rhs table for semi join
       if (!joinTree.getNoSemiJoin()) {
         joinTree.addRHSSemijoin(alias);
       }
@@ -431,7 +261,7 @@ class JoinResolver implements ContextRewriter {
       cubeql.setJoinCond(joinTree, HQLParser.getString(joinCond));
     } else {
       // No join condition specified. this should be an error
-      throw new LensException(LensCubeErrorCode.NO_JOIN_CONDITION_AVAIABLE.getLensErrorInfo());
+      throw new LensException(LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE.getLensErrorInfo());
     }
     return joinTree;
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinTree.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinTree.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinTree.java
deleted file mode 100644
index 5a294af..0000000
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinTree.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import java.util.*;
-
-import org.apache.lens.cube.metadata.AbstractCubeTable;
-import org.apache.lens.cube.metadata.SchemaGraph;
-
-import org.apache.hadoop.hive.ql.parse.JoinType;
-
-import lombok.Data;
-import lombok.EqualsAndHashCode;
-import lombok.ToString;
-
-@Data
-@ToString(exclude = "parent")
-@EqualsAndHashCode(exclude = "parent")
-public class JoinTree {
-  //parent of the node
-  JoinTree parent;
-  // current table is parentRelationship.destTable;
-  SchemaGraph.TableRelationship parentRelationship;
-  // Alias for the join clause
-  String alias;
-  private Map<SchemaGraph.TableRelationship, JoinTree> subtrees =
-    new LinkedHashMap<SchemaGraph.TableRelationship, JoinTree>();
-  // Number of nodes from root to this node. depth of root is 0. Unused for now.
-  private int depthFromRoot;
-  // join type of the current table.
-  JoinType joinType;
-
-  public static JoinTree createRoot() {
-    return new JoinTree(null, null, 0);
-  }
-
-  public JoinTree(JoinTree parent, SchemaGraph.TableRelationship tableRelationship,
-                  int depthFromRoot) {
-    this.parent = parent;
-    this.parentRelationship = tableRelationship;
-    this.depthFromRoot = depthFromRoot;
-  }
-
-  public JoinTree addChild(SchemaGraph.TableRelationship tableRelationship,
-                           CubeQueryContext cubeql, Map<String, Integer> aliasUsage) {
-    if (getSubtrees().get(tableRelationship) == null) {
-      JoinTree current = new JoinTree(this, tableRelationship,
-        this.depthFromRoot + 1);
-      // Set alias. Need to compute only when new node is being created.
-      // The following code ensures that For intermediate tables, aliases are given
-      // in the order citydim, citydim_0, citydim_1, ...
-      // And for destination tables, an alias will be decided from here but might be
-      // overridden outside this function.
-      AbstractCubeTable destTable = tableRelationship.getToTable();
-      current.setAlias(cubeql.getAliasForTableName(destTable.getName()));
-      if (aliasUsage.get(current.getAlias()) == null) {
-        aliasUsage.put(current.getAlias(), 0);
-      } else {
-        aliasUsage.put(current.getAlias(), aliasUsage.get(current.getAlias()) + 1);
-        current.setAlias(current.getAlias() + "_" + (aliasUsage.get(current.getAlias()) - 1));
-      }
-      getSubtrees().put(tableRelationship, current);
-    }
-    return getSubtrees().get(tableRelationship);
-  }
-
-  // Recursive computation of number of edges.
-  public int getNumEdges() {
-    int ret = 0;
-    for (JoinTree tree : getSubtrees().values()) {
-      ret += 1;
-      ret += tree.getNumEdges();
-    }
-    return ret;
-  }
-
-  public boolean isLeaf() {
-    return getSubtrees().isEmpty();
-  }
-
-  // Breadth First Traversal. Unused currently.
-  public Iterator<JoinTree> bft() {
-    return new Iterator<JoinTree>() {
-      List<JoinTree> remaining = new ArrayList<JoinTree>() {
-        {
-          addAll(getSubtrees().values());
-        }
-      };
-
-      @Override
-      public boolean hasNext() {
-        return remaining.isEmpty();
-      }
-
-      @Override
-      public JoinTree next() {
-        JoinTree retval = remaining.remove(0);
-        remaining.addAll(retval.getSubtrees().values());
-        return retval;
-      }
-
-      @Override
-      public void remove() {
-        throw new RuntimeException("Not implemented");
-      }
-    };
-  }
-
-  // Depth first traversal of the tree. Used in forming join string.
-  public Iterator<JoinTree> dft() {
-    return new Iterator<JoinTree>() {
-      Stack<JoinTree> joinTreeStack = new Stack<JoinTree>() {
-        {
-          addAll(getSubtrees().values());
-        }
-      };
-
-      @Override
-      public boolean hasNext() {
-        return !joinTreeStack.isEmpty();
-      }
-
-      @Override
-      public JoinTree next() {
-        JoinTree retval = joinTreeStack.pop();
-        joinTreeStack.addAll(retval.getSubtrees().values());
-        return retval;
-      }
-
-      @Override
-      public void remove() {
-        throw new RuntimeException("Not implemented");
-      }
-    };
-  }
-
-  public Set<JoinTree> leaves() {
-    Set<JoinTree> leaves = new HashSet<JoinTree>();
-    Iterator<JoinTree> dft = dft();
-    while (dft.hasNext()) {
-      JoinTree cur = dft.next();
-      if (cur.isLeaf()) {
-        leaves.add(cur);
-      }
-    }
-    return leaves;
-  }
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/908530f5/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java
index 67b3f40..f9636d1 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageUtil.java
@@ -23,6 +23,8 @@ import java.util.*;
 import org.apache.lens.cube.metadata.FactPartition;
 import org.apache.lens.cube.metadata.StorageConstants;
 
+import org.apache.commons.lang.StringUtils;
+
 public final class StorageUtil {
   private StorageUtil() {
 
@@ -153,6 +155,10 @@ public final class StorageUtil {
   }
 
   public static String getWhereClause(CandidateDim dim, String alias) {
-    return getWhereClause(dim.getWhereClause(), alias);
+    if (!dim.isWhereClauseAdded(alias) && !StringUtils.isBlank(dim.getWhereClause())) {
+      return getWhereClause(dim.getWhereClause(), alias);
+    } else {
+      return null;
+    }
   }
 }


[12/51] [abbrv] lens git commit: LENS-905 : Group by is not promoted when keys projected along with having clause

Posted by de...@apache.org.
LENS-905 : Group by is not promoted when keys projected along with having clause


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/c1790813
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/c1790813
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/c1790813

Branch: refs/heads/current-release-line
Commit: c17908134e004c646724d7e464252ec76a43113d
Parents: d6aeecc
Author: Sushil Mohanty <su...@apache.org>
Authored: Fri Dec 18 11:50:33 2015 +0530
Committer: Sushil Mohanty <su...@apache.org>
Committed: Fri Dec 18 11:50:33 2015 +0530

----------------------------------------------------------------------
 .../org/apache/lens/cube/parse/AggregateResolver.java    |  4 +++-
 .../apache/lens/cube/parse/TestAggregateResolver.java    | 11 +++++++++--
 2 files changed, 12 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/c1790813/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
index b544a67..39bd1cc 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
@@ -97,7 +97,9 @@ class AggregateResolver implements ContextRewriter {
     Configuration distConf = cubeql.getConf();
     boolean isDimOnlyDistinctEnabled = distConf.getBoolean(CubeQueryConfUtil.ENABLE_ATTRFIELDS_ADD_DISTINCT,
       CubeQueryConfUtil.DEFAULT_ATTR_FIELDS_ADD_DISTINCT);
-    if (isDimOnlyDistinctEnabled) {
+    //Having clause will always work with measures, if only keys projected
+    //query should skip distinct and promote group by.
+    if (cubeql.getHavingAST() == null && isDimOnlyDistinctEnabled) {
       // Check if any measure/aggregate columns and distinct clause used in
       // select tree. If not, update selectAST token "SELECT" to "SELECT DISTINCT"
       if (!hasMeasures(cubeql, cubeql.getSelectAST()) && !isDistinctClauseUsed(cubeql.getSelectAST())

http://git-wip-us.apache.org/repos/asf/lens/blob/c1790813/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
index a48d753..35234a1 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
@@ -93,6 +93,9 @@ public class TestAggregateResolver extends TestQueryRewrite {
 
     String q10 = "SELECT cityid, round(testCube.msr2) from testCube where " + TWO_DAYS_RANGE;
 
+    //dimension selected with having
+    String q11 = "SELECT cityid from testCube where " + TWO_DAYS_RANGE + " having (testCube.msr2 > 100)";
+
     String expectedq1 =
       getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr2) from ", null,
         "group by testcube.cityid", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
@@ -128,13 +131,17 @@ public class TestAggregateResolver extends TestQueryRewrite {
     String expectedq10 =
       getExpectedQuery(cubeName, "SELECT testcube.cityid," + " round(sum(testCube.msr2)) from ", null,
         "group by testcube.cityid", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
-
+    String expectedq11 =
+      getExpectedQuery(cubeName, "SELECT testcube.cityid from ", null,
+        "group by testcube.cityid" + "having sum(testCube.msr2) > 100",
+              getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
     String[] tests = {
-      q1, q2, q3, q4, q5, q6, q7, q8, q9, q10,
+      q1, q2, q3, q4, q5, q6, q7, q8, q9, q10, q11,
     };
     String[] expected = {
       expectedq1, expectedq2, expectedq3, expectedq4, expectedq5,
       expectedq6, expectedq7, expectedq8, expectedq9, expectedq10,
+      expectedq11,
     };
 
     for (int i = 0; i < tests.length; i++) {


[04/51] [abbrv] lens git commit: LENS-885: Cleanup of Cube test cases

Posted by de...@apache.org.
LENS-885: Cleanup of Cube test cases


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/7c7c86da
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/7c7c86da
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/7c7c86da

Branch: refs/heads/current-release-line
Commit: 7c7c86daed2e9907bda92f5ed29e73ed99a9a726
Parents: 7e9e47e
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Fri Dec 11 18:40:59 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Fri Dec 11 18:41:00 2015 +0530

----------------------------------------------------------------------
 .../apache/lens/cube/metadata/CubeColumn.java   |   1 -
 .../lens/cube/metadata/CubeFactTable.java       |   1 -
 .../org/apache/lens/cube/metadata/DateUtil.java | 396 ++++++++++++++++
 .../lens/cube/metadata/TimePartitionRange.java  |   1 -
 .../apache/lens/cube/metadata/TimeRange.java    | 219 +++++++++
 .../apache/lens/cube/metadata/UpdatePeriod.java |  84 +++-
 .../timeline/EndsAndHolesPartitionTimeline.java |   2 +-
 .../apache/lens/cube/parse/CandidateFact.java   |   5 +-
 .../cube/parse/CandidateTablePruneCause.java    |   2 +
 .../org/apache/lens/cube/parse/DateUtil.java    | 456 ------------------
 .../lens/cube/parse/ExpressionResolver.java     |  11 +-
 .../lens/cube/parse/SingleFactHQLContext.java   |   2 +-
 .../lens/cube/parse/StorageTableResolver.java   |   2 +-
 .../org/apache/lens/cube/parse/TimeRange.java   | 220 ---------
 .../lens/cube/parse/TimerangeResolver.java      |   5 +-
 .../lens/cube/metadata/CubeFactTableTest.java   |   1 -
 .../apache/lens/cube/metadata/DateFactory.java  | 196 ++++++++
 .../cube/metadata/TestCubeMetastoreClient.java  | 115 ++---
 .../apache/lens/cube/metadata/TestDateUtil.java | 297 ++++++++++++
 .../apache/lens/cube/parse/CubeTestSetup.java   | 191 ++------
 .../FieldsCannotBeQueriedTogetherTest.java      |   8 +-
 .../lens/cube/parse/TestAggregateResolver.java  |   1 +
 .../lens/cube/parse/TestBaseCubeQueries.java    |   5 +-
 .../cube/parse/TestBetweenTimeRangeWriter.java  |  25 +-
 .../lens/cube/parse/TestCubeRewriter.java       | 459 +++++++++----------
 .../apache/lens/cube/parse/TestDateUtil.java    | 299 ------------
 .../cube/parse/TestDenormalizationResolver.java |  28 +-
 .../lens/cube/parse/TestExpressionContext.java  |   4 +-
 .../lens/cube/parse/TestExpressionResolver.java |   1 +
 .../lens/cube/parse/TestJoinResolver.java       |   1 +
 .../lens/cube/parse/TestORTimeRangeWriter.java  |  40 +-
 .../lens/cube/parse/TestQueryMetrics.java       |   2 +-
 .../lens/cube/parse/TestRewriterPlan.java       |   2 +-
 .../apache/lens/cube/parse/TestStorageUtil.java |  98 ++--
 .../lens/cube/parse/TestTimeRangeExtractor.java |  33 +-
 .../lens/cube/parse/TestTimeRangeResolver.java  |   2 +-
 .../lens/cube/parse/TestTimeRangeWriter.java    |  48 +-
 .../parse/TestTimeRangeWriterWithQuery.java     | 134 +++---
 .../lens/server/query/QueryResultPurger.java    |   2 +-
 39 files changed, 1693 insertions(+), 1706 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeColumn.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeColumn.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeColumn.java
index a2a00d2..b04532f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeColumn.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeColumn.java
@@ -24,7 +24,6 @@ import java.util.Date;
 import java.util.Map;
 import java.util.TimeZone;
 
-import org.apache.lens.cube.parse.TimeRange;
 
 import com.google.common.base.Optional;
 

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
index d6bfb79..dd0adb7 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
@@ -21,7 +21,6 @@ package org.apache.lens.cube.metadata;
 import java.util.*;
 
 import org.apache.lens.cube.metadata.UpdatePeriod.UpdatePeriodComparator;
-import org.apache.lens.cube.parse.DateUtil;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/metadata/DateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/DateUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/DateUtil.java
new file mode 100644
index 0000000..b76c567
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/DateUtil.java
@@ -0,0 +1,396 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.metadata;
+
+import static java.util.Calendar.MONTH;
+
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.server.api.error.LensException;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.time.DateUtils;
+
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
+import lombok.Data;
+import lombok.EqualsAndHashCode;
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public final class DateUtil {
+  private DateUtil() {
+
+  }
+
+  /*
+   * NOW -> new java.util.Date() NOW-7DAY -> a date one week earlier NOW (+-)
+   * <NUM>UNIT or Hardcoded dates in DD-MM-YYYY hh:mm:ss,sss
+   */
+  public static final String UNIT;
+
+  static {
+    StringBuilder sb = new StringBuilder();
+    String sep = "";
+    for (UpdatePeriod up : UpdatePeriod.values()) {
+      sb.append(sep).append(up.getUnitName());
+      sep = "|";
+    }
+    UNIT = sb.toString();
+  }
+
+  public static final String GRANULARITY = "\\.(" + UNIT + ")";
+  public static final String RELATIVE = "(now)(" + GRANULARITY + ")?";
+  public static final Pattern P_RELATIVE = Pattern.compile(RELATIVE, Pattern.CASE_INSENSITIVE);
+
+  public static final String WSPACE = "\\s+";
+  public static final String OPTIONAL_WSPACE = "\\s*";
+
+  public static final String SIGNAGE = "\\+|\\-";
+  public static final Pattern P_SIGNAGE = Pattern.compile(SIGNAGE);
+
+  public static final String QUANTITY = "\\d+";
+  public static final Pattern P_QUANTITY = Pattern.compile(QUANTITY);
+
+  public static final Pattern P_UNIT = Pattern.compile(UNIT, Pattern.CASE_INSENSITIVE);
+
+  public static final String RELDATE_VALIDATOR_STR = RELATIVE + OPTIONAL_WSPACE + "((" + SIGNAGE + ")" + "("
+    + WSPACE + ")?" + "(" + QUANTITY + ")" + OPTIONAL_WSPACE + "(" + UNIT + "))?" + "(s?)";
+
+  public static final Pattern RELDATE_VALIDATOR = Pattern.compile(RELDATE_VALIDATOR_STR, Pattern.CASE_INSENSITIVE);
+
+  public static final String YEAR_FMT = "[0-9]{4}";
+  public static final String MONTH_FMT = YEAR_FMT + "-[0-9]{2}";
+  public static final String DAY_FMT = MONTH_FMT + "-[0-9]{2}";
+  public static final String HOUR_FMT = DAY_FMT + "-[0-9]{2}";
+  public static final String MINUTE_FMT = HOUR_FMT + ":[0-9]{2}";
+  public static final String SECOND_FMT = MINUTE_FMT + ":[0-9]{2}";
+  public static final String MILLISECOND_FMT = SECOND_FMT + ",[0-9]{3}";
+  public static final String ABSDATE_FMT = "yyyy-MM-dd-HH:mm:ss,SSS";
+  public static final String HIVE_QUERY_DATE_FMT = "yyyy-MM-dd HH:mm:ss";
+
+  public static final ThreadLocal<DateFormat> ABSDATE_PARSER =
+    new ThreadLocal<DateFormat>() {
+      @Override
+      protected SimpleDateFormat initialValue() {
+        return new SimpleDateFormat(ABSDATE_FMT);
+      }
+    };
+  public static final ThreadLocal<DateFormat> HIVE_QUERY_DATE_PARSER =
+    new ThreadLocal<DateFormat>() {
+      @Override
+      protected SimpleDateFormat initialValue() {
+        return new SimpleDateFormat(HIVE_QUERY_DATE_FMT);
+      }
+    };
+
+  public static String getAbsDateFormatString(String str) {
+    if (str.matches(YEAR_FMT)) {
+      return str + "-01-01-00:00:00,000";
+    } else if (str.matches(MONTH_FMT)) {
+      return str + "-01-00:00:00,000";
+    } else if (str.matches(DAY_FMT)) {
+      return str + "-00:00:00,000";
+    } else if (str.matches(HOUR_FMT)) {
+      return str + ":00:00,000";
+    } else if (str.matches(MINUTE_FMT)) {
+      return str + ":00,000";
+    } else if (str.matches(SECOND_FMT)) {
+      return str + ",000";
+    } else if (str.matches(MILLISECOND_FMT)) {
+      return str;
+    }
+    throw new IllegalArgumentException("Unsupported formatting for date" + str);
+  }
+
+  public static Date resolveDate(String str, Date now) throws LensException {
+    if (RELDATE_VALIDATOR.matcher(str).matches()) {
+      return resolveRelativeDate(str, now);
+    } else {
+      return resolveAbsoluteDate(str);
+    }
+  }
+
+  public static String relativeToAbsolute(String relative) throws LensException {
+    return relativeToAbsolute(relative, new Date());
+  }
+
+  public static String relativeToAbsolute(String relative, Date now) throws LensException {
+    if (RELDATE_VALIDATOR.matcher(relative).matches()) {
+      return ABSDATE_PARSER.get().format(resolveRelativeDate(relative, now));
+    } else {
+      return relative;
+    }
+  }
+
+  static Cache<String, Date> stringToDateCache = CacheBuilder.newBuilder()
+    .expireAfterWrite(2, TimeUnit.HOURS).maximumSize(100).build();
+
+  public static Date resolveAbsoluteDate(final String str) throws LensException {
+    try {
+      return stringToDateCache.get(str, new Callable<Date>() {
+        @Override
+        public Date call() throws ParseException {
+          return ABSDATE_PARSER.get().parse(getAbsDateFormatString(str));
+        }
+      });
+    } catch (Exception e) {
+      log.error("Invalid date format. expected only {} date provided:{}", ABSDATE_FMT, str, e);
+      throw new LensException(LensCubeErrorCode.WRONG_TIME_RANGE_FORMAT.getLensErrorInfo(), ABSDATE_FMT, str);
+    }
+  }
+
+  public static Date resolveRelativeDate(String str, Date now) throws LensException {
+    if (StringUtils.isBlank(str)) {
+      throw new LensException(LensCubeErrorCode.NULL_DATE_VALUE.getLensErrorInfo());
+    }
+
+    // Resolve NOW with proper granularity
+    Calendar calendar = Calendar.getInstance();
+    calendar.setTime(now);
+
+    str = str.toLowerCase();
+    Matcher relativeMatcher = P_RELATIVE.matcher(str);
+    if (relativeMatcher.find()) {
+      String nowWithGranularity = relativeMatcher.group();
+      nowWithGranularity = nowWithGranularity.replaceAll("now", "");
+      nowWithGranularity = nowWithGranularity.replaceAll("\\.", "");
+
+      Matcher granularityMatcher = P_UNIT.matcher(nowWithGranularity);
+      if (granularityMatcher.find()) {
+        calendar = UpdatePeriod.fromUnitName(granularityMatcher.group().toLowerCase()).truncate(calendar);
+      }
+    }
+
+    // Get rid of 'now' part and whitespace
+    String diffStr = str.replaceAll(RELATIVE, "").replace(WSPACE, "");
+    TimeDiff diff = TimeDiff.parseFrom(diffStr);
+    return diff.offsetFrom(calendar.getTime());
+  }
+
+  public static Date getCeilDate(Date date, UpdatePeriod interval) {
+    return interval.getCeilDate(date);
+  }
+
+  public static Date getFloorDate(Date date, UpdatePeriod interval) {
+    return interval.getFloorDate(date);
+  }
+
+  public static CoveringInfo getMonthlyCoveringInfo(Date from, Date to) {
+    // Move 'from' to end of month, unless its the first day of month
+    boolean coverable = true;
+    if (!from.equals(DateUtils.truncate(from, MONTH))) {
+      from = DateUtils.addMonths(DateUtils.truncate(from, MONTH), 1);
+      coverable = false;
+    }
+
+    // Move 'to' to beginning of next month, unless its the first day of the month
+    if (!to.equals(DateUtils.truncate(to, MONTH))) {
+      to = DateUtils.truncate(to, MONTH);
+      coverable = false;
+    }
+
+    int months = 0;
+    while (from.before(to)) {
+      from = DateUtils.addMonths(from, 1);
+      months++;
+    }
+    return new CoveringInfo(months, coverable);
+  }
+
+  public static CoveringInfo getQuarterlyCoveringInfo(Date from, Date to) {
+    CoveringInfo monthlyCoveringInfo = getMonthlyCoveringInfo(from, to);
+    if (monthlyCoveringInfo.getCountBetween() < 3) {
+      return new CoveringInfo(0, false);
+    }
+    boolean coverable = monthlyCoveringInfo.isCoverable();
+    if (!from.equals(DateUtils.truncate(from, MONTH))) {
+      from = DateUtils.addMonths(DateUtils.truncate(from, MONTH), 1);
+      coverable = false;
+    }
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(from);
+    int fromMonth = cal.get(MONTH);
+
+    // Get the start date of the quarter
+    int beginOffset = (3 - fromMonth % 3) % 3;
+    int endOffset = (monthlyCoveringInfo.getCountBetween() - beginOffset) % 3;
+    if (beginOffset > 0 || endOffset > 0) {
+      coverable = false;
+    }
+    return new CoveringInfo((monthlyCoveringInfo.getCountBetween() - beginOffset - endOffset) / 3, coverable);
+  }
+
+
+  public static CoveringInfo getYearlyCoveringInfo(Date from, Date to) {
+    CoveringInfo monthlyCoveringInfo = getMonthlyCoveringInfo(from, to);
+    if (monthlyCoveringInfo.getCountBetween() < 12) {
+      return new CoveringInfo(0, false);
+    }
+    boolean coverable = monthlyCoveringInfo.isCoverable();
+    if (!from.equals(DateUtils.truncate(from, MONTH))) {
+      from = DateUtils.addMonths(DateUtils.truncate(from, MONTH), 1);
+      coverable = false;
+    }
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(from);
+    int fromMonth = cal.get(MONTH);
+    int beginOffset = (12 - fromMonth % 12) % 12;
+    int endOffset = (monthlyCoveringInfo.getCountBetween() - beginOffset) % 12;
+    if (beginOffset > 0 || endOffset > 0) {
+      coverable = false;
+    }
+    return new CoveringInfo((monthlyCoveringInfo.getCountBetween() - beginOffset - endOffset) / 12, coverable);
+  }
+
+  public static CoveringInfo getWeeklyCoveringInfo(Date from, Date to) {
+    int dayDiff = 0;
+    Date tmpFrom = from;
+    while (tmpFrom.before(to)) {
+      tmpFrom = DateUtils.addDays(tmpFrom, 1);
+      dayDiff++;
+    }
+
+    if (dayDiff < 7) {
+      return new CoveringInfo(0, false);
+    }
+
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(from);
+    int fromDay = cal.get(Calendar.DAY_OF_WEEK);
+    cal.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY);
+    Date fromWeekStartDate = cal.getTime();
+    boolean coverable = dayDiff % 7 == 0;
+    if (fromWeekStartDate.before(from)) {
+      // Count from the start of next week
+      dayDiff -= (cal.getActualMaximum(Calendar.DAY_OF_WEEK) - (fromDay - Calendar.SUNDAY));
+      coverable = false;
+    }
+
+    return new CoveringInfo(dayDiff / 7, coverable);
+  }
+
+  static CoveringInfo getCoveringInfo(Date from, Date to, UpdatePeriod interval) {
+    switch (interval) {
+    case SECONDLY:
+    case CONTINUOUS:
+      return getMilliSecondCoveringInfo(from, to, 1000);
+    case MINUTELY:
+    case HOURLY:
+    case DAILY:
+      return getMilliSecondCoveringInfo(from, to, interval.weight());
+    case WEEKLY:
+      return getWeeklyCoveringInfo(from, to);
+    case MONTHLY:
+      return getMonthlyCoveringInfo(from, to);
+    case QUARTERLY:
+      return getQuarterlyCoveringInfo(from, to);
+    case YEARLY:
+      return getYearlyCoveringInfo(from, to);
+    default:
+      return new CoveringInfo(0, false);
+    }
+  }
+
+  private static CoveringInfo getMilliSecondCoveringInfo(Date from, Date to, long millisInInterval) {
+    long diff = to.getTime() - from.getTime();
+    return new CoveringInfo((int) (diff / millisInInterval), diff % millisInInterval == 0);
+  }
+
+  static boolean isCoverableBy(Date from, Date to, Set<UpdatePeriod> intervals) {
+    for (UpdatePeriod period : intervals) {
+      if (getCoveringInfo(from, to, period).isCoverable()) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  public static int getTimeDiff(Date fromDate, Date toDate, UpdatePeriod updatePeriod) {
+    if (fromDate.before(toDate)) {
+      return getCoveringInfo(fromDate, toDate, updatePeriod).getCountBetween();
+    } else {
+      return -getCoveringInfo(toDate, fromDate, updatePeriod).getCountBetween();
+    }
+  }
+
+  @Data
+  public static class CoveringInfo {
+    int countBetween;
+    boolean coverable;
+
+    public CoveringInfo(int countBetween, boolean coverable) {
+      this.countBetween = countBetween;
+      this.coverable = coverable;
+    }
+  }
+
+  @EqualsAndHashCode
+  public static class TimeDiff {
+    int quantity;
+    UpdatePeriod updatePeriod;
+
+    private TimeDiff(int quantity, UpdatePeriod updatePeriod) {
+      this.quantity = quantity;
+      this.updatePeriod = updatePeriod;
+    }
+
+    public static TimeDiff parseFrom(String diffStr) throws LensException {
+      // Get the relative diff part to get eventual date based on now.
+      Matcher qtyMatcher = P_QUANTITY.matcher(diffStr);
+      int qty = 1;
+      if (qtyMatcher.find()) {
+        qty = Integer.parseInt(qtyMatcher.group());
+      }
+
+      Matcher signageMatcher = P_SIGNAGE.matcher(diffStr);
+      if (signageMatcher.find()) {
+        String sign = signageMatcher.group();
+        if ("-".equals(sign)) {
+          qty = -qty;
+        }
+      }
+
+      Matcher unitMatcher = P_UNIT.matcher(diffStr);
+      if (unitMatcher.find()) {
+        return new TimeDiff(qty, UpdatePeriod.fromUnitName(unitMatcher.group().toLowerCase()));
+      }
+      return new TimeDiff(0, UpdatePeriod.CONTINUOUS);
+    }
+
+    public Date offsetFrom(Date time) {
+      return DateUtils.add(time, updatePeriod.calendarField(), quantity);
+    }
+
+    public Date negativeOffsetFrom(Date time) {
+      return DateUtils.add(time, updatePeriod.calendarField(), -quantity);
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRange.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRange.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRange.java
index 01069a5..2e85111 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRange.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRange.java
@@ -21,7 +21,6 @@ package org.apache.lens.cube.metadata;
 import java.util.Date;
 import java.util.Iterator;
 
-import org.apache.lens.cube.parse.DateUtil;
 import org.apache.lens.server.api.error.LensException;
 
 import lombok.Data;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimeRange.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimeRange.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimeRange.java
new file mode 100644
index 0000000..bf6cc5c
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimeRange.java
@@ -0,0 +1,219 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.metadata;
+
+import static org.apache.lens.cube.metadata.DateUtil.ABSDATE_PARSER;
+
+import java.util.Calendar;
+import java.util.Date;
+import java.util.TreeSet;
+
+import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.server.api.error.LensException;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+
+import lombok.Data;
+import lombok.Getter;
+
+/**
+ * Timerange data structure
+ */
+@JsonIgnoreProperties({"astNode", "parent"})
+@Data
+public class TimeRange {
+  private String partitionColumn;
+  private Date toDate;
+  private Date fromDate;
+  private ASTNode astNode;
+  private ASTNode parent;
+  private int childIndex;
+
+  public boolean isCoverableBy(TreeSet<UpdatePeriod> updatePeriods) {
+    return DateUtil.isCoverableBy(fromDate, toDate, updatePeriods);
+  }
+
+
+  public static class TimeRangeBuilder {
+    private final TimeRange range;
+
+    public TimeRangeBuilder() {
+      this.range = new TimeRange();
+    }
+
+    public TimeRangeBuilder partitionColumn(String col) {
+      range.partitionColumn = col;
+      return this;
+    }
+
+    public TimeRangeBuilder toDate(Date to) {
+      range.toDate = to;
+      return this;
+    }
+
+    public TimeRangeBuilder fromDate(Date from) {
+      range.fromDate = from;
+      return this;
+    }
+
+    public TimeRangeBuilder astNode(ASTNode node) {
+      range.astNode = node;
+      return this;
+    }
+
+    public TimeRangeBuilder parent(ASTNode parent) {
+      range.parent = parent;
+      return this;
+    }
+
+    public TimeRangeBuilder childIndex(int childIndex) {
+      range.childIndex = childIndex;
+      return this;
+    }
+
+    public TimeRange build() {
+      return range;
+    }
+  }
+
+  public static TimeRangeBuilder getBuilder() {
+    return new TimeRangeBuilder();
+  }
+
+  private TimeRange() {
+
+  }
+
+  public void validate() throws LensException {
+    if (partitionColumn == null || fromDate == null || toDate == null || fromDate.equals(toDate)) {
+      throw new LensException(LensCubeErrorCode.INVALID_TIME_RANGE.getLensErrorInfo());
+    }
+
+    if (fromDate.after(toDate)) {
+      throw new LensException(LensCubeErrorCode.FROM_AFTER_TO.getLensErrorInfo(),
+          fromDate.toString(), toDate.toString());
+    }
+  }
+
+  public String toTimeDimWhereClause() {
+    return toTimeDimWhereClause(null, partitionColumn);
+  }
+
+  public String toTimeDimWhereClause(String prefix, String column) {
+    if (StringUtils.isNotBlank(column)) {
+      column = prefix + "." + column;
+    }
+    return new StringBuilder()
+      .append(column).append(" >= '").append(DateUtil.HIVE_QUERY_DATE_PARSER.get().format(fromDate)).append("'")
+      .append(" AND ")
+      .append(column).append(" < '").append(DateUtil.HIVE_QUERY_DATE_PARSER.get().format(toDate)).append("'")
+      .toString();
+  }
+
+  @Override
+  public String toString() {
+    return partitionColumn + " [" + ABSDATE_PARSER.get().format(fromDate) + " to "
+      + ABSDATE_PARSER.get().format(toDate) + ")";
+  }
+
+  /** iterable from fromDate(including) to toDate(excluding) incrementing increment units of updatePeriod */
+  public static Iterable iterable(Date fromDate, Date toDate, UpdatePeriod updatePeriod, int increment) {
+    return TimeRange.getBuilder().fromDate(fromDate).toDate(toDate).build().iterable(updatePeriod, increment);
+  }
+
+  /** iterable from fromDate(including) incrementing increment units of updatePeriod. Do this numIters times */
+  public static Iterable iterable(Date fromDate, int numIters, UpdatePeriod updatePeriod, int increment) {
+    return TimeRange.getBuilder().fromDate(fromDate).build().iterable(updatePeriod, numIters, increment);
+  }
+
+  private Iterable iterable(UpdatePeriod updatePeriod, int numIters, int increment) {
+    return new Iterable(updatePeriod, numIters, increment);
+  }
+
+  public Iterable iterable(UpdatePeriod updatePeriod, int increment) {
+    if (increment == 0) {
+      throw new UnsupportedOperationException("Can't iterate if iteration increment is zero");
+    }
+    long numIters = DateUtil.getTimeDiff(fromDate, toDate, updatePeriod) / increment;
+    return new Iterable(updatePeriod, numIters, increment);
+  }
+
+  /** Iterable so that foreach is supported */
+  public class Iterable implements java.lang.Iterable<Date> {
+    private UpdatePeriod updatePeriod;
+    private long numIters;
+    private int increment;
+
+    public Iterable(UpdatePeriod updatePeriod, long numIters, int increment) {
+      this.updatePeriod = updatePeriod;
+      this.numIters = numIters;
+      if (this.numIters < 0) {
+        this.numIters = 0;
+      }
+      this.increment = increment;
+    }
+
+    @Override
+    public Iterator iterator() {
+      return new Iterator();
+    }
+
+    public class Iterator implements java.util.Iterator<Date> {
+      Calendar calendar;
+      // Tracks the index of the item returned after the last next() call.
+      // Index here refers to the index if the iterator were iterated and converted into a list.
+      @Getter
+      int counter = -1;
+
+      public Iterator() {
+        calendar = Calendar.getInstance();
+        calendar.setTime(fromDate);
+      }
+
+      @Override
+      public boolean hasNext() {
+        return counter < numIters - 1;
+      }
+
+      @Override
+      public Date next() {
+        Date cur = calendar.getTime();
+        updatePeriod.increment(calendar, increment);
+        counter++;
+        return cur;
+      }
+
+      public Date peekNext() {
+        return calendar.getTime();
+      }
+
+      @Override
+      public void remove() {
+        throw new UnsupportedOperationException("remove from timerange iterator");
+      }
+
+      public long getNumIters() {
+        return numIters;
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java
index 4c76a69..4238066 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java
@@ -278,7 +278,7 @@ public enum UpdatePeriod implements Named {
       return cal.getTime();
     case QUARTERLY:
       Date dt = DateUtils.truncate(date, this.calendarField());
-      dt.setMonth(dt.getMonth() - dt.getMonth() % 3);
+      dt.setMonth(dt.getMonth() - (dt.getMonth() % 3));
       return dt;
     default:
       return DateUtils.truncate(date, this.calendarField());
@@ -299,6 +299,86 @@ public enum UpdatePeriod implements Named {
     calendar.add(calendarField(), increment);
   }
 
+  public Date getCeilDate(Date date) {
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(date);
+    boolean hasFraction = false;
+    switch (this) {
+    case YEARLY:
+      if (cal.get(MONTH) != 0) {
+        hasFraction = true;
+        break;
+      }
+    case MONTHLY:
+      if (cal.get(DAY_OF_MONTH) != 1) {
+        hasFraction = true;
+        break;
+      }
+    case DAILY:
+      if (cal.get(Calendar.HOUR_OF_DAY) != 0) {
+        hasFraction = true;
+        break;
+      }
+    case HOURLY:
+      if (cal.get(Calendar.MINUTE) != 0) {
+        hasFraction = true;
+        break;
+      }
+    case MINUTELY:
+      if (cal.get(Calendar.SECOND) != 0) {
+        hasFraction = true;
+        break;
+      }
+    case SECONDLY:
+    case CONTINUOUS:
+      if (cal.get(Calendar.MILLISECOND) != 0) {
+        hasFraction = true;
+      }
+      break;
+    case WEEKLY:
+      if (cal.get(Calendar.DAY_OF_WEEK) != 1) {
+        hasFraction = true;
+        break;
+      }
+    }
+
+    if (hasFraction) {
+      cal.add(this.calendarField(), 1);
+      return getFloorDate(cal.getTime());
+    } else {
+      return date;
+    }
+  }
+
+  public Date getFloorDate(Date date) {
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(date);
+    switch(this) {
+    case WEEKLY:
+      cal.set(Calendar.DAY_OF_WEEK, 1);
+      break;
+    }
+    switch (this) {
+    case YEARLY:
+      cal.set(MONTH, 0);
+    case MONTHLY:
+      cal.set(DAY_OF_MONTH, 1);
+    case WEEKLY:
+      // Already covered, only here for fall through cases
+    case DAILY:
+      cal.set(Calendar.HOUR_OF_DAY, 0);
+    case HOURLY:
+      cal.set(Calendar.MINUTE, 0);
+    case MINUTELY:
+      cal.set(Calendar.SECOND, 0);
+    case SECONDLY:
+    case CONTINUOUS:
+      cal.set(Calendar.MILLISECOND, 0);
+      break;
+    }
+    return cal.getTime();
+  }
+
   public static class UpdatePeriodComparator implements Comparator<UpdatePeriod> {
     @Override
     public int compare(UpdatePeriod o1, UpdatePeriod o2) {
@@ -306,7 +386,7 @@ public enum UpdatePeriod implements Named {
         return -1;
       } else if (o1 != null && o2 == null) {
         return 1;
-      } else if (o1 == null && o2 == null) {
+      } else if (o1 == null) {
         return 0;
       } else {
         if (o1.weight > o2.weight) {

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java
index 9d5e264..c588dc7 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java
@@ -23,8 +23,8 @@ import java.util.*;
 
 import org.apache.lens.cube.metadata.MetastoreUtil;
 import org.apache.lens.cube.metadata.TimePartition;
+import org.apache.lens.cube.metadata.TimeRange;
 import org.apache.lens.cube.metadata.UpdatePeriod;
-import org.apache.lens.cube.parse.TimeRange;
 import org.apache.lens.server.api.error.LensException;
 
 import com.google.common.base.Strings;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
index 7f81461..1884bde 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
@@ -22,10 +22,7 @@ import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 
 import java.util.*;
 
-import org.apache.lens.cube.metadata.AbstractCubeTable;
-import org.apache.lens.cube.metadata.CubeFactTable;
-import org.apache.lens.cube.metadata.CubeInterface;
-import org.apache.lens.cube.metadata.FactPartition;
+import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.HQLParser.ASTNodeVisitor;
 import org.apache.lens.cube.parse.HQLParser.TreeNode;
 import org.apache.lens.server.api.error.LensException;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
index 9c8b5b9..78fb21d 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
@@ -22,6 +22,8 @@ import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTable
 
 import java.util.*;
 
+import org.apache.lens.cube.metadata.TimeRange;
+
 import org.codehaus.jackson.annotate.JsonWriteNullProperties;
 
 import com.google.common.collect.Lists;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
deleted file mode 100644
index cd05c68..0000000
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
+++ /dev/null
@@ -1,456 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import static java.util.Calendar.*;
-
-import java.text.DateFormat;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.Set;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.apache.lens.cube.error.LensCubeErrorCode;
-import org.apache.lens.cube.metadata.UpdatePeriod;
-import org.apache.lens.server.api.error.LensException;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.time.DateUtils;
-
-import lombok.Data;
-import lombok.EqualsAndHashCode;
-import lombok.extern.slf4j.Slf4j;
-
-@Slf4j
-public final class DateUtil {
-  private DateUtil() {
-
-  }
-  /*
-   * NOW -> new java.util.Date() NOW-7DAY -> a date one week earlier NOW (+-)
-   * <NUM>UNIT or Hardcoded dates in DD-MM-YYYY hh:mm:ss,sss
-   */
-  public static final String UNIT;
-
-  static {
-    StringBuilder sb = new StringBuilder();
-    String sep = "";
-    for (UpdatePeriod up : UpdatePeriod.values()) {
-      sb.append(sep).append(up.getUnitName());
-      sep = "|";
-    }
-    UNIT = sb.toString();
-  }
-
-  public static final String GRANULARITY = "\\.(" + UNIT + ")";
-  public static final String RELATIVE = "(now)(" + GRANULARITY + ")?";
-  public static final Pattern P_RELATIVE = Pattern.compile(RELATIVE, Pattern.CASE_INSENSITIVE);
-
-  public static final String WSPACE = "\\s+";
-  public static final String OPTIONAL_WSPACE = "\\s*";
-
-  public static final String SIGNAGE = "\\+|\\-";
-  public static final Pattern P_SIGNAGE = Pattern.compile(SIGNAGE);
-
-  public static final String QUANTITY = "\\d+";
-  public static final Pattern P_QUANTITY = Pattern.compile(QUANTITY);
-
-  public static final Pattern P_UNIT = Pattern.compile(UNIT, Pattern.CASE_INSENSITIVE);
-
-  public static final String RELDATE_VALIDATOR_STR = RELATIVE + OPTIONAL_WSPACE + "((" + SIGNAGE + ")" + "("
-    + WSPACE + ")?" + "(" + QUANTITY + ")" + OPTIONAL_WSPACE + "(" + UNIT + "))?" + "(s?)";
-
-  public static final Pattern RELDATE_VALIDATOR = Pattern.compile(RELDATE_VALIDATOR_STR, Pattern.CASE_INSENSITIVE);
-
-  public static final String YEAR_FMT = "[0-9]{4}";
-  public static final String MONTH_FMT = YEAR_FMT + "-[0-9]{2}";
-  public static final String DAY_FMT = MONTH_FMT + "-[0-9]{2}";
-  public static final String HOUR_FMT = DAY_FMT + "-[0-9]{2}";
-  public static final String MINUTE_FMT = HOUR_FMT + ":[0-9]{2}";
-  public static final String SECOND_FMT = MINUTE_FMT + ":[0-9]{2}";
-  public static final String MILLISECOND_FMT = SECOND_FMT + ",[0-9]{3}";
-  public static final String ABSDATE_FMT = "yyyy-MM-dd-HH:mm:ss,SSS";
-  public static final String HIVE_QUERY_DATE_FMT = "yyyy-MM-dd HH:mm:ss";
-
-  public static final ThreadLocal<DateFormat> ABSDATE_PARSER =
-    new ThreadLocal<DateFormat>() {
-      @Override
-      protected SimpleDateFormat initialValue() {
-        return new SimpleDateFormat(ABSDATE_FMT);
-      }
-    };
-  public static final ThreadLocal<DateFormat> HIVE_QUERY_DATE_PARSER =
-    new ThreadLocal<DateFormat>() {
-      @Override
-      protected SimpleDateFormat initialValue() {
-        return new SimpleDateFormat(HIVE_QUERY_DATE_FMT);
-      }
-    };
-
-  public static String getAbsDateFormatString(String str) {
-    if (str.matches(YEAR_FMT)) {
-      return str + "-01-01-00:00:00,000";
-    } else if (str.matches(MONTH_FMT)) {
-      return str + "-01-00:00:00,000";
-    } else if (str.matches(DAY_FMT)) {
-      return str + "-00:00:00,000";
-    } else if (str.matches(HOUR_FMT)) {
-      return str + ":00:00,000";
-    } else if (str.matches(MINUTE_FMT)) {
-      return str + ":00,000";
-    } else if (str.matches(SECOND_FMT)) {
-      return str + ",000";
-    } else if (str.matches(MILLISECOND_FMT)) {
-      return str;
-    }
-    throw new IllegalArgumentException("Unsupported formatting for date" + str);
-  }
-
-  public static Date resolveDate(String str, Date now) throws LensException {
-    if (RELDATE_VALIDATOR.matcher(str).matches()) {
-      return resolveRelativeDate(str, now);
-    } else {
-      return resolveAbsoluteDate(str);
-    }
-  }
-
-  public static String relativeToAbsolute(String relative) throws LensException {
-    return relativeToAbsolute(relative, new Date());
-  }
-
-  public static String relativeToAbsolute(String relative, Date now) throws LensException {
-    if (RELDATE_VALIDATOR.matcher(relative).matches()) {
-      return ABSDATE_PARSER.get().format(resolveRelativeDate(relative, now));
-    } else {
-      return relative;
-    }
-  }
-
-  public static Date resolveAbsoluteDate(String str) throws LensException {
-    try {
-      return ABSDATE_PARSER.get().parse(getAbsDateFormatString(str));
-    } catch (ParseException e) {
-      log.error("Invalid date format. expected only {} date provided:{}", ABSDATE_FMT, str, e);
-      throw new LensException(LensCubeErrorCode.WRONG_TIME_RANGE_FORMAT.getLensErrorInfo(), ABSDATE_FMT, str);
-    }
-  }
-
-  public static Date resolveRelativeDate(String str, Date now) throws LensException {
-    if (StringUtils.isBlank(str)) {
-      throw new LensException(LensCubeErrorCode.NULL_DATE_VALUE.getLensErrorInfo());
-    }
-
-    // Resolve NOW with proper granularity
-    Calendar calendar = Calendar.getInstance();
-    calendar.setTime(now);
-
-    str = str.toLowerCase();
-    Matcher relativeMatcher = P_RELATIVE.matcher(str);
-    if (relativeMatcher.find()) {
-      String nowWithGranularity = relativeMatcher.group();
-      nowWithGranularity = nowWithGranularity.replaceAll("now", "");
-      nowWithGranularity = nowWithGranularity.replaceAll("\\.", "");
-
-      Matcher granularityMatcher = P_UNIT.matcher(nowWithGranularity);
-      if (granularityMatcher.find()) {
-        calendar = UpdatePeriod.fromUnitName(granularityMatcher.group().toLowerCase()).truncate(calendar);
-      }
-    }
-
-    // Get rid of 'now' part and whitespace
-    String diffStr = str.replaceAll(RELATIVE, "").replace(WSPACE, "");
-    TimeDiff diff = TimeDiff.parseFrom(diffStr);
-    return diff.offsetFrom(calendar.getTime());
-  }
-
-  public static Date getCeilDate(Date fromDate, UpdatePeriod interval) {
-    Calendar cal = Calendar.getInstance();
-    cal.setTime(fromDate);
-    boolean hasFraction = false;
-    switch (interval) {
-    case YEARLY:
-      if (cal.get(MONTH) != 0) {
-        hasFraction = true;
-        break;
-      }
-    case MONTHLY:
-      if (cal.get(DAY_OF_MONTH) != 1) {
-        hasFraction = true;
-        break;
-      }
-    case DAILY:
-      if (cal.get(Calendar.HOUR_OF_DAY) != 0) {
-        hasFraction = true;
-        break;
-      }
-    case HOURLY:
-      if (cal.get(Calendar.MINUTE) != 0) {
-        hasFraction = true;
-        break;
-      }
-    case MINUTELY:
-      if (cal.get(Calendar.SECOND) != 0) {
-        hasFraction = true;
-        break;
-      }
-    case SECONDLY:
-    case CONTINUOUS:
-      if (cal.get(Calendar.MILLISECOND) != 0) {
-        hasFraction = true;
-      }
-      break;
-    case WEEKLY:
-      if (cal.get(Calendar.DAY_OF_WEEK) != 1) {
-        hasFraction = true;
-        break;
-      }
-    }
-
-    if (hasFraction) {
-      cal.add(interval.calendarField(), 1);
-      return getFloorDate(cal.getTime(), interval);
-    } else {
-      return fromDate;
-    }
-  }
-
-  public static Date getFloorDate(Date toDate, UpdatePeriod interval) {
-    Calendar cal = Calendar.getInstance();
-    cal.setTime(toDate);
-    switch (interval) {
-    case YEARLY:
-      cal.set(MONTH, 0);
-    case MONTHLY:
-      cal.set(DAY_OF_MONTH, 1);
-    case DAILY:
-      cal.set(Calendar.HOUR_OF_DAY, 0);
-    case HOURLY:
-      cal.set(Calendar.MINUTE, 0);
-    case MINUTELY:
-      cal.set(Calendar.SECOND, 0);
-    case SECONDLY:
-    case CONTINUOUS:
-      cal.set(Calendar.MILLISECOND, 0);
-      break;
-    case WEEKLY:
-      cal.set(Calendar.DAY_OF_WEEK, 1);
-      cal.set(Calendar.HOUR_OF_DAY, 0);
-      cal.set(Calendar.MINUTE, 0);
-      cal.set(Calendar.SECOND, 0);
-      cal.set(Calendar.MILLISECOND, 0);
-      break;
-    }
-    return cal.getTime();
-  }
-
-  public static CoveringInfo getMonthlyCoveringInfo(Date from, Date to) {
-    // Move 'from' to end of month, unless its the first day of month
-    boolean coverable = true;
-    if (!from.equals(DateUtils.truncate(from, MONTH))) {
-      from = DateUtils.addMonths(DateUtils.truncate(from, MONTH), 1);
-      coverable = false;
-    }
-
-    // Move 'to' to beginning of next month, unless its the first day of the month
-    if (!to.equals(DateUtils.truncate(to, MONTH))) {
-      to = DateUtils.truncate(to, MONTH);
-      coverable = false;
-    }
-
-    int months = 0;
-    while (from.before(to)) {
-      from = DateUtils.addMonths(from, 1);
-      months++;
-    }
-    return new CoveringInfo(months, coverable);
-  }
-
-  public static CoveringInfo getQuarterlyCoveringInfo(Date from, Date to) {
-    CoveringInfo monthlyCoveringInfo = getMonthlyCoveringInfo(from, to);
-    if (monthlyCoveringInfo.getCountBetween() < 3) {
-      return new CoveringInfo(0, false);
-    }
-    boolean coverable = monthlyCoveringInfo.isCoverable();
-    if (!from.equals(DateUtils.truncate(from, MONTH))) {
-      from = DateUtils.addMonths(DateUtils.truncate(from, MONTH), 1);
-      coverable = false;
-    }
-    Calendar cal = Calendar.getInstance();
-    cal.setTime(from);
-    int fromMonth = cal.get(MONTH);
-
-    // Get the start date of the quarter
-    int beginOffset = (3 - fromMonth % 3) % 3;
-    int endOffset = (monthlyCoveringInfo.getCountBetween() - beginOffset) % 3;
-    if (beginOffset > 0 || endOffset > 0) {
-      coverable = false;
-    }
-    return new CoveringInfo((monthlyCoveringInfo.getCountBetween() - beginOffset - endOffset) / 3, coverable);
-  }
-
-
-  public static CoveringInfo getYearlyCoveringInfo(Date from, Date to) {
-    CoveringInfo monthlyCoveringInfo = getMonthlyCoveringInfo(from, to);
-    if (monthlyCoveringInfo.getCountBetween() < 12) {
-      return new CoveringInfo(0, false);
-    }
-    boolean coverable = monthlyCoveringInfo.isCoverable();
-    if (!from.equals(DateUtils.truncate(from, MONTH))) {
-      from = DateUtils.addMonths(DateUtils.truncate(from, MONTH), 1);
-      coverable = false;
-    }
-    Calendar cal = Calendar.getInstance();
-    cal.setTime(from);
-    int fromMonth = cal.get(MONTH);
-    int beginOffset = (12 - fromMonth % 12) % 12;
-    int endOffset = (monthlyCoveringInfo.getCountBetween() - beginOffset) % 12;
-    if (beginOffset > 0 || endOffset > 0) {
-      coverable = false;
-    }
-    return new CoveringInfo((monthlyCoveringInfo.getCountBetween() - beginOffset - endOffset) / 12, coverable);
-  }
-
-  public static CoveringInfo getWeeklyCoveringInfo(Date from, Date to) {
-    int dayDiff = 0;
-    Date tmpFrom = from;
-    while (tmpFrom.before(to)) {
-      tmpFrom = DateUtils.addDays(tmpFrom, 1);
-      dayDiff++;
-    }
-
-    if (dayDiff < 7) {
-      return new CoveringInfo(0, false);
-    }
-
-    Calendar cal = Calendar.getInstance();
-    cal.setTime(from);
-    int fromDay = cal.get(Calendar.DAY_OF_WEEK);
-    cal.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY);
-    Date fromWeekStartDate = cal.getTime();
-    boolean coverable = dayDiff % 7 == 0;
-    if (fromWeekStartDate.before(from)) {
-      // Count from the start of next week
-      dayDiff -= (cal.getActualMaximum(Calendar.DAY_OF_WEEK) - (fromDay - Calendar.SUNDAY));
-      coverable = false;
-    }
-
-    return new CoveringInfo(dayDiff / 7, coverable);
-  }
-
-  static CoveringInfo getCoveringInfo(Date from, Date to, UpdatePeriod interval) {
-    switch (interval) {
-    case SECONDLY:
-    case CONTINUOUS:
-      return getMilliSecondCoveringInfo(from, to, 1000);
-    case MINUTELY:
-    case HOURLY:
-    case DAILY:
-      return getMilliSecondCoveringInfo(from, to, interval.weight());
-    case WEEKLY:
-      return getWeeklyCoveringInfo(from, to);
-    case MONTHLY:
-      return getMonthlyCoveringInfo(from, to);
-    case QUARTERLY:
-      return getQuarterlyCoveringInfo(from, to);
-    case YEARLY:
-      return getYearlyCoveringInfo(from, to);
-    default:
-      return new CoveringInfo(0, false);
-    }
-  }
-
-  private static CoveringInfo getMilliSecondCoveringInfo(Date from, Date to, long millisInInterval) {
-    long diff = to.getTime() - from.getTime();
-    return new CoveringInfo((int) (diff / millisInInterval), diff % millisInInterval == 0);
-  }
-
-  static boolean isCoverableBy(Date from, Date to, Set<UpdatePeriod> intervals) {
-    for (UpdatePeriod period : intervals) {
-      if (getCoveringInfo(from, to, period).isCoverable()) {
-        return true;
-      }
-    }
-    return false;
-  }
-
-  public static int getTimeDiff(Date fromDate, Date toDate, UpdatePeriod updatePeriod) {
-    if (fromDate.before(toDate)) {
-      return getCoveringInfo(fromDate, toDate, updatePeriod).getCountBetween();
-    } else {
-      return -getCoveringInfo(toDate, fromDate, updatePeriod).getCountBetween();
-    }
-  }
-
-  @Data
-  public static class CoveringInfo {
-    int countBetween;
-    boolean coverable;
-
-    public CoveringInfo(int countBetween, boolean coverable) {
-      this.countBetween = countBetween;
-      this.coverable = coverable;
-    }
-  }
-
-  @EqualsAndHashCode
-  public static class TimeDiff {
-    int quantity;
-    UpdatePeriod updatePeriod;
-
-    private TimeDiff(int quantity, UpdatePeriod updatePeriod) {
-      this.quantity = quantity;
-      this.updatePeriod = updatePeriod;
-    }
-
-    public static TimeDiff parseFrom(String diffStr) throws LensException {
-      // Get the relative diff part to get eventual date based on now.
-      Matcher qtyMatcher = P_QUANTITY.matcher(diffStr);
-      int qty = 1;
-      if (qtyMatcher.find()) {
-        qty = Integer.parseInt(qtyMatcher.group());
-      }
-
-      Matcher signageMatcher = P_SIGNAGE.matcher(diffStr);
-      if (signageMatcher.find()) {
-        String sign = signageMatcher.group();
-        if ("-".equals(sign)) {
-          qty = -qty;
-        }
-      }
-
-      Matcher unitMatcher = P_UNIT.matcher(diffStr);
-      if (unitMatcher.find()) {
-        return new TimeDiff(qty, UpdatePeriod.fromUnitName(unitMatcher.group().toLowerCase()));
-      }
-      return new TimeDiff(0, UpdatePeriod.CONTINUOUS);
-    }
-
-    public Date offsetFrom(Date time) {
-      return DateUtils.add(time, updatePeriod.calendarField(), quantity);
-    }
-
-    public Date negativeOffsetFrom(Date time) {
-      return DateUtils.add(time, updatePeriod.calendarField(), -quantity);
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
index 200a48c..776021d 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
@@ -19,18 +19,11 @@
 
 package org.apache.lens.cube.parse;
 
-import static org.apache.hadoop.hive.ql.parse.HiveParser.DOT;
-import static org.apache.hadoop.hive.ql.parse.HiveParser.Identifier;
-import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_TABLE_OR_COL;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 
 import java.util.*;
 
-import org.apache.lens.cube.metadata.AbstractBaseTable;
-import org.apache.lens.cube.metadata.AbstractCubeTable;
-import org.apache.lens.cube.metadata.CubeColumn;
-import org.apache.lens.cube.metadata.CubeInterface;
-import org.apache.lens.cube.metadata.Dimension;
-import org.apache.lens.cube.metadata.ExprColumn;
+import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.ExprColumn.ExprSpec;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.cube.parse.HQLParser.ASTNodeVisitor;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
index 60b2dde..f7271e5 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
@@ -21,7 +21,7 @@ package org.apache.lens.cube.parse;
 import java.util.Map;
 
 import org.apache.lens.cube.metadata.Dimension;
-
+import org.apache.lens.cube.metadata.TimeRange;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index cc8e68c..62cc071 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -18,6 +18,7 @@
  */
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateUtil.WSPACE;
 import static org.apache.lens.cube.metadata.MetastoreUtil.getFactOrDimtableStorageTableName;
 import static org.apache.lens.cube.metadata.MetastoreUtil.getStoragetableEndTimesKey;
 import static org.apache.lens.cube.metadata.MetastoreUtil.getStoragetableStartTimesKey;
@@ -25,7 +26,6 @@ import static org.apache.lens.cube.parse.CandidateTablePruneCause.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode.PART_COL_DOES_NOT_EXIST;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode.RANGE_NOT_ANSWERABLE;
-import static org.apache.lens.cube.parse.DateUtil.WSPACE;
 import static org.apache.lens.cube.parse.StorageUtil.joinWithAnd;
 
 import java.text.DateFormat;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
deleted file mode 100644
index 7be7ace..0000000
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
+++ /dev/null
@@ -1,220 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import static org.apache.lens.cube.parse.DateUtil.ABSDATE_PARSER;
-
-import java.util.Calendar;
-import java.util.Date;
-import java.util.TreeSet;
-
-import org.apache.lens.cube.error.LensCubeErrorCode;
-import org.apache.lens.cube.metadata.UpdatePeriod;
-import org.apache.lens.server.api.error.LensException;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
-
-import lombok.Data;
-import lombok.Getter;
-
-/**
- * Timerange data structure
- */
-@JsonIgnoreProperties({"astNode", "parent"})
-@Data
-public class TimeRange {
-  private String partitionColumn;
-  private Date toDate;
-  private Date fromDate;
-  private ASTNode astNode;
-  private ASTNode parent;
-  private int childIndex;
-
-  public boolean isCoverableBy(TreeSet<UpdatePeriod> updatePeriods) {
-    return DateUtil.isCoverableBy(fromDate, toDate, updatePeriods);
-  }
-
-
-  public static class TimeRangeBuilder {
-    private final TimeRange range;
-
-    public TimeRangeBuilder() {
-      this.range = new TimeRange();
-    }
-
-    public TimeRangeBuilder partitionColumn(String col) {
-      range.partitionColumn = col;
-      return this;
-    }
-
-    public TimeRangeBuilder toDate(Date to) {
-      range.toDate = to;
-      return this;
-    }
-
-    public TimeRangeBuilder fromDate(Date from) {
-      range.fromDate = from;
-      return this;
-    }
-
-    public TimeRangeBuilder astNode(ASTNode node) {
-      range.astNode = node;
-      return this;
-    }
-
-    public TimeRangeBuilder parent(ASTNode parent) {
-      range.parent = parent;
-      return this;
-    }
-
-    public TimeRangeBuilder childIndex(int childIndex) {
-      range.childIndex = childIndex;
-      return this;
-    }
-
-    public TimeRange build() {
-      return range;
-    }
-  }
-
-  public static TimeRangeBuilder getBuilder() {
-    return new TimeRangeBuilder();
-  }
-
-  private TimeRange() {
-
-  }
-
-  public void validate() throws LensException {
-    if (partitionColumn == null || fromDate == null || toDate == null || fromDate.equals(toDate)) {
-      throw new LensException(LensCubeErrorCode.INVALID_TIME_RANGE.getLensErrorInfo());
-    }
-
-    if (fromDate.after(toDate)) {
-      throw new LensException(LensCubeErrorCode.FROM_AFTER_TO.getLensErrorInfo(),
-          fromDate.toString(), toDate.toString());
-    }
-  }
-
-  public String toTimeDimWhereClause() {
-    return toTimeDimWhereClause(null, partitionColumn);
-  }
-
-  public String toTimeDimWhereClause(String prefix, String column) {
-    if (StringUtils.isNotBlank(column)) {
-      column = prefix + "." + column;
-    }
-    return new StringBuilder()
-      .append(column).append(" >= '").append(DateUtil.HIVE_QUERY_DATE_PARSER.get().format(fromDate)).append("'")
-      .append(" AND ")
-      .append(column).append(" < '").append(DateUtil.HIVE_QUERY_DATE_PARSER.get().format(toDate)).append("'")
-      .toString();
-  }
-
-  @Override
-  public String toString() {
-    return partitionColumn + " [" + ABSDATE_PARSER.get().format(fromDate) + " to "
-      + ABSDATE_PARSER.get().format(toDate) + ")";
-  }
-
-  /** iterable from fromDate(including) to toDate(excluding) incrementing increment units of updatePeriod */
-  public static Iterable iterable(Date fromDate, Date toDate, UpdatePeriod updatePeriod, int increment) {
-    return TimeRange.getBuilder().fromDate(fromDate).toDate(toDate).build().iterable(updatePeriod, increment);
-  }
-
-  /** iterable from fromDate(including) incrementing increment units of updatePeriod. Do this numIters times */
-  public static Iterable iterable(Date fromDate, int numIters, UpdatePeriod updatePeriod, int increment) {
-    return TimeRange.getBuilder().fromDate(fromDate).build().iterable(updatePeriod, numIters, increment);
-  }
-
-  private Iterable iterable(UpdatePeriod updatePeriod, int numIters, int increment) {
-    return new Iterable(updatePeriod, numIters, increment);
-  }
-
-  public Iterable iterable(UpdatePeriod updatePeriod, int increment) {
-    if (increment == 0) {
-      throw new UnsupportedOperationException("Can't iterate if iteration increment is zero");
-    }
-    long numIters = DateUtil.getTimeDiff(fromDate, toDate, updatePeriod) / increment;
-    return new Iterable(updatePeriod, numIters, increment);
-  }
-
-  /** Iterable so that foreach is supported */
-  public class Iterable implements java.lang.Iterable<Date> {
-    private UpdatePeriod updatePeriod;
-    private long numIters;
-    private int increment;
-
-    public Iterable(UpdatePeriod updatePeriod, long numIters, int increment) {
-      this.updatePeriod = updatePeriod;
-      this.numIters = numIters;
-      if (this.numIters < 0) {
-        this.numIters = 0;
-      }
-      this.increment = increment;
-    }
-
-    @Override
-    public Iterator iterator() {
-      return new Iterator();
-    }
-
-    public class Iterator implements java.util.Iterator<Date> {
-      Calendar calendar;
-      // Tracks the index of the item returned after the last next() call.
-      // Index here refers to the index if the iterator were iterated and converted into a list.
-      @Getter
-      int counter = -1;
-
-      public Iterator() {
-        calendar = Calendar.getInstance();
-        calendar.setTime(fromDate);
-      }
-
-      @Override
-      public boolean hasNext() {
-        return counter < numIters - 1;
-      }
-
-      @Override
-      public Date next() {
-        Date cur = calendar.getTime();
-        updatePeriod.increment(calendar, increment);
-        counter++;
-        return cur;
-      }
-
-      public Date peekNext() {
-        return calendar.getTime();
-      }
-
-      @Override
-      public void remove() {
-        throw new UnsupportedOperationException("remove from timerange iterator");
-      }
-
-      public long getNumIters() {
-        return numIters;
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
index f772279..1a83d09 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
@@ -27,10 +27,7 @@ import java.util.*;
 import org.apache.lens.cube.error.ColUnAvailableInTimeRange;
 import org.apache.lens.cube.error.ColUnAvailableInTimeRangeException;
 import org.apache.lens.cube.error.LensCubeErrorCode;
-import org.apache.lens.cube.metadata.AbstractCubeTable;
-import org.apache.lens.cube.metadata.CubeColumn;
-import org.apache.lens.cube.metadata.Dimension;
-import org.apache.lens.cube.metadata.SchemaGraph;
+import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.DenormalizationResolver.ReferencedQueriedColumn;
 import org.apache.lens.server.api.error.LensException;
 

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/metadata/CubeFactTableTest.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/CubeFactTableTest.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/CubeFactTableTest.java
index 2abc6d0..25eaaef 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/CubeFactTableTest.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/CubeFactTableTest.java
@@ -26,7 +26,6 @@ import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.lens.cube.parse.DateUtil;
 import org.apache.lens.server.api.error.LensException;
 
 import org.testng.annotations.DataProvider;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/metadata/DateFactory.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/DateFactory.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/DateFactory.java
new file mode 100644
index 0000000..87e4ce3
--- /dev/null
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/DateFactory.java
@@ -0,0 +1,196 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.metadata;
+
+
+import static org.apache.lens.cube.metadata.UpdatePeriod.*;
+
+import java.text.DateFormat;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.HashMap;
+
+public class DateFactory {
+  private DateFactory() {
+
+  }
+
+  public static class DateOffsetProvider extends HashMap<Integer, Date> {
+    private final UpdatePeriod updatePeriod;
+    Calendar calendar = Calendar.getInstance();
+
+    public DateOffsetProvider(UpdatePeriod updatePeriod) {
+      this(updatePeriod, false);
+    }
+
+    public DateOffsetProvider(UpdatePeriod updatePeriod, boolean truncate) {
+      this.updatePeriod = updatePeriod;
+      Date date = calendar.getTime();
+      if (truncate) {
+        date = updatePeriod.truncate(date);
+        calendar.setTime(date);
+      }
+      put(0, date);
+    }
+
+    @Override
+    public Date get(Object key) {
+      if (!containsKey(key) && key instanceof Integer) {
+        calendar.setTime(super.get(0));
+        calendar.add(updatePeriod.calendarField(), (Integer) key);
+        put((Integer) key, calendar.getTime());
+      }
+      return super.get(key);
+    }
+  }
+
+  public static class GeneralDateOffsetProvider extends HashMap<UpdatePeriod, DateOffsetProvider> {
+    @Override
+    public DateOffsetProvider get(Object key) {
+      if (!containsKey(key) && key instanceof UpdatePeriod) {
+        UpdatePeriod up = (UpdatePeriod) key;
+        put(up, new DateOffsetProvider(up));
+      }
+      return super.get(key);
+    }
+
+    public Date get(UpdatePeriod updatePeriod, int offset) {
+      return get(updatePeriod).get(offset);
+    }
+  }
+
+  public static final GeneralDateOffsetProvider GENERAL_DATE_OFFSET_PROVIDER = new GeneralDateOffsetProvider();
+
+
+  public static Date getDateWithOffset(UpdatePeriod up, int offset) {
+    return GENERAL_DATE_OFFSET_PROVIDER.get(up, offset);
+  }
+
+  public static String getDateStringWithOffset(UpdatePeriod up, int offset) {
+    return getDateStringWithOffset(up, offset, up);
+  }
+
+  public static String getDateStringWithOffset(UpdatePeriod up, int offset, UpdatePeriod formatWith) {
+    return formatWith.format(GENERAL_DATE_OFFSET_PROVIDER.get(up, offset));
+  }
+
+  public static String getTimeRangeString(final String timeDim, final String startDate, final String endDate) {
+    return "time_range_in(" + timeDim + ", '" + startDate + "','" + endDate + "')";
+  }
+
+  public static String getTimeRangeString(final String timeDim, final UpdatePeriod updatePeriod,
+    final int startOffset, final int endOffset) {
+    return getTimeRangeString(timeDim,
+      getDateStringWithOffset(updatePeriod, startOffset), getDateStringWithOffset(updatePeriod, endOffset));
+  }
+
+  public static String getTimeRangeString(final String startDate, final String endDate) {
+    return getTimeRangeString("d_time", startDate, endDate);
+  }
+
+  public static String getTimeRangeString(final UpdatePeriod updatePeriod,
+    final int startOffset, final int endOffset) {
+    return getTimeRangeString("d_time", updatePeriod, startOffset, endOffset);
+  }
+
+  public static String getTimeRangeString(String partCol, UpdatePeriod updatePeriod, int startOffset, int endOffset,
+    UpdatePeriod formatWith) {
+    return getTimeRangeString(partCol,
+      formatWith.format(getDateWithOffset(updatePeriod, startOffset)),
+      formatWith.format(getDateWithOffset(updatePeriod, endOffset)));
+  }
+
+  public static String getTimeRangeString(String partCol, UpdatePeriod updatePeriod, int startOffset, int endOffset,
+    DateFormat formatWith) {
+    return getTimeRangeString(partCol,
+      formatWith.format(getDateWithOffset(updatePeriod, startOffset)),
+      formatWith.format(getDateWithOffset(updatePeriod, endOffset)));
+  }
+
+  public static String getTimeRangeString(UpdatePeriod updatePeriod, int startOffset, int endOffset,
+    UpdatePeriod formatWith) {
+    return getTimeRangeString("d_time", updatePeriod, startOffset, endOffset, formatWith);
+  }
+
+  public static String getTimeRangeString(UpdatePeriod updatePeriod, int startOffset, int endOffset,
+    DateFormat formatWith) {
+    return getTimeRangeString("d_time", updatePeriod, startOffset, endOffset, formatWith);
+  }
+
+  // Time Instances as Date Type
+  public static final Date NOW;
+  public static final Date TWODAYS_BACK;
+  public static final Date TWO_MONTHS_BACK;
+  public static final Date BEFORE_6_DAYS;
+  public static final Date BEFORE_4_DAYS;
+
+  // Time Ranges
+  public static final String LAST_HOUR_TIME_RANGE;
+  public static final String TWO_DAYS_RANGE;
+  public static final String TWO_DAYS_RANGE_TTD;
+  public static final String TWO_DAYS_RANGE_TTD_BEFORE_4_DAYS;
+  public static final String TWO_DAYS_RANGE_TTD2;
+  public static final String TWO_DAYS_RANGE_TTD2_BEFORE_4_DAYS;
+  public static final String TWO_DAYS_RANGE_IT;
+  public static final String THIS_YEAR_RANGE;
+  public static final String LAST_YEAR_RANGE;
+  public static final String TWO_MONTHS_RANGE_UPTO_MONTH;
+  public static final String TWO_MONTHS_RANGE_UPTO_HOURS;
+  public static final String TWO_DAYS_RANGE_BEFORE_4_DAYS;
+  private static boolean zerothHour;
+
+
+  public static boolean isZerothHour() {
+    return zerothHour;
+  }
+
+  static {
+    NOW = getDateWithOffset(HOURLY, 0);
+
+    // Figure out if current hour is 0th hour
+    zerothHour = getDateStringWithOffset(HOURLY, 0).endsWith("-00");
+
+    TWODAYS_BACK = getDateWithOffset(DAILY, -2);
+    System.out.println("Test TWODAYS_BACK:" + TWODAYS_BACK);
+
+    // two months back
+    TWO_MONTHS_BACK = getDateWithOffset(MONTHLY, -2);
+    System.out.println("Test TWO_MONTHS_BACK:" + TWO_MONTHS_BACK);
+
+    // Before 4days
+    BEFORE_4_DAYS = getDateWithOffset(DAILY, -4);
+    BEFORE_6_DAYS = getDateWithOffset(DAILY, -6);
+
+    TWO_DAYS_RANGE_BEFORE_4_DAYS = getTimeRangeString(DAILY, -6, -4, HOURLY);
+
+    TWO_DAYS_RANGE = getTimeRangeString(HOURLY, -48, 0);
+    TWO_DAYS_RANGE_TTD = getTimeRangeString("test_time_dim", DAILY, -2, 0, HOURLY);
+    TWO_DAYS_RANGE_TTD_BEFORE_4_DAYS = getTimeRangeString("test_time_dim", DAILY, -6, -4, HOURLY);
+    TWO_DAYS_RANGE_TTD2 = getTimeRangeString("test_time_dim2", DAILY, -2, 0, HOURLY);
+    TWO_DAYS_RANGE_TTD2_BEFORE_4_DAYS = getTimeRangeString("test_time_dim2", DAILY, -6, -4, HOURLY);
+    TWO_DAYS_RANGE_IT = getTimeRangeString("it", DAILY, -2, 0, HOURLY);
+    THIS_YEAR_RANGE = getTimeRangeString(YEARLY, 0, 1);
+    LAST_YEAR_RANGE = getTimeRangeString(YEARLY, -1, 0);
+    TWO_MONTHS_RANGE_UPTO_MONTH = getTimeRangeString(MONTHLY, -2, 0);
+    TWO_MONTHS_RANGE_UPTO_HOURS = getTimeRangeString(MONTHLY, -2, 0, HOURLY);
+
+    // calculate LAST_HOUR_TIME_RANGE
+    LAST_HOUR_TIME_RANGE = getTimeRangeString(HOURLY, -1, 0);
+  }
+}


[03/51] [abbrv] lens git commit: LENS-885: Cleanup of Cube test cases

Posted by de...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
index e5dbde7..c6ce6ad 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
@@ -19,9 +19,9 @@
 
 package org.apache.lens.cube.metadata;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.metadata.MetastoreUtil.*;
 import static org.apache.lens.cube.metadata.UpdatePeriod.*;
-import static org.apache.lens.cube.parse.CubeTestSetup.DateOffsetProvider;
 import static org.apache.lens.server.api.util.LensUtil.getHashMap;
 
 import static org.testng.Assert.*;
@@ -35,7 +35,6 @@ import org.apache.lens.cube.metadata.timeline.EndsAndHolesPartitionTimeline;
 import org.apache.lens.cube.metadata.timeline.PartitionTimeline;
 import org.apache.lens.cube.metadata.timeline.StoreAllPartitionTimeline;
 import org.apache.lens.cube.metadata.timeline.TestPartitionTimelines;
-import org.apache.lens.cube.parse.TimeRange;
 import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.util.LensUtil;
 
@@ -100,11 +99,7 @@ public class TestCubeMetastoreClient {
   private static Set<ExprColumn> cubeExpressions = new HashSet<>();
   private static Set<JoinChain> joinChains = new HashSet<>();
   private static Set<ExprColumn> dimExpressions = new HashSet<>();
-  private static DateOffsetProvider dateOffsetProvider = new DateOffsetProvider(HOURLY);
 
-  public static Date getDateWithOffset(int i) {
-    return dateOffsetProvider.get(i);
-  }
 
   /**
    * Get the date partition as field schema
@@ -127,7 +122,7 @@ public class TestCubeMetastoreClient {
   private static HashMap<String, Date> getTimePartitionByOffsets(Object... args) {
     for (int i = 1; i < args.length; i += 2) {
       if (args[i] instanceof Integer) {
-        args[i] = getDateWithOffset((Integer) args[i]);
+        args[i] = getDateWithOffset(HOURLY, (Integer) args[i]);
       }
     }
     return getHashMap(args);
@@ -169,13 +164,13 @@ public class TestCubeMetastoreClient {
       new FieldSchema("msr4", "bigint", "fourth measure"), "Measure4", null, "COUNT", null));
     cubeMeasures.add(new ColumnMeasure(
       new FieldSchema("msrstarttime", "int", "measure with start time"),
-      "Measure With Starttime", null, null, null, getDateWithOffset(0), null, null, 0.0, 999999.0));
+      "Measure With Starttime", null, null, null, NOW, null, null, 0.0, 999999.0));
     cubeMeasures.add(new ColumnMeasure(
       new FieldSchema("msrendtime", "float", "measure with end time"),
-      "Measure With Endtime", null, "SUM", "RS", getDateWithOffset(0), getDateWithOffset(0), null));
+      "Measure With Endtime", null, "SUM", "RS", NOW, NOW, null));
     cubeMeasures.add(new ColumnMeasure(
       new FieldSchema("msrcost", "double", "measure with cost"), "Measure With cost",
-      null, "MAX", null, getDateWithOffset(0), getDateWithOffset(0), 100.0));
+      null, "MAX", null, NOW, NOW, 100.0));
     cubeMeasures.add(new ColumnMeasure(
       new FieldSchema("msrcost2", "bigint", "measure with cost"),
       "Measure With cost2", null, "MAX", null, null, null, 100.0, 0.0, 999999999999999999999999999.0));
@@ -234,12 +229,12 @@ public class TestCubeMetastoreClient {
 
     List<CubeDimAttribute> locationHierarchyWithStartTime = new ArrayList<>();
     locationHierarchyWithStartTime.add(new ReferencedDimAtrribute(new FieldSchema("zipcode2", "int", "zip"),
-      "Zip refer2", new TableReference("zipdim", "zipcode"), getDateWithOffset(0), getDateWithOffset(0),
+      "Zip refer2", new TableReference("zipdim", "zipcode"), NOW, NOW,
       100.0, true, 1000L));
     locationHierarchyWithStartTime.add(new ReferencedDimAtrribute(new FieldSchema("cityid2", "int", "city"),
-      "City refer2", new TableReference("citydim", "id"), getDateWithOffset(0), null, null));
+      "City refer2", new TableReference("citydim", "id"), NOW, null, null));
     locationHierarchyWithStartTime.add(new ReferencedDimAtrribute(new FieldSchema("stateid2", "int", "state"),
-      "state refer2", new TableReference("statedim", "id"), getDateWithOffset(0), null, 100.0));
+      "state refer2", new TableReference("statedim", "id"), NOW, null, 100.0));
     locationHierarchyWithStartTime.add(new ReferencedDimAtrribute(new FieldSchema("countryid2", "int", "country"),
       "Country refer2", new TableReference("countrydim", "id"), null, null, null));
     locationHierarchyWithStartTime.add(new BaseDimAttribute(new FieldSchema("regionname2", "string", "region"),
@@ -248,9 +243,10 @@ public class TestCubeMetastoreClient {
     cubeDimensions
       .add(new HierarchicalDimAttribute("location2", "localtion hierarchy2", locationHierarchyWithStartTime));
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("dim1startTime", "string", "basedim"),
-      "Dim With starttime", getDateWithOffset(0), null, 100.0));
+      "Dim With starttime", NOW, null, 100.0));
     cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("dim2start", "string", "ref dim"),
-      "Dim2 with starttime", new TableReference("testdim2", "id"), getDateWithOffset(0), getDateWithOffset(0), 100.0));
+      "Dim2 with starttime", new TableReference("testdim2", "id"),
+      NOW, NOW, 100.0));
 
     List<TableReference> multiRefs = new ArrayList<>();
     multiRefs.add(new TableReference("testdim2", "id"));
@@ -260,12 +256,12 @@ public class TestCubeMetastoreClient {
     cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("dim3", "string", "multi ref dim"), "Dim3 refer",
       multiRefs));
     cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("dim3start", "string", "multi ref dim"),
-      "Dim3 with starttime", multiRefs, getDateWithOffset(0), null, 100.0));
+      "Dim3 with starttime", multiRefs, NOW, null, 100.0));
 
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("region", "string", "region dim"), "region", null, null,
       null, null, regions));
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("regionstart", "string", "region dim"),
-      "Region with starttime", getDateWithOffset(0), null, 100.0, null, regions));
+      "Region with starttime", NOW, null, 100.0, null, regions));
     JoinChain zipCity = new JoinChain("cityFromZip", "Zip City", "zip city desc");
     List<TableReference> chain = new ArrayList<>();
     chain.add(new TableReference(cubeName, "zipcode"));
@@ -1317,13 +1313,12 @@ public class TestCubeMetastoreClient {
     StoreAllPartitionTimeline storeAllPartitionTimeline, UpdatePeriod updatePeriod,
     int firstOffset, int latestOffset, int... holeOffsets) throws LensException {
     Date[] holeDates = new Date[holeOffsets.length];
-    for (int i = 0; i < holeOffsets.length; i++) {
-      holeDates[i] = getDateWithOffset(holeOffsets[i]);
+    for(int i = 0; i < holeOffsets.length; i++) {
+      holeDates[i] = getDateWithOffset(HOURLY, holeOffsets[i]);
     }
     assertTimeline(endsAndHolesPartitionTimeline, storeAllPartitionTimeline, updatePeriod,
-      getDateWithOffset(firstOffset), getDateWithOffset(latestOffset), holeDates);
+      getDateWithOffset(HOURLY, firstOffset), getDateWithOffset(HOURLY, latestOffset), holeDates);
   }
-
   private void assertTimeline(EndsAndHolesPartitionTimeline endsAndHolesPartitionTimeline,
     StoreAllPartitionTimeline storeAllPartitionTimeline, UpdatePeriod updatePeriod,
     Date first, Date latest, Date... holes) throws LensException {
@@ -1397,10 +1392,10 @@ public class TestCubeMetastoreClient {
     return values;
   }
 
-  private TimePartition[] toPartitionArray(UpdatePeriod updatePeriod, Date... dates) throws LensException {
-    TimePartition[] values = new TimePartition[dates.length];
-    for (int i = 0; i < dates.length; i++) {
-      values[i] = TimePartition.of(updatePeriod, dates[i]);
+  private TimePartition[] toPartitionArray(UpdatePeriod updatePeriod, int... offsets) throws LensException {
+    TimePartition[] values = new TimePartition[offsets.length];
+    for (int i = 0; i < offsets.length; i++) {
+      values[i] = TimePartition.of(updatePeriod, getDateWithOffset(updatePeriod, offsets[i]));
     }
     return values;
   }
@@ -1709,8 +1704,7 @@ public class TestCubeMetastoreClient {
     List<Partition> parts = client.getPartitionsByFilter(storageTableName, "dt='latest'");
     assertEquals(1, parts.size());
     assertEquals(TextInputFormat.class.getCanonicalName(), parts.get(0).getInputFormatClass().getCanonicalName());
-    assertEquals(parts.get(0).getParameters().get(getLatestPartTimestampKey("dt")),
-      HOURLY.format(getDateWithOffset(0)));
+    assertEquals(parts.get(0).getParameters().get(getLatestPartTimestampKey("dt")), getDateStringWithOffset(HOURLY, 0));
 
     client.dropPartition(cubeDim.getName(), c1, timeParts, null, HOURLY);
     assertFalse(client.dimPartitionExists(cubeDim.getName(), c1, timeParts));
@@ -1769,7 +1763,7 @@ public class TestCubeMetastoreClient {
     String storageTableName = getFactOrDimtableStorageTableName(dimName, c1);
     assertFalse(client.dimTableLatestPartitionExists(storageTableName));
 
-    Map<String, Date> timePartsNow = getHashMap(getDatePartitionKey(), getDateWithOffset(0));
+    Map<String, Date> timePartsNow = getHashMap(getDatePartitionKey(), NOW);
     StoragePartitionDesc sPartSpec0 = new StoragePartitionDesc(cubeDim.getName(), timePartsNow, null, HOURLY);
 
     client.addPartition(sPartSpec0, c1);
@@ -1782,7 +1776,7 @@ public class TestCubeMetastoreClient {
     Partition latestPart = parts.get(0);
     assertEquals(latestPart.getInputFormatClass(), TextInputFormat.class);
     assertFalse(latestPart.getCols().contains(newcol));
-    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")), HOURLY.format(getDateWithOffset(0)));
+    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")), getDateStringWithOffset(HOURLY, 0));
 
     // Partition with different schema
     cubeDim.alterColumn(newcol);
@@ -1805,7 +1799,7 @@ public class TestCubeMetastoreClient {
     latestPart = parts.get(0);
     assertEquals(latestPart.getInputFormatClass(), SequenceFileInputFormat.class);
     assertTrue(latestPart.getCols().contains(newcol));
-    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")), HOURLY.format(getDateWithOffset(1)));
+    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")), getDateStringWithOffset(HOURLY, 1));
 
     // add one more partition
     Map<String, Date> timeParts2 = getTimePartitionByOffsets(getDatePartitionKey(), 2);
@@ -1824,7 +1818,7 @@ public class TestCubeMetastoreClient {
     latestPart = parts.get(0);
     assertEquals(latestPart.getInputFormatClass(), TextInputFormat.class);
     assertTrue(latestPart.getCols().contains(newcol));
-    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")), HOURLY.format(getDateWithOffset(2)));
+    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")), getDateStringWithOffset(HOURLY, 2));
 
     // drop the last added partition
     client.dropPartition(cubeDim.getName(), c1, timeParts2, null, HOURLY);
@@ -1835,8 +1829,7 @@ public class TestCubeMetastoreClient {
     assertEquals(parts.size(), 1);
     latestPart = parts.get(0);
     assertEquals(latestPart.getInputFormatClass(), SequenceFileInputFormat.class);
-    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")),
-      HOURLY.format(getDateWithOffset(1)));
+    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")), getDateStringWithOffset(HOURLY, 1));
     assertEquals(client.getAllParts(storageTableName).size(), 3);
 
     // drop the first partition, leaving the middle.
@@ -1848,8 +1841,7 @@ public class TestCubeMetastoreClient {
     assertEquals(parts.size(), 1);
     latestPart = parts.get(0);
     assertEquals(latestPart.getInputFormatClass(), SequenceFileInputFormat.class);
-    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")),
-      HOURLY.format(getDateWithOffset(1)));
+    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")), getDateStringWithOffset(HOURLY, 1));
     assertEquals(client.getAllParts(storageTableName).size(), 2);
 
     client.dropPartition(cubeDim.getName(), c1, timeParts1, null, HOURLY);
@@ -1892,52 +1884,52 @@ public class TestCubeMetastoreClient {
     Map<String, Date> timeParts = new HashMap<>();
     Map<String, String> nonTimeParts = new HashMap<>();
 
-    timeParts.put(getDatePartitionKey(), getDateWithOffset(0));
+    timeParts.put(getDatePartitionKey(), NOW);
     nonTimeParts.put("region", "asia");
     StoragePartitionDesc sPartSpec = new StoragePartitionDesc(dimName, timeParts, nonTimeParts, HOURLY);
     client.addPartition(sPartSpec, c3);
-    expectedLatestValues.put("asia", getDateWithOffset(0));
+    expectedLatestValues.put("asia", NOW);
     assertLatestForRegions(storageTableName, expectedLatestValues);
 
-    timeParts.put(getDatePartitionKey(), getDateWithOffset(-1));
+    timeParts.put(getDatePartitionKey(), getDateWithOffset(HOURLY, -1));
     nonTimeParts.put("region", "africa");
     sPartSpec = new StoragePartitionDesc(dimName, timeParts, nonTimeParts, HOURLY);
     client.addPartition(sPartSpec, c3);
-    expectedLatestValues.put("asia", getDateWithOffset(0));
-    expectedLatestValues.put("africa", getDateWithOffset(-1));
+    expectedLatestValues.put("asia", NOW);
+    expectedLatestValues.put("africa", getDateWithOffset(HOURLY, -1));
     assertLatestForRegions(storageTableName, expectedLatestValues);
 
-    timeParts.put(getDatePartitionKey(), getDateWithOffset(1));
+    timeParts.put(getDatePartitionKey(), getDateWithOffset(HOURLY, 1));
     nonTimeParts.put("region", "africa");
     sPartSpec = new StoragePartitionDesc(dimName, timeParts, nonTimeParts, HOURLY);
     client.addPartition(sPartSpec, c3);
-    expectedLatestValues.put("asia", getDateWithOffset(0));
-    expectedLatestValues.put("africa", getDateWithOffset(1));
+    expectedLatestValues.put("asia", NOW);
+    expectedLatestValues.put("africa", getDateWithOffset(HOURLY, 1));
     assertLatestForRegions(storageTableName, expectedLatestValues);
 
-    timeParts.put(getDatePartitionKey(), getDateWithOffset(3));
+    timeParts.put(getDatePartitionKey(), getDateWithOffset(HOURLY, 3));
     nonTimeParts.put("region", "asia");
     sPartSpec = new StoragePartitionDesc(dimName, timeParts, nonTimeParts, HOURLY);
     client.addPartition(sPartSpec, c3);
-    expectedLatestValues.put("asia", getDateWithOffset(3));
-    expectedLatestValues.put("africa", getDateWithOffset(1));
+    expectedLatestValues.put("asia", getDateWithOffset(HOURLY, 3));
+    expectedLatestValues.put("africa", getDateWithOffset(HOURLY, 1));
     assertLatestForRegions(storageTableName, expectedLatestValues);
 
     client.dropPartition(dimName, c3, timeParts, nonTimeParts, HOURLY);
-    expectedLatestValues.put("asia", getDateWithOffset(0));
-    expectedLatestValues.put("africa", getDateWithOffset(1));
+    expectedLatestValues.put("asia", NOW);
+    expectedLatestValues.put("africa", getDateWithOffset(HOURLY, 1));
     assertLatestForRegions(storageTableName, expectedLatestValues);
 
-    timeParts.put(getDatePartitionKey(), getDateWithOffset(0));
+    timeParts.put(getDatePartitionKey(), NOW);
     client.dropPartition(dimName, c3, timeParts, nonTimeParts, HOURLY);
     expectedLatestValues.remove("asia");
     assertLatestForRegions(storageTableName, expectedLatestValues);
 
     nonTimeParts.put("region", "africa");
-    timeParts.put(getDatePartitionKey(), getDateWithOffset(-1));
+    timeParts.put(getDatePartitionKey(), getDateWithOffset(HOURLY, -1));
     assertLatestForRegions(storageTableName, expectedLatestValues);
 
-    timeParts.put(getDatePartitionKey(), getDateWithOffset(3));
+    timeParts.put(getDatePartitionKey(), getDateWithOffset(HOURLY, 3));
     nonTimeParts.remove("africa");
     assertLatestForRegions(storageTableName, expectedLatestValues);
   }
@@ -2016,8 +2008,7 @@ public class TestCubeMetastoreClient {
     String c1TableName = getFactOrDimtableStorageTableName(cubeDim.getName(), c1);
     assertEquals(client.getAllParts(c1TableName).size(), 8);
 
-    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
-      toPartitionArray(HOURLY, getDateWithOffset(0), getDateWithOffset(0), getDateWithOffset(1)));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null), toPartitionArray(HOURLY, 0, 0, 1));
 
     Map<String, Date> timeParts4 = getTimePartitionByOffsets(getDatePartitionKey(), 0, itPart.getName(), 1,
       etPart.getName(), -1);
@@ -2030,8 +2021,7 @@ public class TestCubeMetastoreClient {
     client.addPartitions(Arrays.asList(partSpec4, partSpec5), c1);
 
     assertEquals(client.getAllParts(c1TableName).size(), 10);
-    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
-      toPartitionArray(HOURLY, getDateWithOffset(1), getDateWithOffset(1), getDateWithOffset(1)));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null), toPartitionArray(HOURLY, 1, 1, 1));
     Map<String, Date> timeParts6 = getTimePartitionByOffsets(getDatePartitionKey(), -2, itPart.getName(), -1,
       etPart.getName(), -2);
     final StoragePartitionDesc partSpec6 = new StoragePartitionDesc(cubeDim.getName(), timeParts6, null, HOURLY);
@@ -2046,28 +2036,23 @@ public class TestCubeMetastoreClient {
 
     client.addPartition(partSpec7, c1);
     assertEquals(client.getAllParts(c1TableName).size(), 12);
-    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
-      toPartitionArray(HOURLY, getDateWithOffset(1), getDateWithOffset(1), getDateWithOffset(1)));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null), toPartitionArray(HOURLY, 1, 1, 1));
 
     client.dropPartition(cubeDim.getName(), c1, timeParts5, null, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 11);
-    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
-      toPartitionArray(HOURLY, getDateWithOffset(0), getDateWithOffset(1), getDateWithOffset(1)));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null), toPartitionArray(HOURLY, 0, 1, 1));
 
     client.dropPartition(cubeDim.getName(), c1, timeParts7, null, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 10);
-    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
-      toPartitionArray(HOURLY, getDateWithOffset(0), getDateWithOffset(1), getDateWithOffset(1)));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null), toPartitionArray(HOURLY, 0, 1, 1));
 
     client.dropPartition(cubeDim.getName(), c1, timeParts2, nonTimeSpec, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 9);
-    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
-      toPartitionArray(HOURLY, getDateWithOffset(0), getDateWithOffset(1), getDateWithOffset(0)));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null), toPartitionArray(HOURLY, 0, 1, 0));
 
     client.dropPartition(cubeDim.getName(), c1, timeParts4, null, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 8);
-    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
-      toPartitionArray(HOURLY, getDateWithOffset(0), getDateWithOffset(0), getDateWithOffset(0)));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null), toPartitionArray(HOURLY, 0, 0, 0));
 
     client.dropPartition(cubeDim.getName(), c1, timeParts3, nonTimeSpec, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 5);

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestDateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestDateUtil.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestDateUtil.java
new file mode 100644
index 0000000..7e239f3
--- /dev/null
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestDateUtil.java
@@ -0,0 +1,297 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.metadata;
+
+import static java.util.Calendar.DAY_OF_MONTH;
+import static java.util.Calendar.MONTH;
+
+import static org.apache.lens.cube.metadata.DateUtil.*;
+import static org.apache.lens.cube.metadata.UpdatePeriod.*;
+
+import static org.apache.commons.lang.time.DateUtils.addMilliseconds;
+
+import static org.testng.Assert.assertEquals;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.Set;
+
+import org.apache.lens.server.api.error.LensException;
+
+import org.apache.commons.lang.time.DateUtils;
+
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Test;
+
+import com.beust.jcommander.internal.Sets;
+import com.google.common.collect.Lists;
+import lombok.extern.slf4j.Slf4j;
+
+/**
+ * Unit tests for cube DateUtil class TestDateUtil.
+ */
+@Slf4j
+public class TestDateUtil {
+
+  public static final String[] TEST_PAIRS = {
+    "2013-Jan-01", "2013-Jan-31", "2013-Jan-01", "2013-May-31",
+    "2013-Jan-01", "2013-Dec-31", "2013-Feb-01", "2013-Apr-25",
+    "2012-Feb-01", "2013-Feb-01", "2011-Feb-01", "2013-Feb-01",
+    "2013-Jan-02", "2013-Feb-02", "2013-Jan-02", "2013-Mar-02",
+  };
+
+  public static final SimpleDateFormat DATE_FMT = new SimpleDateFormat("yyyy-MMM-dd");
+
+  private Date[] pairs;
+
+  @BeforeTest
+  public void setUp() {
+    pairs = new Date[TEST_PAIRS.length];
+    for (int i = 0; i < TEST_PAIRS.length; i++) {
+      try {
+        pairs[i] = DATE_FMT.parse(TEST_PAIRS[i]);
+      } catch (ParseException e) {
+        log.error("Parsing exception while setup.", e);
+      }
+    }
+  }
+
+
+  @Test
+  public void testMonthsBetween() throws Exception {
+    int i = 0;
+    assertEquals(getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
+      new CoveringInfo(1, true),
+      "2013-Jan-01 to 2013-Jan-31");
+
+    i += 2;
+    assertEquals(getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
+      new CoveringInfo(5, true),
+      "2013-Jan-01 to 2013-May-31");
+
+    i += 2;
+    assertEquals(getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
+      new CoveringInfo(12, true),
+      "2013-Jan-01 to 2013-Dec-31");
+
+    i += 2;
+    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(2, false),
+      "2013-Feb-01 to 2013-Apr-25");
+
+    i += 2;
+    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(12, true),
+      "2012-Feb-01 to 2013-Feb-01");
+
+    i += 2;
+    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(24, true),
+      "2011-Feb-01 to 2013-Feb-01");
+
+    i += 2;
+    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
+      "2013-Jan-02 to 2013-Feb-02");
+
+    i += 2;
+    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(1, false),
+      "2013-Jan-02 to 2013-Mar-02");
+  }
+
+  @Test
+  public void testQuartersBetween() throws Exception {
+    int i = 0;
+    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
+      "2013-Jan-01 to 2013-Jan-31");
+
+    i += 2;
+    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(1, false),
+      "2013-Jan-01 to 2013-May-31");
+
+    i += 2;
+    assertEquals(getQuarterlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
+      new CoveringInfo(4, true),
+      "2013-Jan-01 to 2013-Dec-31");
+
+    i += 2;
+    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
+      "2013-Feb-01 to 2013-Apr-25");
+
+    i += 2;
+    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(3, false),
+      "2012-Feb-01 to 2013-Feb-01");
+
+    i += 2;
+    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(7, false),
+      "2011-Feb-01 to 2013-Feb-01");
+  }
+
+  @Test
+  public void testYearsBetween() throws Exception {
+    int i = 0;
+    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
+      "" + pairs[i] + "->" + pairs[i + 1]);
+
+    i += 2;
+    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
+      "" + pairs[i] + "->" + pairs[i + 1]);
+
+    i += 2;
+    assertEquals(getYearlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
+      new CoveringInfo(1, true), ""
+        + pairs[i] + "->" + pairs[i + 1]);
+
+    i += 2;
+    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
+      "" + pairs[i] + "->" + pairs[i + 1]);
+
+    i += 2;
+    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
+      "" + pairs[i] + "->" + pairs[i + 1]);
+
+    i += 2;
+    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(1, false),
+      "" + pairs[i] + "->" + pairs[i + 1]);
+  }
+
+  @Test
+  public void testWeeksBetween() throws Exception {
+    CoveringInfo weeks;
+
+    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-26"), DATE_FMT.parse("2013-Jun-2"));
+    assertEquals(weeks, new CoveringInfo(1, true), "2013-May-26 to 2013-Jun-2");
+
+    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-27"), DATE_FMT.parse("2013-Jun-3"));
+    assertEquals(weeks, new CoveringInfo(0, false), "2013-May-26 to 2013-Jun-2");
+
+    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-27"), DATE_FMT.parse("2013-Jun-9"));
+    assertEquals(weeks, new CoveringInfo(1, false), "2013-May-26 to 2013-Jun-2");
+
+    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-27"), DATE_FMT.parse("2013-Jun-1"));
+    assertEquals(weeks, new CoveringInfo(0, false), "2013-May-27 to 2013-Jun-1");
+
+    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-25"), DATE_FMT.parse("2013-Jun-2"));
+    assertEquals(weeks, new CoveringInfo(1, false), "2013-May-25 to 2013-Jun-1");
+
+    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-26"), DATE_FMT.parse("2013-Jun-9"));
+    assertEquals(weeks, new CoveringInfo(2, true), "2013-May-26 to 2013-Jun-8");
+
+    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-26"), DATE_FMT.parse("2013-Jun-10"));
+    assertEquals(weeks, new CoveringInfo(2, false), "2013-May-26 to 2013-Jun-10");
+
+    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2015-Dec-27"), DATE_FMT.parse("2016-Jan-03"));
+    assertEquals(weeks, new CoveringInfo(1, true), "2015-Dec-27 to 2016-Jan-03");
+  }
+
+  @Test
+  public void testNowWithGranularity() throws Exception {
+    String dateFmt = "yyyy/MM/dd-HH.mm.ss.SSS";
+    // Tuesday Sept 23, 2014, 12.02.05.500 pm
+    String testDateStr = "2014/09/23-12.02.05.500";
+    final SimpleDateFormat sdf = new SimpleDateFormat(dateFmt);
+    final Date testDate = sdf.parse(testDateStr);
+
+    System.out.print("@@ testDateStr=" + testDateStr + " parsed date=" + testDate);
+
+    // Tests without a diff, just resolve now with different granularity
+    assertEquals(testDateStr, sdf.format(resolveDate("now", testDate)));
+    assertEquals("2014/01/01-00.00.00.000", sdf.format(resolveDate("now.year", testDate)));
+    assertEquals("2014/09/01-00.00.00.000", sdf.format(resolveDate("now.month", testDate)));
+    // Start of week resolves to Sunday
+    assertEquals("2014/09/21-00.00.00.000", sdf.format(resolveDate("now.week", testDate)));
+    assertEquals("2014/09/23-00.00.00.000", sdf.format(resolveDate("now.day", testDate)));
+    assertEquals("2014/09/23-12.00.00.000", sdf.format(resolveDate("now.hour", testDate)));
+    assertEquals("2014/09/23-12.02.00.000", sdf.format(resolveDate("now.minute", testDate)));
+    assertEquals("2014/09/23-12.02.05.000", sdf.format(resolveDate("now.second", testDate)));
+
+    // Tests with a diff
+    assertEquals("2014/09/22-00.00.00.000", sdf.format(resolveDate("now.day -1day", testDate)));
+    assertEquals("2014/09/23-10.00.00.000", sdf.format(resolveDate("now.hour -2hour", testDate)));
+    assertEquals("2014/09/24-12.00.00.000", sdf.format(resolveDate("now.hour +24hour", testDate)));
+    assertEquals("2015/01/01-00.00.00.000", sdf.format(resolveDate("now.year +1year", testDate)));
+    assertEquals("2014/02/01-00.00.00.000", sdf.format(resolveDate("now.year +1month", testDate)));
+  }
+
+  @Test
+  public void testFloorDate() throws ParseException {
+    Date date = ABSDATE_PARSER.get().parse("2015-01-01-00:00:00,000");
+    Date curDate = date;
+    for (int i = 0; i < 284; i++) {
+      assertEquals(getFloorDate(curDate, YEARLY), date);
+      curDate = addMilliseconds(curDate, 111111111);
+    }
+    assertEquals(getFloorDate(curDate, YEARLY), DateUtils.addYears(date, 1));
+    assertEquals(getFloorDate(date, WEEKLY), ABSDATE_PARSER.get().parse("2014-12-28-00:00:00,000"));
+  }
+
+  @Test
+  public void testCeilDate() throws ParseException {
+    Date date = ABSDATE_PARSER.get().parse("2015-12-26-06:30:15,040");
+    assertEquals(getCeilDate(date, YEARLY), ABSDATE_PARSER.get().parse("2016-01-01-00:00:00,000"));
+    assertEquals(getCeilDate(date, MONTHLY), ABSDATE_PARSER.get().parse("2016-01-01-00:00:00,000"));
+    assertEquals(getCeilDate(date, DAILY), ABSDATE_PARSER.get().parse("2015-12-27-00:00:00,000"));
+    assertEquals(getCeilDate(date, HOURLY), ABSDATE_PARSER.get().parse("2015-12-26-07:00:00,000"));
+    assertEquals(getCeilDate(date, MINUTELY), ABSDATE_PARSER.get().parse("2015-12-26-06:31:00,000"));
+    assertEquals(getCeilDate(date, SECONDLY), ABSDATE_PARSER.get().parse("2015-12-26-06:30:16,000"));
+    assertEquals(getCeilDate(date, WEEKLY), ABSDATE_PARSER.get().parse("2015-12-27-00:00:00,000"));
+  }
+
+  @Test
+  public void testTimeDiff() throws LensException {
+    ArrayList<String> minusFourDays =
+      Lists.newArrayList("-4 days", "-4days", "-4day", "-4 day", "- 4days", "- 4 day");
+    ArrayList<String> plusFourDays =
+      Lists.newArrayList("+4 days", "4 days", "+4days", "4day", "4 day", "+ 4days", "+ 4 day", "+4 day");
+    Set<TimeDiff> diffs = Sets.newHashSet();
+    for (String diffStr : minusFourDays) {
+      diffs.add(TimeDiff.parseFrom(diffStr));
+    }
+    assertEquals(diffs.size(), 1);
+    TimeDiff minusFourDaysDiff = diffs.iterator().next();
+    assertEquals(minusFourDaysDiff.quantity, -4);
+    assertEquals(minusFourDaysDiff.updatePeriod, DAILY);
+
+    diffs.clear();
+    for (String diffStr : plusFourDays) {
+      diffs.add(TimeDiff.parseFrom(diffStr));
+    }
+    assertEquals(diffs.size(), 1);
+    TimeDiff plusFourDaysDiff = diffs.iterator().next();
+    assertEquals(plusFourDaysDiff.quantity, 4);
+    assertEquals(plusFourDaysDiff.updatePeriod, DAILY);
+    Date now = new Date();
+    assertEquals(minusFourDaysDiff.offsetFrom(plusFourDaysDiff.offsetFrom(now)), now);
+    assertEquals(plusFourDaysDiff.offsetFrom(minusFourDaysDiff.offsetFrom(now)), now);
+    assertEquals(minusFourDaysDiff.negativeOffsetFrom(now), plusFourDaysDiff.offsetFrom(now));
+    assertEquals(minusFourDaysDiff.offsetFrom(now), plusFourDaysDiff.negativeOffsetFrom(now));
+  }
+
+  @Test
+  public void testRelativeToAbsolute() throws LensException {
+    Date now = new Date();
+    Date nowDay = DateUtils.truncate(now, DAY_OF_MONTH);
+    Date nowDayMinus2Days = DateUtils.add(nowDay, DAY_OF_MONTH, -2);
+    assertEquals(relativeToAbsolute("now", now), DateUtil.ABSDATE_PARSER.get().format(now));
+    assertEquals(relativeToAbsolute("now.day", now), DateUtil.ABSDATE_PARSER.get().format(nowDay));
+    assertEquals(relativeToAbsolute("now.day - 2 days", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
+    assertEquals(relativeToAbsolute("now.day - 2 day", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
+    assertEquals(relativeToAbsolute("now.day - 2day", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
+    assertEquals(relativeToAbsolute("now.day -2 day", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
+    assertEquals(relativeToAbsolute("now.day -2 days", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 1357035..2a50d74 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -19,14 +19,14 @@
 
 package org.apache.lens.cube.parse;
 
-import static java.util.Calendar.*;
+import static java.util.Calendar.DAY_OF_MONTH;
+import static java.util.Calendar.HOUR_OF_DAY;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.metadata.UpdatePeriod.*;
 
 import static org.testng.Assert.*;
 
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
 import java.util.*;
 
 import org.apache.lens.cube.metadata.*;
@@ -39,7 +39,6 @@ import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.time.DateUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -88,10 +87,6 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 public class CubeTestSetup {
 
-  public static final String HOUR_FMT = "yyyy-MM-dd-HH";
-  public static final SimpleDateFormat HOUR_PARSER = new SimpleDateFormat(HOUR_FMT);
-  public static final String MONTH_FMT = "yyyy-MM";
-  public static final SimpleDateFormat MONTH_PARSER = new SimpleDateFormat(MONTH_FMT);
   private Set<CubeMeasure> cubeMeasures;
   private Set<CubeDimAttribute> cubeDimensions;
   public static final String TEST_CUBE_NAME = "testCube";
@@ -101,30 +96,6 @@ public class CubeTestSetup {
   public static final String DERIVED_CUBE_NAME2 = "der2";
   public static final String DERIVED_CUBE_NAME3 = "der3";
 
-  // Time Instances as Date Type
-  public static final Date NOW;
-  public static final Date LAST_HOUR;
-  public static final Date TWODAYS_BACK;
-  public static final Date ONE_DAY_BACK;
-  public static final Date TWO_MONTHS_BACK;
-  public static final Date BEFORE_4_DAYS_START;
-  public static final Date BEFORE_4_DAYS_END;
-  public static final Date THIS_YEAR_START;
-  public static final Date THIS_YEAR_END;
-  public static final Date LAST_YEAR_START;
-  public static final Date LAST_YEAR_END;
-
-  // Time Ranges
-  public static final String LAST_HOUR_TIME_RANGE;
-  public static final String TWO_DAYS_RANGE;
-  public static final String TWO_DAYS_RANGE_TTD;
-  public static final String THIS_YEAR_RANGE;
-  public static final String LAST_YEAR_RANGE;
-  public static final String TWO_MONTHS_RANGE_UPTO_MONTH;
-  public static final String TWO_MONTHS_RANGE_UPTO_HOURS;
-  public static final String TWO_DAYS_RANGE_BEFORE_4_DAYS;
-
-  private static boolean zerothHour;
   private static String c0 = "C0";
   private static String c1 = "C1";
   private static String c2 = "C2";
@@ -134,99 +105,13 @@ public class CubeTestSetup {
   private static Map<String, String> factValidityProperties = Maps.newHashMap();
   @Getter
   private static Map<String, List<UpdatePeriod>> storageToUpdatePeriodMap = new LinkedHashMap<>();
-  public static class DateOffsetProvider extends HashMap<Integer, Date> {
-    private final UpdatePeriod updatePeriod;
-    Calendar calendar = Calendar.getInstance();
-
-    public DateOffsetProvider(UpdatePeriod updatePeriod) {
-      this.updatePeriod = updatePeriod;
-    }
-    {
-      put(0, calendar.getTime());
-    }
-
-    @Override
-    public Date get(Object key) {
-      if (!containsKey(key) && key instanceof Integer) {
-        calendar.setTime(super.get(0));
-        calendar.add(updatePeriod.calendarField(), (Integer) key);
-        put((Integer) key, calendar.getTime());
-      }
-      return super.get(key);
-    }
-  }
-
   static {
-    Calendar cal = Calendar.getInstance();
-    // Keep in sync
-    NOW = cal.getTime();
-    log.debug("Test now:{}", NOW);
-
-    // Figure out if current hour is 0th hour
-    zerothHour = (cal.get(HOUR_OF_DAY) == 0);
-
-    // Figure out last hour
-    cal.add(HOUR_OF_DAY, -1);
-    LAST_HOUR = cal.getTime();
-    log.debug("LastHour:{}", LAST_HOUR);
-
-    cal.setTime(NOW);
-    cal.add(DAY_OF_MONTH, -1);
-    ONE_DAY_BACK = cal.getTime();
-    cal.add(DAY_OF_MONTH, -1);
-    TWODAYS_BACK = cal.getTime();
-    System.out.println("Test TWODAYS_BACK:" + TWODAYS_BACK);
-
-    // two months back
-    cal.setTime(NOW);
-    cal.add(MONTH, -2);
-    TWO_MONTHS_BACK = cal.getTime();
-    System.out.println("Test TWO_MONTHS_BACK:" + TWO_MONTHS_BACK);
-
-    // Before 4days
-    cal.setTime(NOW);
-    cal.add(DAY_OF_MONTH, -4);
-    BEFORE_4_DAYS_END = cal.getTime();
-    cal.add(DAY_OF_MONTH, -2);
-    BEFORE_4_DAYS_START = cal.getTime();
-
-
-    THIS_YEAR_START = DateUtils.truncate(NOW, YEARLY.calendarField());
-    THIS_YEAR_END = DateUtils.addYears(THIS_YEAR_START, 1);
-    LAST_YEAR_START = DateUtils.addYears(THIS_YEAR_START, -1);
-    LAST_YEAR_END = THIS_YEAR_START;
-    TWO_DAYS_RANGE_BEFORE_4_DAYS =
-      "time_range_in(d_time, '" + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_START) + "','"
-        + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')";
-
-
-    TWO_DAYS_RANGE = "time_range_in(d_time, '" + getDateUptoHours(TWODAYS_BACK) + "','" + getDateUptoHours(NOW) + "')";
-    TWO_DAYS_RANGE_TTD = "time_range_in(test_time_dim, '" + getDateUptoHours(TWODAYS_BACK) + "','"
-      + getDateUptoHours(NOW) + "')";
-    THIS_YEAR_RANGE =
-      "time_range_in(d_time, '" + getDateUptoHours(THIS_YEAR_START) + "','" + getDateUptoHours(THIS_YEAR_END) + "')";
-    LAST_YEAR_RANGE =
-      "time_range_in(d_time, '" + getDateUptoHours(LAST_YEAR_START) + "','" + getDateUptoHours(LAST_YEAR_END) + "')";
-    TWO_MONTHS_RANGE_UPTO_MONTH =
-      "time_range_in(d_time, '" + getDateUptoMonth(TWO_MONTHS_BACK) + "','" + getDateUptoMonth(NOW) + "')";
-    TWO_MONTHS_RANGE_UPTO_HOURS =
-      "time_range_in(d_time, '" + getDateUptoHours(TWO_MONTHS_BACK) + "','" + getDateUptoHours(NOW) + "')";
-
-    // calculate LAST_HOUR_TIME_RANGE
-    LAST_HOUR_TIME_RANGE = getTimeRangeString(getDateUptoHours(LAST_HOUR), getDateUptoHours(NOW));
     factValidityProperties.put(MetastoreConstants.FACT_RELATIVE_START_TIME, "now.year - 90 days");
   }
 
-  public static boolean isZerothHour() {
-    return zerothHour;
-  }
 
   public static String getDateUptoHours(Date dt) {
-    return HOUR_PARSER.format(dt);
-  }
-
-  public static String getDateUptoMonth(Date dt) {
-    return MONTH_PARSER.format(dt);
+    return HOURLY.format(dt);
   }
 
   interface StoragePartitionProvider {
@@ -265,23 +150,16 @@ public class CubeTestSetup {
     StringBuilder expected = new StringBuilder();
     for (Map.Entry<String, String> entry : storageTableToWhereClause.entrySet()) {
       String storageTable = entry.getKey();
-      expected.append(selExpr);
-      expected.append(storageTable);
-      expected.append(" ");
-      expected.append(cubeName);
-      expected.append(" WHERE ");
-      expected.append("(");
+      expected.append(selExpr).append(storageTable).append(" ").append(cubeName).append(" WHERE ").append("(");
       if (notLatestConditions != null) {
         for (String cond : notLatestConditions) {
           expected.append(cond).append(" AND ");
         }
       }
       if (whereExpr != null) {
-        expected.append(whereExpr);
-        expected.append(" AND ");
+        expected.append(whereExpr).append(" AND ");
       }
-      expected.append(entry.getValue());
-      expected.append(")");
+      expected.append(entry.getValue()).append(")");
       if (postWhereExpr != null) {
         expected.append(" ").append(postWhereExpr);
       }
@@ -328,27 +206,20 @@ public class CubeTestSetup {
     assertEquals(1, numTabs);
     for (Map.Entry<String, String> entry : storageTableToWhereClause.entrySet()) {
       String storageTable = entry.getKey();
-      expected.append(selExpr);
-      expected.append(storageTable);
-      expected.append(" ");
-      expected.append(cubeName);
-      expected.append(joinExpr);
-      expected.append(" WHERE ");
-      expected.append("(");
+      expected.append(selExpr).append(storageTable).append(" ").append(cubeName).append(joinExpr)
+        .append(" WHERE ").append("(");
       if (notLatestConditions != null) {
         for (String cond : notLatestConditions) {
           expected.append(cond).append(" AND ");
         }
       }
       if (whereExpr != null) {
-        expected.append(whereExpr);
-        expected.append(" AND ");
+        expected.append(whereExpr).append(" AND ");
       }
       expected.append(entry.getValue());
       if (joinWhereConds != null) {
         for (String joinEntry : joinWhereConds) {
-          expected.append(" AND ");
-          expected.append(joinEntry);
+          expected.append(" AND ").append(joinEntry);
         }
       }
       expected.append(")");
@@ -379,7 +250,7 @@ public class CubeTestSetup {
 
   public static Map<String, String> getWhereForDailyAndHourly2daysWithTimeDim(String cubeName, String timedDimension,
     Date from, Date to, String... storageTables) {
-    Map<String, String> storageTableToWhereClause = new LinkedHashMap<String, String>();
+    Map<String, String> storageTableToWhereClause = new LinkedHashMap<>();
     if (storageToUpdatePeriodMap.isEmpty()) {
       String whereClause = getWhereForDailyAndHourly2daysWithTimeDim(cubeName, timedDimension, from, to);
       storageTableToWhereClause.put(getStorageTableString(storageTables), whereClause);
@@ -398,7 +269,7 @@ public class CubeTestSetup {
   private static String getStorageTableString(String... storageTables) {
     String dbName = getDbName();
     if (!StringUtils.isBlank(dbName)) {
-      List<String> tbls = new ArrayList<String>();
+      List<String> tbls = new ArrayList<>();
       for (String tbl : storageTables) {
         tbls.add(dbName + tbl);
       }
@@ -409,10 +280,10 @@ public class CubeTestSetup {
 
   public static String getWhereForDailyAndHourly2daysWithTimeDim(String cubeName, String timedDimension, Date from,
     Date to) {
-    List<String> hourlyparts = new ArrayList<String>();
-    List<String> dailyparts = new ArrayList<String>();
+    Set<String> hourlyparts = new HashSet<>();
+    Set<String> dailyparts = new HashSet<>();
     Date dayStart;
-    if (!CubeTestSetup.isZerothHour()) {
+    if (!isZerothHour()) {
       addParts(hourlyparts, HOURLY, from, DateUtil.getCeilDate(from, DAILY));
       addParts(hourlyparts, HOURLY, DateUtil.getFloorDate(to, DAILY),
         DateUtil.getFloorDate(to, HOURLY));
@@ -421,7 +292,7 @@ public class CubeTestSetup {
       dayStart = from;
     }
     addParts(dailyparts, DAILY, dayStart, DateUtil.getFloorDate(to, DAILY));
-    List<String> parts = new ArrayList<String>();
+    List<String> parts = new ArrayList<>();
     parts.addAll(hourlyparts);
     parts.addAll(dailyparts);
     Collections.sort(parts);
@@ -434,7 +305,7 @@ public class CubeTestSetup {
     List<String> hourlyparts = new ArrayList<String>();
     List<String> dailyparts = new ArrayList<String>();
     Date dayStart;
-    if (!CubeTestSetup.isZerothHour()) {
+    if (!isZerothHour()) {
       addParts(hourlyparts, HOURLY, from, DateUtil.getCeilDate(from, DAILY));
       addParts(hourlyparts, HOURLY, DateUtil.getFloorDate(to, DAILY),
         DateUtil.getFloorDate(to, HOURLY));
@@ -458,7 +329,7 @@ public class CubeTestSetup {
     List<String> monthlyparts = new ArrayList<String>();
     Date dayStart = TWO_MONTHS_BACK;
     Date monthStart = TWO_MONTHS_BACK;
-    if (!CubeTestSetup.isZerothHour()) {
+    if (!isZerothHour()) {
       addParts(hourlyparts, HOURLY, TWO_MONTHS_BACK,
         DateUtil.getCeilDate(TWO_MONTHS_BACK, DAILY));
       addParts(hourlyparts, HOURLY, DateUtil.getFloorDate(NOW, DAILY),
@@ -510,7 +381,7 @@ public class CubeTestSetup {
 
     Date dayStart = TWO_MONTHS_BACK;
     Date monthStart = TWO_MONTHS_BACK;
-    if (!CubeTestSetup.isZerothHour()) {
+    if (!isZerothHour()) {
       addParts(hourlyparts, HOURLY, TWO_MONTHS_BACK,
         DateUtil.getCeilDate(TWO_MONTHS_BACK, DAILY));
       addParts(hourlyparts, HOURLY, DateUtil.getFloorDate(NOW, DAILY),
@@ -571,13 +442,12 @@ public class CubeTestSetup {
     return storageTableToWhereClause;
   }
 
-  public static void addParts(List<String> partitions, UpdatePeriod updatePeriod, Date from, Date to) {
-    DateFormat fmt = updatePeriod.format();
+  public static void addParts(Collection<String> partitions, UpdatePeriod updatePeriod, Date from, Date to) {
     Calendar cal = Calendar.getInstance();
     cal.setTime(from);
     Date dt = cal.getTime();
     while (dt.before(to)) {
-      String part = fmt.format(dt);
+      String part = updatePeriod.format(dt);
       cal.add(updatePeriod.calendarField(), 1);
       partitions.add(part);
       dt = cal.getTime();
@@ -1418,9 +1288,9 @@ public class CubeTestSetup {
     }
 
     // Add all hourly partitions for TWO_DAYS_RANGE_BEFORE_4_DAYS
-    cal.setTime(BEFORE_4_DAYS_START);
+    cal.setTime(BEFORE_6_DAYS);
     temp = cal.getTime();
-    while (!(temp.after(BEFORE_4_DAYS_END))) {
+    while (!(temp.after(BEFORE_4_DAYS))) {
       Map<String, Date> timeParts = new HashMap<String, Date>();
       timeParts.put("ttd", temp);
       timeParts.put("ttd2", temp);
@@ -1533,9 +1403,9 @@ public class CubeTestSetup {
     }
 
     // Add all hourly partitions for TWO_DAYS_RANGE_BEFORE_4_DAYS
-    cal.setTime(BEFORE_4_DAYS_START);
+    cal.setTime(BEFORE_6_DAYS);
     temp = cal.getTime();
-    while (!(temp.after(BEFORE_4_DAYS_END))) {
+    while (!(temp.after(BEFORE_4_DAYS))) {
       Map<String, Date> timeParts = new HashMap<String, Date>();
       timeParts.put(TestCubeMetastoreClient.getDatePartitionKey(), temp);
       StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, HOURLY);
@@ -1585,9 +1455,9 @@ public class CubeTestSetup {
     assertTimeline(client, fact.getName(), c4, HOURLY, "ttd2", ttd2StoreAll);
 
     // Add all hourly partitions for TWO_DAYS_RANGE_BEFORE_4_DAYS
-    cal.setTime(BEFORE_4_DAYS_START);
+    cal.setTime(BEFORE_6_DAYS);
     temp = cal.getTime();
-    while (!(temp.after(BEFORE_4_DAYS_END))) {
+    while (!(temp.after(BEFORE_4_DAYS))) {
       Map<String, Date> timeParts = new HashMap<String, Date>();
       timeParts.put("ttd", temp);
       timeParts.put("ttd2", temp);
@@ -2692,9 +2562,4 @@ public class CubeTestSetup {
     System.out.println("--query- " + query);
     HQLParser.printAST(HQLParser.parseHQL(query, new HiveConf()));
   }
-
-
-  private static String getTimeRangeString(final String startDate, final String endDate) {
-    return "time_range_in(d_time, '" + startDate + "','" + endDate + "')";
-  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
index 0fea9f1..ff7c15f 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
@@ -18,7 +18,7 @@
  */
 package org.apache.lens.cube.parse;
 
-import static org.apache.lens.cube.parse.CubeTestSetup.*;
+import static org.apache.lens.cube.metadata.DateFactory.*;
 
 import static org.testng.Assert.assertEquals;
 import static org.testng.Assert.fail;
@@ -301,8 +301,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
     Configuration queryConf = new Configuration(conf);
     queryConf.setBoolean(CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, true);
 
-    testFieldsCannotBeQueriedTogetherError("select msr4 from basecube where " + "time_range_in(d_time, '"
-        + getDateUptoHours(TWODAYS_BACK) + "','" + getDateUptoHours(CubeTestSetup.NOW) + "')",
+    testFieldsCannotBeQueriedTogetherError("select msr4 from basecube where " + TWO_DAYS_RANGE,
         Arrays.asList("d_time", "msr4"), queryConf);
   }
 
@@ -321,8 +320,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
     Configuration queryConf = new Configuration(conf);
     queryConf.setBoolean(CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, false);
 
-    testFieldsCannotBeQueriedTogetherError("select msr4 from basecube where " + "time_range_in(d_time, '"
-        + getDateUptoHours(TWODAYS_BACK) + "','" + getDateUptoHours(CubeTestSetup.NOW) + "')",
+    testFieldsCannotBeQueriedTogetherError("select msr4 from basecube where " + TWO_DAYS_RANGE,
         Arrays.asList("d_time", "msr4"), queryConf);
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
index 753ca33..a48d753 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
@@ -19,6 +19,7 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
 import static org.apache.lens.cube.parse.TestCubeRewriter.compareQueries;
 

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index ee84a4c..97c6d08 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -19,9 +19,10 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
+import static org.apache.lens.cube.metadata.DateUtil.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.MISSING_PARTITIONS;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
-import static org.apache.lens.cube.parse.DateUtil.*;
 import static org.apache.lens.cube.parse.TestCubeRewriter.compareQueries;
 
 import static org.apache.hadoop.hive.ql.parse.HiveParser.KW_AND;
@@ -33,6 +34,7 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.cube.metadata.TimeRange;
 import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.server.api.error.LensException;
@@ -47,7 +49,6 @@ import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
 import com.google.common.base.Splitter;
-
 import lombok.Getter;
 
 public class TestBaseCubeQueries extends TestQueryRewrite {

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
index 9a2493c..eeba861 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
@@ -19,12 +19,12 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.NOW;
+import static org.apache.lens.cube.metadata.DateFactory.TWODAYS_BACK;
+import static org.apache.lens.cube.metadata.UpdatePeriod.DAILY;
+
 import java.text.DateFormat;
-import java.util.ArrayList;
 import java.util.Date;
-import java.util.List;
-
-import org.apache.lens.cube.metadata.UpdatePeriod;
 
 import org.testng.Assert;
 
@@ -50,9 +50,9 @@ public class TestBetweenTimeRangeWriter extends TestTimeRangeWriter {
     String expected = null;
     if (format == null) {
       expected =
-        getBetweenClause("test", "dt", CubeTestSetup.TWODAYS_BACK, CubeTestSetup.NOW, UpdatePeriod.DAILY.format());
+        getBetweenClause("test", "dt", TWODAYS_BACK, NOW, DAILY.format());
     } else {
-      expected = getBetweenClause("test", "dt", CubeTestSetup.TWODAYS_BACK, CubeTestSetup.NOW, format);
+      expected = getBetweenClause("test", "dt", TWODAYS_BACK, NOW, format);
     }
     Assert.assertEquals(expected, whereClause);
   }
@@ -62,17 +62,4 @@ public class TestBetweenTimeRangeWriter extends TestTimeRangeWriter {
     String last = format.format(end);
     return " (" + alias + "." + colName + " BETWEEN '" + first + "' AND '" + last + "') ";
   }
-
-  @Override
-  public void validateSingle(String whereClause, DateFormat format) {
-    List<String> parts = new ArrayList<String>();
-    if (format == null) {
-      parts.add(UpdatePeriod.DAILY.format(CubeTestSetup.ONE_DAY_BACK));
-    } else {
-      parts.add(format.format(CubeTestSetup.ONE_DAY_BACK));
-    }
-
-    System.out.println("Expected :" + StorageUtil.getWherePartClause("dt", "test", parts));
-    Assert.assertEquals(whereClause, StorageUtil.getWherePartClause("dt", "test", parts));
-  }
 }