You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lens.apache.org by pu...@apache.org on 2017/02/17 10:58:38 UTC

[1/4] lens git commit: Deleted deprecated classes, Fixed Checkstyles, Fixed test cases, Fixed duplicate projections

Repository: lens
Updated Branches:
  refs/heads/lens-1381 4af769ee3 -> 975fa2c2b


http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 2a9be16..cf937a8 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -24,6 +24,7 @@ import static org.apache.lens.cube.metadata.UpdatePeriod.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.*;
 import static org.apache.lens.cube.parse.CubeQueryConfUtil.*;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
+
 import static org.testng.Assert.*;
 
 import java.text.DateFormat;
@@ -51,6 +52,7 @@ import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
 import com.google.common.base.Splitter;
+import com.google.common.collect.Iterables;
 import com.google.common.collect.Sets;
 import lombok.extern.slf4j.Slf4j;
 
@@ -102,14 +104,13 @@ public class TestCubeRewriter extends TestQueryRewrite {
     System.out.println("expected " + expected);
     compareQueries(rewrittenQuery.toHQL(), expected);
 
-    //TODO union : Fact names are different. Check after MaXCoveringFactResolver.
     //test with msr2 on different fact
-//    rewrittenQuery = rewriteCtx("select SUM(msr2) from testCube where " + timeRangeString, conf);
-//    expected = "select SUM((testCube.msr2)) as `sum(msr2)` from TestQueryRewrite.c0_testFact testcube"
-//      + " WHERE ((( testcube . dt ) between  '" + from + "'  and  '" + to + "' ))";
-//    System.out.println("rewrittenQuery.toHQL() " + rewrittenQuery.toHQL());
-//    System.out.println("expected " + expected);
-//    compareQueries(rewrittenQuery.toHQL(), expected);
+    rewrittenQuery = rewriteCtx("select SUM(msr2) from testCube where " + timeRangeString, conf);
+    expected = "select SUM((testCube.msr2)) as `sum(msr2)` from TestQueryRewrite.c2_testfact testcube"
+      + " WHERE ((( testcube . dt ) between  '" + from + "'  and  '" + to + "' ))";
+    System.out.println("rewrittenQuery.toHQL() " + rewrittenQuery.toHQL());
+    System.out.println("expected " + expected);
+    compareQueries(rewrittenQuery.toHQL(), expected);
 
     //from date 6 days back
     timeRangeString = getTimeRangeString(DAILY, -6, 0, qFmt);
@@ -143,7 +144,6 @@ public class TestCubeRewriter extends TestQueryRewrite {
 //    assertNotNull(rewrittenQuery.getNonExistingParts());
   }
 
-  //TODO union: Verify after MaxCoveringFactResolver changes.
   @Test
   public void testMaxCoveringFact() throws Exception {
     Configuration conf = getConf();
@@ -189,16 +189,14 @@ public class TestCubeRewriter extends TestQueryRewrite {
     assertEquals(th.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
     NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) th;
     PruneCauses.BriefAndDetailedError pruneCauses = ne.getJsonMessage();
-    //TODO union : check the error code. Its coming as "Columns [msr2] are not present in any table"
-    //TODO union : Need to  check partition resolution flow in StorageTableResolver.
-//    int endIndex = MISSING_PARTITIONS.errorFormat.length() - 3;
-//    assertEquals(
-//      pruneCauses.getBrief().substring(0, endIndex),
-//      MISSING_PARTITIONS.errorFormat.substring(0, endIndex)
-//    );
-//    assertEquals(pruneCauses.getDetails().get("testfact").size(), 1);
-//    assertEquals(pruneCauses.getDetails().get("testfact").iterator().next().getCause(),
-//      MISSING_PARTITIONS);
+    int endIndex = MISSING_PARTITIONS.errorFormat.length() - 3;
+    assertEquals(
+      pruneCauses.getBrief().substring(0, endIndex),
+      MISSING_PARTITIONS.errorFormat.substring(0, endIndex)
+    );
+    assertEquals(pruneCauses.getDetails().get("c1_testfact").size(), 1);
+    assertEquals(pruneCauses.getDetails().get("c1_testfact").iterator().next().getCause(),
+      MISSING_PARTITIONS);
   }
 
   @Test
@@ -209,9 +207,10 @@ public class TestCubeRewriter extends TestQueryRewrite {
       getExpectedQuery(DERIVED_CUBE_NAME, "select sum(derivedCube.msr2) as `sum(msr2)` FROM ", null, null,
         getWhereForDailyAndHourly2days(DERIVED_CUBE_NAME, "C2_testfact"));
     compareQueries(rewrittenQuery.toHQL(), expected);
-    System.out.println("Non existing parts:" + rewrittenQuery.getNonExistingParts());
-    //TODO union: Check this in a better way.
-//    assertNotNull(rewrittenQuery.getNonExistingParts());
+
+    System.out.println("Non existing parts:" + ((StorageCandidate) rewrittenQuery.getCandidates().iterator().next())
+        .getNonExistingPartitions());
+    assertNotNull(((StorageCandidate) rewrittenQuery.getCandidates().iterator().next()).getNonExistingPartitions());
 
     LensException th = getLensExceptionInRewrite(
       "select SUM(msr4) from derivedCube where " + TWO_DAYS_RANGE, getConf());
@@ -414,8 +413,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
       "select cubestate.name, cubestate.countryid, msr2 from" + " testCube" + " where cubestate.countryid = 5 and "
         + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(TEST_CUBE_NAME, "select cubestate.name as `name`, " +
-          "cubestate.countryid as `countryid`, sum(testcube.msr2) as `msr2`" + " FROM ",
+      getExpectedQuery(TEST_CUBE_NAME, "select cubestate.name as `name`, "
+          + "cubestate.countryid as `countryid`, sum(testcube.msr2) as `msr2`" + " FROM ",
         " JOIN " + getDbName()
           + "c3_statetable_partitioned cubestate ON" + " testCube.stateid = cubestate.id and cubestate.dt = 'latest'",
         "cubestate.countryid=5",
@@ -448,54 +447,51 @@ public class TestCubeRewriter extends TestQueryRewrite {
         getConfWithStorages("C2"));
     compareQueries(hqlQuery, expected);
 
-    //TODO union : Wrong fact selected. Verify after MaxCoveringFactResolver changes.
     // q2
-//    hqlQuery =
-//      rewrite("select statedim.name, SUM(msr2) from" + " testCube" + " join citydim on testCube.cityid = citydim.id"
-//        + " left outer join statedim on statedim.id = citydim.stateid"
-//        + " right outer join zipdim on citydim.zipcode = zipdim.code" + " where " + TWO_DAYS_RANGE, getConf());
-//    expected =
-//      getExpectedQuery(TEST_CUBE_NAME,
-//        "select statedim.name as `name`," + " sum(testcube.msr2) as `SUM(msr2)` FROM ", "INNER JOIN " + getDbName()
-//          + "c1_citytable citydim ON testCube.cityid = citydim.id and citydim.dt='latest' LEFT OUTER JOIN "
-//          + getDbName()
-//          + "c1_statetable statedim" + " ON statedim.id = citydim.stateid AND "
-//          + "(statedim.dt = 'latest') RIGHT OUTER JOIN " + getDbName() + "c1_ziptable"
-//          + " zipdim ON citydim.zipcode = zipdim.code and zipdim.dt='latest'", null, " group by" + " statedim.name ",
-//        null,
-//        getWhereForHourly2days(TEST_CUBE_NAME, "C1_testfact2"));
-//    compareQueries(hqlQuery, expected);
-
-    //TODO union : Wrong fact selected. Verify after MaxCoveringFactResolver changes.
+    hqlQuery =
+      rewrite("select statedim.name, SUM(msr2) from" + " testCube" + " join citydim on testCube.cityid = citydim.id"
+        + " left outer join statedim on statedim.id = citydim.stateid"
+        + " right outer join zipdim on citydim.zipcode = zipdim.code" + " where " + TWO_DAYS_RANGE, getConf());
+    expected =
+      getExpectedQuery(TEST_CUBE_NAME,
+        "select statedim.name as `name`," + " sum(testcube.msr2) as `SUM(msr2)` FROM ", "INNER JOIN " + getDbName()
+          + "c1_citytable citydim ON testCube.cityid = citydim.id and citydim.dt='latest' LEFT OUTER JOIN "
+          + getDbName()
+          + "c1_statetable statedim" + " ON statedim.id = citydim.stateid AND "
+          + "(statedim.dt = 'latest') RIGHT OUTER JOIN " + getDbName() + "c1_ziptable"
+          + " zipdim ON citydim.zipcode = zipdim.code and zipdim.dt='latest'", null, " group by" + " statedim.name ",
+        null,
+        getWhereForHourly2days(TEST_CUBE_NAME, "C1_testfact2"));
+    compareQueries(hqlQuery, expected);
+
     // q3
-//    hqlQuery =
-//      rewrite("select st.name, SUM(msr2) from" + " testCube TC" + " join citydim CT on TC.cityid = CT.id"
-//        + " left outer join statedim ST on ST.id = CT.stateid"
-//        + " right outer join zipdim ZT on CT.zipcode = ZT.code" + " where " + TWO_DAYS_RANGE, getConf());
-//    expected =
-//      getExpectedQuery("tc", "select st.name as `name`," + " sum(tc.msr2) as `sum(msr2)` FROM ",
-//          " INNER JOIN " + getDbName()
-//          + "c1_citytable ct ON" + " tc.cityid = ct.id and ct.dt='latest' LEFT OUTER JOIN "
-//          + getDbName() + "c1_statetable st"
-//          + " ON st.id = ct.stateid and (st.dt = 'latest') " + "RIGHT OUTER JOIN " + getDbName() + "c1_ziptable"
-//          + " zt ON ct.zipcode = zt.code and zt.dt='latest'", null, " group by" + " st.name ", null,
-//        getWhereForHourly2days("tc", "C1_testfact2"));
-//    compareQueries(hqlQuery, expected);
-
-    //TODO union : Wrong fact selected. Verify after MaxCoveringFactResolver changes.
+    hqlQuery =
+      rewrite("select st.name, SUM(msr2) from" + " testCube TC" + " join citydim CT on TC.cityid = CT.id"
+        + " left outer join statedim ST on ST.id = CT.stateid"
+        + " right outer join zipdim ZT on CT.zipcode = ZT.code" + " where " + TWO_DAYS_RANGE, getConf());
+    expected =
+      getExpectedQuery("tc", "select st.name as `name`," + " sum(tc.msr2) as `sum(msr2)` FROM ",
+          " INNER JOIN " + getDbName()
+          + "c1_citytable ct ON" + " tc.cityid = ct.id and ct.dt='latest' LEFT OUTER JOIN "
+          + getDbName() + "c1_statetable st"
+          + " ON st.id = ct.stateid and (st.dt = 'latest') " + "RIGHT OUTER JOIN " + getDbName() + "c1_ziptable"
+          + " zt ON ct.zipcode = zt.code and zt.dt='latest'", null, " group by" + " st.name ", null,
+        getWhereForHourly2days("tc", "C1_testfact2"));
+    compareQueries(hqlQuery, expected);
+
     // q4
-//    hqlQuery =
-//      rewrite("select citydim.name, SUM(msr2) from" + " testCube"
-//        + " left outer join citydim on testCube.cityid = citydim.id"
-//        + " left outer join zipdim on citydim.zipcode = zipdim.code" + " where " + TWO_DAYS_RANGE, getConf());
-//    expected =
-//      getExpectedQuery(TEST_CUBE_NAME, "select citydim.name as `name`," + " sum(testcube.msr2)  as `sum(msr2)`FROM ",
-//          " LEFT OUTER JOIN "
-//          + getDbName() + "c1_citytable citydim ON" + " testCube.cityid = citydim.id and (citydim.dt = 'latest') "
-//          + " LEFT OUTER JOIN " + getDbName() + "c1_ziptable" + " zipdim ON citydim.zipcode = zipdim.code AND "
-//          + "(zipdim.dt = 'latest')", null, " group by" + " citydim.name ", null,
-//        getWhereForHourly2days(TEST_CUBE_NAME, "C1_testfact2"));
-//    compareQueries(hqlQuery, expected);
+    hqlQuery =
+      rewrite("select citydim.name, SUM(msr2) from" + " testCube"
+        + " left outer join citydim on testCube.cityid = citydim.id"
+        + " left outer join zipdim on citydim.zipcode = zipdim.code" + " where " + TWO_DAYS_RANGE, getConf());
+    expected =
+      getExpectedQuery(TEST_CUBE_NAME, "select citydim.name as `name`," + " sum(testcube.msr2)  as `sum(msr2)`FROM ",
+          " LEFT OUTER JOIN "
+          + getDbName() + "c1_citytable citydim ON" + " testCube.cityid = citydim.id and (citydim.dt = 'latest') "
+          + " LEFT OUTER JOIN " + getDbName() + "c1_ziptable" + " zipdim ON citydim.zipcode = zipdim.code AND "
+          + "(zipdim.dt = 'latest')", null, " group by" + " citydim.name ", null,
+        getWhereForHourly2days(TEST_CUBE_NAME, "C1_testfact2"));
+    compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite("select SUM(msr2) from testCube" + " join countrydim on testCube.countryid = countrydim.id" + " where "
@@ -634,8 +630,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     Configuration conf = getConf();
     conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
 
-   String hqlQuery =
-     rewrite("select name, SUM(msr2) from" + " testCube join citydim on testCube.cityid = citydim.id where "
+    String hqlQuery =
+      rewrite("select name, SUM(msr2) from" + " testCube join citydim on testCube.cityid = citydim.id where "
         + TWO_DAYS_RANGE, conf);
     String expected =
       getExpectedQuery(TEST_CUBE_NAME, "select citydim.name as `name`, sum(testcube.msr2) as `sum(msr2)` FROM "
@@ -955,7 +951,6 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
   /* The test is to check no failure on partial data when the flag FAIL_QUERY_ON_PARTIAL_DATA is not set
    */
-  // TODO union : check after MaxCoveringFactResolver
   @Test
   public void testQueryWithMeasureWithDataCompletenessTagWithNoFailureOnPartialData() throws ParseException,
           LensException {
@@ -968,24 +963,22 @@ public class TestCubeRewriter extends TestQueryRewrite {
     compareQueries(hqlQuery, expected);
   }
 
-  // TODO union : check after MaxCoveringFactResolver
   @Test
   public void testQueryWithMeasureWithDataCompletenessPresentInMultipleFacts() throws ParseException,
-          LensException {
+      LensException {
     /*In this query a measure is used which is present in two facts with different %completeness. While resolving the
     facts, the fact with the higher dataCompletenessFactor gets picked up.*/
     Configuration conf = getConf();
     conf.setStrings(CubeQueryConfUtil.COMPLETENESS_CHECK_PART_COL, "dt");
     String hqlQuery = rewrite("select SUM(msr9) from basecube where " + TWO_DAYS_RANGE, conf);
     String expected = getExpectedQuery("basecube", "select sum(basecube.msr9) as `sum(msr9)` FROM ", null, null,
-            getWhereForHourly2days("basecube", "c1_testfact5_raw_base"));
+        getWhereForHourly2days("basecube", "c1_testfact5_raw_base"));
     compareQueries(hqlQuery, expected);
   }
 
-  // TODO union : check after MaxCoveringFactResolver
- @Test
+  @Test
   public void testCubeWhereQueryWithMeasureWithDataCompletenessAndFailIfPartialDataFlagSet() throws ParseException,
-          LensException {
+      LensException {
     /*In this query a measure is used for which dataCompletenessTag is set and the flag FAIL_QUERY_ON_PARTIAL_DATA is
    set. The partitions for the queried range are present but some of them have incomplete data. So, the query
     throws NO_CANDIDATE_FACT_AVAILABLE Exception*/
@@ -993,15 +986,16 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.setStrings(CubeQueryConfUtil.COMPLETENESS_CHECK_PART_COL, "dt");
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
 
-    LensException e = getLensExceptionInRewrite("select SUM(msr9) from basecube where " + TWO_DAYS_RANGE, conf);
+    LensException e = getLensExceptionInRewrite("select SUM(msr9) from basecube where "
+        + TWO_DAYS_RANGE, conf);
     assertEquals(e.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
     NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) e;
     PruneCauses.BriefAndDetailedError pruneCauses = ne.getJsonMessage();
    /*Since the Flag FAIL_QUERY_ON_PARTIAL_DATA is set, and the queried fact has incomplete data, hence, we expect the
     prune cause to be INCOMPLETE_PARTITION. The below check is to validate this.*/
     assertEquals(pruneCauses.getBrief().substring(0, INCOMPLETE_PARTITION.errorFormat.length() - 3),
-            INCOMPLETE_PARTITION.errorFormat.substring(0,
-                    INCOMPLETE_PARTITION.errorFormat.length() - 3), pruneCauses.getBrief());
+        INCOMPLETE_PARTITION.errorFormat.substring(0,
+            INCOMPLETE_PARTITION.errorFormat.length() - 3), pruneCauses.getBrief());
   }
 
   @Test
@@ -1021,7 +1015,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
         MISSING_PARTITIONS.errorFormat.length() - 3), pruneCauses.getBrief());
 
     Set<String> expectedSet =
-      Sets.newTreeSet(Arrays.asList("summary1", "summary2", "testfact2_raw", "summary3", "testfact"));
+      Sets.newTreeSet(Arrays.asList("c1_testfact2_raw", "c1_summary3", "c1_summary2",
+          "c1_summary1", "c2_testfact", "c1_testfact"));
     boolean missingPartitionCause = false;
     for (String key : pruneCauses.getDetails().keySet()) {
       Set<String> actualKeySet = Sets.newTreeSet(Splitter.on(',').split(key));
@@ -1033,12 +1028,13 @@ public class TestCubeRewriter extends TestQueryRewrite {
     }
     assertTrue(missingPartitionCause, MISSING_PARTITIONS + " error does not occur for facttables set " + expectedSet
       + " Details :" + pruneCauses.getDetails());
-    assertEquals(pruneCauses.getDetails().get("testfactmonthly").iterator().next().getCause(),
-      NO_FACT_UPDATE_PERIODS_FOR_GIVEN_RANGE);
-    assertEquals(pruneCauses.getDetails().get("testfact2").iterator().next().getCause(),
+    assertEquals(pruneCauses.getDetails().get("c1_testfact2").iterator().next().getCause(),
       MISSING_PARTITIONS);
-    assertEquals(pruneCauses.getDetails().get("cheapfact").iterator().next().getCause(),
-      NO_CANDIDATE_STORAGES);
+    /*
+    assertEquals(pruneCauses.getDetails().get("c4_testfact,c3_testfact,c3_testfact2_raw,c4_testfact2," +
+        "c99_cheapfact,c5_testfact").iterator().next().getCause(),
+      UNSUPPORTED_STORAGE);
+
     CandidateTablePruneCause cheapFactPruneCauses = pruneCauses.getDetails().get("cheapfact").iterator().next();
     assertEquals(cheapFactPruneCauses.getDimStoragePruningCauses().get("c0"),
         CandidateTablePruneCause.CandidateTablePruneCode.TIME_RANGE_NOT_ANSWERABLE);
@@ -1046,6 +1042,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
         CandidateTablePruneCause.CandidateTablePruneCode.UNSUPPORTED_STORAGE);
     assertEquals(pruneCauses.getDetails().get("summary4").iterator().next().getCause(), TIMEDIM_NOT_SUPPORTED);
     assertTrue(pruneCauses.getDetails().get("summary4").iterator().next().getUnsupportedTimeDims().contains("d_time"));
+    */
   }
 
   @Test
@@ -1063,18 +1060,19 @@ public class TestCubeRewriter extends TestQueryRewrite {
   @Test
   public void testNoCandidateDimAvailableExceptionCompare() throws Exception {
 
-    //Max cause COLUMN_NOT_FOUND, Ordinal 9
+    //Max cause COLUMN_NOT_FOUND, Ordinal 2
     PruneCauses<CubeDimensionTable> pr1 = new PruneCauses<CubeDimensionTable>();
     pr1.addPruningMsg(new CubeDimensionTable(new Table("test", "citydim")),
-            CandidateTablePruneCause.columnNotFound("test1", "test2", "test3"));
+            CandidateTablePruneCause.columnNotFound(
+                CandidateTablePruneCause.CandidateTablePruneCode.COLUMN_NOT_FOUND, "test1", "test2", "test3"));
     NoCandidateDimAvailableException ne1 = new NoCandidateDimAvailableException(pr1);
 
-    //Max cause EXPRESSION_NOT_EVALUABLE, Ordinal 6
+    //Max cause EXPRESSION_NOT_EVALUABLE, Ordinal 14
     PruneCauses<CubeDimensionTable> pr2 = new PruneCauses<CubeDimensionTable>();
     pr2.addPruningMsg(new CubeDimensionTable(new Table("test", "citydim")),
             CandidateTablePruneCause.expressionNotEvaluable("testexp1", "testexp2"));
     NoCandidateDimAvailableException ne2 = new NoCandidateDimAvailableException(pr2);
-    assertEquals(ne1.compareTo(ne2), 3);
+    assertEquals(ne1.compareTo(ne2), -12);
   }
 
   @Test
@@ -1261,8 +1259,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
         " testcube.cityid > 100 ", " group by testcube.cityid having" + " sum(testCube.msr2) < 1000",
         getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact")),
       getExpectedQuery(TEST_CUBE_NAME, "SELECT testCube.cityid as `cityid`, sum(testCube.msr2) as `msr2`" + " FROM ",
-        " testcube.cityid > 100 ", " group by testcube.cityid having"
-          + " sum(testCube.msr2) < 1000 order by testCube.cityid asc",
+        " testcube.cityid > 100 ", " group by testCube.cityid having"
+          + " sum(testCube.msr2) < 1000 order by cityid asc",
         getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact")),
     };
     Configuration conf = getConf();
@@ -1319,8 +1317,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
         getConf());
     expected =
       getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1 as `dim1`, testcube,dim2 as `dim2`, "
-          + "count(testcube.msr4) as `count(msr4)`, sum(testcube.msr2) as `sum(msr2)`, " +
-          "max(testcube.msr3) as `msr3` FROM ", null, " group by testcube.dim1, testcube.dim2",
+          + "count(testcube.msr4) as `count(msr4)`, sum(testcube.msr2) as `sum(msr2)`, "
+          + "max(testcube.msr3) as `msr3` FROM ", null, " group by testcube.dim1, testcube.dim2",
         getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "it", "C2_summary2"),
         null);
     compareQueries(hqlQuery, expected);
@@ -1390,8 +1388,6 @@ public class TestCubeRewriter extends TestQueryRewrite {
     compareQueries(hqlQuery, expected);
   }
 
-  // TODO union : Uncomment below test after deleting CandidateFact
-  /*
   @Test
   public void testLookAhead() throws Exception {
 
@@ -1400,15 +1396,15 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.setClass(CubeQueryConfUtil.TIME_RANGE_WRITER_CLASS, AbridgedTimeRangeWriter.class, TimeRangeWriter.class);
     CubeQueryContext ctx = rewriteCtx("select dim1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT,
       conf);
-    //assertEquals(ctx.candidateFacts.size(), 1);
-    //CandidateFact candidateFact = ctx.candidateFacts.iterator().next();
-    Set<FactPartition> partsQueried = new TreeSet<>(candidateFact.getPartsQueried());
+    assertEquals(ctx.getCandidates().size(), 1);
+    Candidate candidate = ctx.getCandidates().iterator().next();
+    Set<FactPartition> partsQueried = new TreeSet<>(((StorageCandidate)candidate).getParticipatingPartitions());
     Date ceilDay = DAILY.getCeilDate(getDateWithOffset(DAILY, -2));
     Date nextDay = DateUtils.addDays(ceilDay, 1);
     Date nextToNextDay = DateUtils.addDays(nextDay, 1);
     HashSet<String> storageTables = Sets.newHashSet();
-    for (String storageTable : candidateFact.getStorageTables()) {
-      storageTables.add(storageTable.split("\\.")[1]);
+    for (StorageCandidate sc : CandidateUtil.getStorageCandidates(candidate)) {
+      storageTables.add(sc.getName());
     }
     TreeSet<FactPartition> expectedPartsQueried = Sets.newTreeSet();
     for (TimePartition p : Iterables.concat(
@@ -1429,11 +1425,11 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.setInt(CubeQueryConfUtil.LOOK_AHEAD_PT_PARTS_PFX, 3);
     ctx = rewriteCtx("select dim1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT,
       conf);
-    partsQueried = new TreeSet<>(ctx.candidateFacts.iterator().next().getPartsQueried());
+    partsQueried = new TreeSet<>(((StorageCandidate)ctx.getCandidates().iterator().next())
+        .getParticipatingPartitions());
     // pt does not exist beyond 1 day. So in this test, max look ahead possible is 3
     assertEquals(partsQueried, expectedPartsQueried);
   }
-  */
 
   @Test
   public void testCubeQueryWithMultipleRanges() throws Exception {
@@ -1495,7 +1491,6 @@ public class TestCubeRewriter extends TestQueryRewrite {
     compareQueries(hqlQuery, expected);
   }
 
-  //TODO union : Wrong fact selected. Verify after MaxCoveringFactResolver changes.
   @Test
   public void testJoinWithMultipleAliases() throws Exception {
     String cubeQl =
@@ -1570,7 +1565,6 @@ public class TestCubeRewriter extends TestQueryRewrite {
     }
   }
 
-  //TODO union: Verify after MaxCoveringFactResolver changes.
   @Test
   public void testTimeDimensionAndPartCol() throws Exception {
     // Test if time dimension is replaced with partition column

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
index a3bb77c..fb803a2 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
@@ -191,38 +191,44 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
       "select dim2big2, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE, tconf);
     NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) e;
     PruneCauses.BriefAndDetailedError error = ne.getJsonMessage();
-    Assert.assertEquals(error.getBrief(), CandidateTablePruneCode.NO_CANDIDATE_STORAGES.errorFormat);
+    Assert.assertEquals(error.getBrief(), CandidateTablePruneCode.UNSUPPORTED_STORAGE.errorFormat);
 
     HashMap<String, List<CandidateTablePruneCause>> details = error.getDetails();
 
     for (Map.Entry<String, List<CandidateTablePruneCause>> entry : details.entrySet()) {
-      if (entry.getValue().equals(Arrays.asList(CandidateTablePruneCause.columnNotFound("dim2big2")))) {
+      if (entry.getValue().equals(Arrays.asList(CandidateTablePruneCause.columnNotFound(
+          CandidateTablePruneCode.COLUMN_NOT_FOUND, "dim2big2")))) {
         Set<String> expectedKeySet =
-          Sets.newTreeSet(Splitter.on(',').split("summary1,cheapfact,testfactmonthly,testfact2,testfact"));
+          Sets.newTreeSet(Splitter.on(',').split("c1_summary1, c1_testfact,c1_testfact2"));
         Assert.assertTrue(expectedKeySet.equals(Sets.newTreeSet(Splitter.on(',').split(entry.getKey()))));
       }
 
       if (entry.getValue().equals(
         Arrays.asList(new CandidateTablePruneCause(CandidateTablePruneCode.INVALID_DENORM_TABLE)))) {
         Set<String> expectedKeySet =
-          Sets.newTreeSet(Splitter.on(',').split("summary2,testfact2_raw,summary3"));
+          Sets.newTreeSet(Splitter.on(',').split("c2_summary2, c2_summary3, c1_testfact2_raw, "
+              + "c3_testfact2_raw, c1_summary3,c1_summary2"));
         Assert.assertTrue(expectedKeySet.equals(Sets.newTreeSet(Splitter.on(',').split(entry.getKey()))));
       }
 
-      if (entry.getKey().equals("testfact_continuous")) {
+      if (entry.getKey().equals("c0_testfact_continuous")) {
         Assert.assertTrue(entry.getValue().equals(
-          Arrays.asList(CandidateTablePruneCause.columnNotFound("msr2", "msr3")))
-          || entry.getValue().equals(Arrays.asList(CandidateTablePruneCause.columnNotFound("msr3", "msr2"))));
+          Arrays.asList(CandidateTablePruneCause.columnNotFound(CandidateTablePruneCode.COLUMN_NOT_FOUND,
+              "msr2", "msr3")))
+          || entry.getValue().equals(Arrays.asList(CandidateTablePruneCause.columnNotFound(
+            CandidateTablePruneCode.COLUMN_NOT_FOUND, "msr3", "msr2"))));
       }
 
-      if (entry.getKey().equals("summary4")) {
+      if (entry.getKey().equals("c2_summary2, c2_summary3, c2_summary4, c4_testfact, c2_summary1, c3_testfact, "
+          + "c3_testfact2_raw, c4_testfact2, c99_cheapfact, c5_testfact, c0_cheapfact, "
+          + "c2_testfact, c2_testfactmonth, c0_testfact")) {
         List<CandidateTablePruneCause> expectedPruneCauses =
             Arrays.asList(CandidateTablePruneCause.noCandidateStoragesForDimtable(
-          new HashMap<String, CandidateTablePruneCode>() {
-            {
-              put("C2", CandidateTablePruneCode.UNSUPPORTED_STORAGE);
-            }
-          }));
+                new HashMap<String, CandidateTablePruneCode>() {
+                  {
+                    put("C2", CandidateTablePruneCode.UNSUPPORTED_STORAGE);
+                  }
+                }));
         Assert.assertTrue(entry.getValue().equals(expectedPruneCauses));
       }
     }
@@ -279,23 +285,21 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
       "No dimension table has the queried columns " + "for citydim, columns: [name, statename, nocandidatecol]");
   }
 
-  // TODO union : Fix testcase after deleting CandidateFact
-  /*
   @Test
   public void testCubeQueryWithTwoRefCols() throws Exception {
     Configuration tConf = new Configuration(conf);
     tConf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "");
     CubeQueryContext cubeql = rewriteCtx("select dim2, test_time_dim2 from testcube where " + TWO_DAYS_RANGE, tConf);
-    Set<String> candidateFacts = new HashSet<String>();
-    for (CandidateFact cfact : cubeql.getCandidateFacts()) {
-      candidateFacts.add(cfact.getName().toLowerCase());
+    Set<String> candidates = new HashSet<String>();
+    for (Candidate cand : cubeql.getCandidates()) {
+      candidates.add(cand.toString());
     }
     // testfact contains test_time_dim_day_id, but not dim2 - it should have been removed.
-    Assert.assertFalse(candidateFacts.contains("testfact"));
+    Assert.assertFalse(candidates.contains("testfact"));
     // summary2 contains dim2, but not test_time_dim2 - it should have been removed.
-    Assert.assertFalse(candidateFacts.contains("summary2"));
+    Assert.assertFalse(candidates.contains("summary2"));
   }
-*/
+
   @Test
   public void testCubeQueryWithHourDimJoin() throws Exception {
     Configuration tConf = new Configuration(conf);
@@ -336,8 +340,8 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
       + "c1_citytable citydim on basecube.cityid = citydim.id and (citydim.dt = 'latest') "
       + " join " + getDbName() + "c1_ziptable cityzip on citydim.zipcode = cityzip.code and (cityzip.dt = 'latest')";
     String expected =
-      getExpectedQuery("basecube", "SELECT (cityzip.code) as `code`, (basecube.dim22) as `dim22`, " +
-          "(basecube.msr11) as `msr11` FROM ", joinExpr, null, null, null,
+      getExpectedQuery("basecube", "SELECT (cityzip.code) as `code`, (basecube.dim22) as `dim22`, "
+          + "(basecube.msr11) as `msr11` FROM ", joinExpr, null, null, null,
         getWhereForHourly2days("basecube", "C1_testfact2_raw_base"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
index f93a548..dd18ffd 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
@@ -91,7 +91,8 @@ public class TestExpressionResolver extends TestQueryRewrite {
     String hqlQuery = rewrite("select msr2 from testCube" + " where " + TWO_DAYS_RANGE + " and substrexpr != 'XYZ'",
       conf);
     String expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) as `msr2` FROM ", null, " and substr(testCube.dim1, 3) != 'XYZ'",
+      getExpectedQuery(cubeName, "select sum(testcube.msr2) as `msr2` FROM ", null,
+          " and substr(testCube.dim1, 3) != 'XYZ'",
         getWhereForDailyAndHourly2days(cubeName, "c1_summary1"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
   }
@@ -252,10 +253,11 @@ public class TestExpressionResolver extends TestQueryRewrite {
         + " group by booleancut having msr6 > 100.0 order by booleancut", conf);
     String expected =
       getExpectedQuery(cubeName, "SELECT (((testcube.dim1) != 'x') and ((testcube.dim2) != 10)) as `booleancut`, "
-          + "avg(((testcube.msr1) + (testcube.msr2))) as `avgmsr` FROM ", null, " and substr(testCube.dim1, 3) != 'XYZ' "
+          + "avg(((testcube.msr1) + (testcube.msr2))) as `avgmsr` FROM ", null,
+          " and substr(testCube.dim1, 3) != 'XYZ' "
           + " group by testCube.dim1 != 'x' AND testCube.dim2 != 10"
           + " having (sum(testCube.msr2) + max(testCube.msr3))/ count(testcube.msr4) > 100.0"
-          + " order by testCube.dim1 != 'x' AND testCube.dim2 != 10 asc", getWhereForHourly2days("C1_testfact2_raw"));
+          + " order by booleancut asc", getWhereForHourly2days("C1_testfact2_raw"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
   }
   @Test
@@ -287,8 +289,8 @@ public class TestExpressionResolver extends TestQueryRewrite {
   @Test
   public void testMultipleExpressionsPickingSecondExpression() throws Exception {
     String hqlQuery = rewrite("select equalsums from testCube where " + TWO_DAYS_RANGE, conf);
-    String expected = getExpectedQuery(cubeName, "select (max(testCube.msr3) + sum(testCube.msr2))/100 " +
-        "as `equalsums` FROM ", null, null, getWhereForHourly2days(cubeName, "C1_testfact2"));
+    String expected = getExpectedQuery(cubeName, "select (max(testCube.msr3) + sum(testCube.msr2))/100 "
+        + "as `equalsums` FROM ", null, null, getWhereForHourly2days(cubeName, "C1_testfact2"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
index 0d7e8ef..f31156a 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
@@ -186,8 +186,8 @@ public class TestJoinResolver extends TestQueryRewrite {
     String query = "select cubecity.name, msr2 FROM testCube WHERE " + TWO_DAYS_RANGE;
     String hqlQuery = rewrite(query, tConf);
     // Check that aliases are preserved in the join clause
-    String expected = getExpectedQuery("testcube", "SELECT (cubecity.name) as `name`, sum((testcube.msr2)) " +
-        "as `msr2` FROM ", " left outer join " + getDbName()
+    String expected = getExpectedQuery("testcube", "SELECT (cubecity.name) as `name`, sum((testcube.msr2)) "
+        + "as `msr2` FROM ", " left outer join " + getDbName()
         + "c1_citytable cubecity ON testcube.cityid = cubecity.id and (cubecity.dt = 'latest')",
       null, " group by cubecity.name", null, getWhereForHourly2days("testcube", "c1_testfact2"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
@@ -356,9 +356,11 @@ public class TestJoinResolver extends TestQueryRewrite {
     // Single joinchain with two chains, accessed as refcolumn
     query = "select cityStateCapital, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
     hqlQuery = rewrite(query, hconf);
-    expected = getExpectedQuery("basecube", "SELECT (citystate.capital) as `citystatecapital`, sum((basecube.msr2)) as `sum(msr2)` FROM ",
-      " join " + getDbName() + "c1_citytable citydim ON baseCube.cityid = citydim.id and citydim.dt = 'latest'"
-        + " join " + getDbName() + "c1_statetable cityState ON citydim.stateid=cityState.id and cityState.dt= 'latest'",
+    expected = getExpectedQuery("basecube", "SELECT (citystate.capital) as `citystatecapital`, "
+        + "sum((basecube.msr2)) as `sum(msr2)` FROM ",
+        " join " + getDbName() + "c1_citytable citydim ON baseCube.cityid = citydim.id and citydim.dt = 'latest'"
+        + " join " + getDbName() + "c1_statetable cityState ON citydim.stateid=cityState.id "
+        + "and cityState.dt= 'latest'",
       null, "group by citystate.capital",
       null, getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
@@ -375,7 +377,7 @@ public class TestJoinResolver extends TestQueryRewrite {
         + "sum((basecube.msr2)) as `sum(msr2)` FROM ", " join "
         + getDbName() + "c1_citytable citydim ON baseCube.cityid = citydim.id and citydim.dt = 'latest'"
         + " join " + getDbName() + "c1_statetable cityState ON citydim.stateid=cityState.id and cityState.dt= 'latest'",
-      null, "group by citystate.capital order by citystate.capital asc",
+      null, "group by citystate.capital order by citystatecapital",
       null, getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
@@ -454,8 +456,8 @@ public class TestJoinResolver extends TestQueryRewrite {
     query = "select cubeStateCountry.name, cubeCityStateCountry.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
     hqlQuery = rewrite(query, hconf);
     expected = getExpectedQuery("basecube",
-      "SELECT (cubestatecountry.name) as `name`, (cubecitystatecountry.name) as `name`, sum((basecube.msr2)) as `sum(msr2)` FROM ",
-      ""
+      "SELECT (cubestatecountry.name) as `name`, (cubecitystatecountry.name) as `name`, sum((basecube.msr2)) "
+        + "as `sum(msr2)` FROM ", ""
         + " join " + getDbName() + "c1_citytable citydim on basecube.cityid = citydim.id and (citydim.dt = 'latest')"
         + " join " + getDbName()
         + "c1_statetable statedim_0 on citydim.stateid=statedim_0.id and statedim_0.dt='latest'"
@@ -617,8 +619,8 @@ public class TestJoinResolver extends TestQueryRewrite {
 
     query = "select cubecity.name, dim4chain.name, testdim3id, avg(msr2) from testcube where " + TWO_DAYS_RANGE;
     hqlQuery = rewrite(query, hconf);
-    expected = getExpectedQuery("testcube", "select cubecity.name as `name`, dim4chain.name as `name`, " +
-        "dim3chain.id as `testdim3id`, avg(testcube.msr2) as `avg(msr2)`"
+    expected = getExpectedQuery("testcube", "select cubecity.name as `name`, dim4chain.name as `name`, "
+        + "dim3chain.id as `testdim3id`, avg(testcube.msr2) as `avg(msr2)`"
         + "FROM ", " join "
         + getDbName() + "c1_testdim2tbl testdim2 ON testcube.dim2 = testdim2.id and testdim2.dt = 'latest'"
         + " join " + getDbName()

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
index 27a18f4..f9a5421 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
@@ -47,24 +47,26 @@ public class TestQueryMetrics extends TestQueryRewrite {
     Assert.assertTrue(reg.getGauges().keySet().containsAll(Arrays.asList(
         "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.AggregateResolver-ITER-6",
         "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.AliasReplacer-ITER-1",
-        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.CandidateTableResolver-ITER-11",
+        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse"
+            + ".CandidateCoveringSetsResolver-ITER-13",
+        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.CandidateTableResolver-ITER-12",
         "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.CandidateTableResolver-ITER-5",
         "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.ColumnResolver-ITER-0",
-        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.DenormalizationResolver-ITER-16",
+        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.DenormalizationResolver-ITER-17",
         "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.DenormalizationResolver-ITER-3",
-        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.ExpressionResolver-ITER-17",
+        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.ExpressionResolver-ITER-18",
         "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.ExpressionResolver-ITER-2",
         "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.FieldValidator-ITER-8",
         "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.GroupbyResolver-ITER-7",
-        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.JoinResolver-ITER-9",
-        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LeastPartitionResolver-ITER-19",
-        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LightestDimensionResolver-ITER-20",
-        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LightestFactResolver-ITER-18",
-        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.MaxCoveringFactResolver-ITER-14",
-        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.StorageTableResolver-ITER-12",
-        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.StorageTableResolver-ITER-13",
-        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.StorageTableResolver-ITER-15",
-        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.TimeRangeChecker-ITER-10",
+        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.JoinResolver-ITER-10",
+        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LeastPartitionResolver-ITER-20",
+        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LightestDimensionResolver-ITER-21",
+        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LightestFactResolver-ITER-19",
+        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.MaxCoveringFactResolver-ITER-15",
+        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.StorageTableResolver-ITER-14",
+        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.StorageTableResolver-ITER-16",
+        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.StorageTableResolver-ITER-9",
+        "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.TimeRangeChecker-ITER-11",
         "lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.TimerangeResolver-ITER-4")
     ), reg.getGauges().keySet().toString());
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
index a14296c..76ea77d 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
@@ -63,7 +63,6 @@ public class TestRewriterPlan extends TestQueryRewrite {
     Assert.assertTrue(plan.getPartitions().get("c2_testfact").size() > 1);
   }
 
-  //TODO union : Wrong fact name picked. Check after MaxCoveringSetResolver changes.
   @Test
   public void testPlanExtractionForComplexQuery() throws Exception {
     // complex query
@@ -86,7 +85,6 @@ public class TestRewriterPlan extends TestQueryRewrite {
     Assert.assertEquals(plan.getPartitions().get("citytable").size(), 1);
   }
 
-  //TODO union : Wrong fact name picked. Check after MaxCoveringSetResolver changes.
   @Test
   public void testPlanExtractionForMultipleQueries() throws Exception {
     // simple query

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
index 280a8c4..1e5facd 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
@@ -21,15 +21,14 @@ package org.apache.lens.cube.parse;
 
 import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.COLUMN_NOT_FOUND;
-import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.FACT_NOT_AVAILABLE_IN_RANGE;
+import static org.apache.lens.cube.parse.CandidateTablePruneCause.
+    CandidateTablePruneCode.STORAGE_NOT_AVAILABLE_IN_RANGE;
+import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.UNSUPPORTED_STORAGE;
 
 import static org.testng.Assert.assertEquals;
 import static org.testng.Assert.assertTrue;
 
-import java.util.Calendar;
-import java.util.GregorianCalendar;
-import java.util.List;
-import java.util.Set;
+import java.util.*;
 
 import org.apache.lens.cube.error.NoCandidateFactAvailableException;
 import org.apache.lens.cube.metadata.TimeRange;
@@ -74,12 +73,14 @@ public class TestTimeRangeResolver extends TestQueryRewrite {
         getConf());
     NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) e;
     PruneCauses.BriefAndDetailedError causes = ne.getJsonMessage();
-    assertTrue(causes.getBrief().contains("Columns [msr2] are not present in any table"));
-    assertEquals(causes.getDetails().size(), 2);
+    assertTrue(causes.getBrief().contains("No storages available for all of these time ranges: "
+          + "[dt [2016-01-01-00:00:00,000 to 2017-01-01-00:00:00,000)]"));
+    assertEquals(causes.getDetails().size(), 3);
 
     Set<CandidateTablePruneCause.CandidateTablePruneCode> expectedPruneCodes = Sets.newTreeSet();
-    expectedPruneCodes.add(FACT_NOT_AVAILABLE_IN_RANGE);
     expectedPruneCodes.add(COLUMN_NOT_FOUND);
+    expectedPruneCodes.add(UNSUPPORTED_STORAGE);
+    expectedPruneCodes.add(STORAGE_NOT_AVAILABLE_IN_RANGE);
     Set<CandidateTablePruneCause.CandidateTablePruneCode> actualPruneCodes = Sets.newTreeSet();
     for (List<CandidateTablePruneCause> cause : causes.getDetails().values()) {
       assertEquals(cause.size(), 1);
@@ -93,13 +94,27 @@ public class TestTimeRangeResolver extends TestQueryRewrite {
     CubeQueryContext ctx =
       rewriteCtx("select msr12 from basecube where " + TWO_DAYS_RANGE + " or " + TWO_DAYS_RANGE_BEFORE_4_DAYS,
         getConf());
-    assertEquals(ctx.getFactPruningMsgs().get(ctx.getMetastoreClient().getCubeFact("testfact_deprecated")).size(), 1);
-    CandidateTablePruneCause pruningMsg =
-      ctx.getFactPruningMsgs().get(ctx.getMetastoreClient().getCubeFact("testfact_deprecated")).get(0);
+    List<CandidateTablePruneCause> causes = findPruningMessagesForStorage("c3_testfact_deprecated",
+      ctx.getStoragePruningMsgs());
+    assertEquals(causes.size(), 1);
+    assertEquals(causes.get(0).getCause(), UNSUPPORTED_STORAGE);
+
+    causes = findPruningMessagesForStorage("c4_testfact_deprecated", ctx.getStoragePruningMsgs());
+    assertEquals(causes.size(), 1);
+    assertEquals(causes.get(0).getCause(), UNSUPPORTED_STORAGE);
+
     // testfact_deprecated's validity should be in between of both ranges. So both ranges should be in the invalid list
     // That would prove that parsing of properties has gone through successfully
-    assertEquals(pruningMsg.getCause(), FACT_NOT_AVAILABLE_IN_RANGE);
-    assertTrue(pruningMsg.getInvalidRanges().containsAll(ctx.getTimeRanges()));
+
+    causes = findPruningMessagesForStorage("c1_testfact_deprecated", ctx.getStoragePruningMsgs());
+    assertEquals(causes.size(), 1);
+    assertEquals(causes.get(0).getCause(), STORAGE_NOT_AVAILABLE_IN_RANGE);
+    assertTrue(causes.get(0).getInvalidRanges().containsAll(ctx.getTimeRanges()));
+
+    causes = findPruningMessagesForStorage("c2_testfact_deprecated", ctx.getStoragePruningMsgs());
+    assertEquals(causes.size(), 1);
+    assertEquals(causes.get(0).getCause(), STORAGE_NOT_AVAILABLE_IN_RANGE);
+    assertTrue(causes.get(0).getInvalidRanges().containsAll(ctx.getTimeRanges()));
   }
 
   @Test
@@ -115,4 +130,21 @@ public class TestTimeRangeResolver extends TestQueryRewrite {
     assertEquals(timeRange.getFromDate(), from.getTime());
     assertEquals(timeRange.getToDate(), dt.toDate());
   }
+
+  /**
+   * Finds the pruning messages recorded for a given storage candidate.
+   *
+   * @param storageName  candidate name in the form storageName_factName
+   * @param allStoragePruningMsgs  pruning causes keyed by storage candidate
+   * @return the pruning causes for the matching storage, or an empty list if none match
+   */
+  private static List<CandidateTablePruneCause> findPruningMessagesForStorage(String storageName,
+    PruneCauses<StorageCandidate> allStoragePruningMsgs) {
+    for (StorageCandidate sc : allStoragePruningMsgs.keySet()) {
+      if (sc.getName().equals(storageName)) {
+        return allStoragePruningMsgs.get(sc);
+      }
+    }
+    return new ArrayList<CandidateTablePruneCause>();
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
index 1eb7217..09e09f8 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
@@ -79,7 +79,7 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     return cal.getTime();
   }
 
-  //TODO union : Wrong fact table picked. Check after MaxCoveringSetResolver
+  //TODO union : Revisit Continuous update period.
   @Test
   public void testCubeQueryContinuousUpdatePeriod() throws Exception {
     LensException th = null;
@@ -102,10 +102,11 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     String hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + twoDaysInRangeClause, conf);
     Map<String, String> whereClauses = new HashMap<String, String>();
     whereClauses.put(
-      getDbName() + "c1_testfact",
+      getDbName() + "c2_testfact",
       TestBetweenTimeRangeWriter.getBetweenClause(cubeName, "dt",
         getDateWithOffset(DAILY, -2), getDateWithOffset(DAILY, 0), CONTINUOUS.format()));
-    String expected = getExpectedQuery(cubeName, "select sum(testcube.msr2) as `sum(msr2)` FROM ", null, null, whereClauses);
+    String expected = getExpectedQuery(cubeName, "select sum(testcube.msr2) as `sum(msr2)` FROM ",
+        null, null, whereClauses);
     System.out.println("HQL:" + hqlQuery);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
@@ -119,13 +120,13 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
 
     whereClauses = new HashMap<String, String>();
     whereClauses.put(
-      getDbName() + "c1_testfact",
+      getDbName() + "c2_testfact",
       TestBetweenTimeRangeWriter.getBetweenClause(cubeName, "dt", getDateWithOffset(DAILY, -2),
         getDateWithOffset(DAILY, 0), CONTINUOUS.format())
         + " OR"
         + TestBetweenTimeRangeWriter.getBetweenClause(cubeName, "dt", getDateWithOffset(DAILY, -6),
         getDateWithOffset(DAILY, 0), CONTINUOUS.format()));
-    expected = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, whereClauses);
+    expected = getExpectedQuery(cubeName, "select sum(testcube.msr2) as `sum(msr2)` FROM ", null, null, whereClauses);
     System.out.println("HQL:" + hqlQuery);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
@@ -133,10 +134,10 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     conf.set(CubeQueryConfUtil.PART_WHERE_CLAUSE_DATE_FORMAT, "yyyy-MM-dd HH:mm:ss");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     whereClauses = new HashMap<String, String>();
-    whereClauses.put(getDbName() + "c1_testfact", TestBetweenTimeRangeWriter.getBetweenClause(cubeName,
+    whereClauses.put(getDbName() + "c2_testfact", TestBetweenTimeRangeWriter.getBetweenClause(cubeName,
       "dt", getUptoHour(TWODAYS_BACK),
       getUptoHour(NOW), TestTimeRangeWriter.DB_FORMAT));
-    expected = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, whereClauses);
+    expected = getExpectedQuery(cubeName, "select sum(testcube.msr2) as `sum(msr2)` FROM ", null, null, whereClauses);
     System.out.println("HQL:" + hqlQuery);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/test/java/org/apache/lens/cube/parse/TestUnionAndJoinCandidates.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestUnionAndJoinCandidates.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestUnionAndJoinCandidates.java
index 935c739..931f789 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestUnionAndJoinCandidates.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestUnionAndJoinCandidates.java
@@ -107,8 +107,8 @@ public class TestUnionAndJoinCandidates extends TestQueryRewrite {
           + "sum(0.0) as `alias4` FROM TestQueryRewrite.c1_union_join_ctx_fact2";
       expectedInnerSelect3 = "SELECT (basecube.union_join_ctx_cityid) as `alias0`, (cubecityjoinunionctx.name) "
           + "as `alias1`, case  when (basecube.union_join_ctx_cityid) is null then 0 else "
-          + "(basecube.union_join_ctx_cityid) end as `alias2`, sum(0.0) as `alias3`, " +
-          "sum((basecube.union_join_ctx_msr2)) as `alias4` FROM TestQueryRewrite.c1_union_join_ctx_fact3";
+          + "(basecube.union_join_ctx_cityid) end as `alias2`, sum(0.0) as `alias3`, "
+          + "sum((basecube.union_join_ctx_msr2)) as `alias4` FROM TestQueryRewrite.c1_union_join_ctx_fact3";
       String outerGroupBy = "GROUP BY (basecube.alias0), (basecube.alias1), (basecube.alias2)";
       compareContains(outerSelect, rewrittenQuery);
       compareContains(expectedInnerSelect1, rewrittenQuery);
@@ -120,28 +120,25 @@ public class TestUnionAndJoinCandidates extends TestQueryRewrite {
       colsSelected = " union_join_ctx_cityid as `city id`, union_join_ctx_cityname, sum(union_join_ctx_msr1), "
           + "sum(union_join_ctx_msr2), union_join_ctx_non_zero_msr2_sum, union_join_ctx_msr1_greater_than_100, "
           + "sum(union_join_ctx_msr1) + 10 ";
-      //colsSelected = " union_join_ctx_cityid as `city id`, union_join_ctx_msr1_greater_than_100, union_join_ctx_non_zero_msr2_sum ";
       whereCond = " union_join_ctx_zipcode = 'a' and union_join_ctx_cityid = 'b' and "
           + "(" + TWO_MONTHS_RANGE_UPTO_DAYS + ")";
       rewrittenQuery = rewrite("select " + colsSelected + " from basecube where " + whereCond, conf);
       outerSelect = "SELECT (basecube.alias0) as `city id`, (basecube.alias1) as `union_join_ctx_cityname`, "
           + "sum((basecube.alias2)) as `sum(union_join_ctx_msr1)`, sum((basecube.alias3)) "
           + "as `sum(union_join_ctx_msr2)`, sum((basecube.alias4)) as `union_join_ctx_non_zero_msr2_sum`, "
-          + "case  when (sum((basecube.alias5)) > 100) then \"high\" else \"low\" end "
-          + "as `union_join_ctx_msr1_greater_than_100`, (sum((basecube.alias6)) + 10) "
+          + "case  when (sum((basecube.alias2)) > 100) then \"high\" else \"low\" end as "
+          + "`union_join_ctx_msr1_greater_than_100`, (sum((basecube.alias2)) + 10) "
           + "as `(sum(union_join_ctx_msr1) + 10)` FROM ";
       expectedInnerSelect1 = "SELECT (basecube.union_join_ctx_cityid) as `alias0`, "
           + "(cubecityjoinunionctx.name) as `alias1`, sum((basecube.union_join_ctx_msr1)) as `alias2`, "
-          + "sum(0.0) as `alias3`, sum(0.0) as `alias4`, sum((basecube.union_join_ctx_msr1)) as `alias5`, "
-          + "sum((basecube.union_join_ctx_msr1)) as `alias6`";
+          + "sum(0.0) as `alias3`, sum(0.0) as `alias4` FROM";
       expectedInnerSelect2 = "SELECT (basecube.union_join_ctx_cityid) as `alias0`, "
           + "(cubecityjoinunionctx.name) as `alias1`, sum((basecube.union_join_ctx_msr1)) as `alias2`, "
-          + "sum(0.0) as `alias3`, sum(0.0) as `alias4`, sum((basecube.union_join_ctx_msr1)) as `alias5`, "
-          + "sum((basecube.union_join_ctx_msr1)) as `alias6`";
-      expectedInnerSelect3 = "SELECT (basecube.union_join_ctx_cityid) as `alias0`, "
-          + "(cubecityjoinunionctx.name) as `alias1`, sum(0.0) as `alias2`, sum((basecube.union_join_ctx_msr2)) "
-          + "as `alias3`, sum(case  when ((basecube.union_join_ctx_msr2) > 0) then (basecube.union_join_ctx_msr2) "
-          + "else 0 end) as `alias4`, sum(0.0) as `alias5`, sum(0.0) as `alias6`";
+          + "sum(0.0) as `alias3`, sum(0.0) as `alias4` FROM";
+      expectedInnerSelect3 = "SELECT (basecube.union_join_ctx_cityid) as `alias0`, (cubecityjoinunionctx.name) "
+          + "as `alias1`, sum(0.0) as `alias2`, sum((basecube.union_join_ctx_msr2)) as `alias3`, "
+          + "sum(case  when ((basecube.union_join_ctx_msr2) > 0) then (basecube.union_join_ctx_msr2) else 0 end) "
+          + "as `alias4` FROM";
       String innerGroupBy = "GROUP BY (basecube.union_join_ctx_cityid), (cubecityjoinunionctx.name)";
       outerGroupBy = "GROUP BY (basecube.alias0), (basecube.alias1)";
 

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/test/java/org/apache/lens/cube/parse/TestUnionQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestUnionQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestUnionQueries.java
index 42282e9..c984a05 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestUnionQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestUnionQueries.java
@@ -334,7 +334,6 @@ public class TestUnionQueries extends TestQueryRewrite {
     }
   }
 
-  //TODO union : Revisit after MaxCoveringFactResolver
   @Test
   public void testCubeWhereQueryWithMultipleTables() throws Exception {
     Configuration conf = getConf();


[3/4] lens git commit: Deleted deprecated classes, Fixed Checkstyles, Fixed test cases, Fixed duplicate projections

Posted by pu...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java
index 636b1d0..8ba69c4 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java
@@ -18,7 +18,6 @@
  */
 package org.apache.lens.cube.parse;
 
-import static org.apache.hadoop.hive.ql.parse.HiveParser.Identifier;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.*;
 import static org.apache.lens.cube.parse.StorageUtil.*;
 
@@ -32,6 +31,7 @@ import org.apache.lens.server.api.metastore.DataCompletenessChecker;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
@@ -67,6 +67,11 @@ public class StorageCandidate implements Candidate, CandidateTable {
   @Getter
   private TreeSet<UpdatePeriod> validUpdatePeriods = new TreeSet<>();
   private Configuration conf = null;
+
+  /**
+   * This map holds Tags (A tag refers to one or more measures) that have incomplete (below configured threshold) data.
+   * Value is a map of date string and %completeness.
+   */
   @Getter
   private Map<String, Map<String, Float>> dataCompletenessMap = new HashMap<>();
   private SimpleDateFormat partWhereClauseFormat = null;
@@ -94,18 +99,16 @@ public class StorageCandidate implements Candidate, CandidateTable {
   @Getter
   private CubeInterface cube;
   @Getter
-  Map<Dimension, CandidateDim> dimsToQuery;
+  private Map<Dimension, CandidateDim> dimsToQuery;
+  @Getter
+  private Date startTime;
+  @Getter
+  private Date endTime;
   /**
    * Cached fact columns
    */
   private Collection<String> factColumns;
-  /**
-   * This map holds Tags (A tag refers to one or more measures) that have incomplete (below configured threshold) data.
-   * Value is a map of date string and %completeness.
-   */
-  @Getter
-  @Setter
-  private Map<String, Map<String, Float>> incompleteDataDetails;
+
   /**
    * Partition calculated by getPartition() method.
    */
@@ -114,11 +117,13 @@ public class StorageCandidate implements Candidate, CandidateTable {
   /**
    * Non existing partitions
    */
+  @Getter
   private Set<String> nonExistingPartitions = new HashSet<>();
   @Getter
   private int numQueriedParts = 0;
 
-  public StorageCandidate(CubeInterface cube, CubeFactTable fact, String storageName, CubeQueryContext cubeql) {
+  public StorageCandidate(CubeInterface cube, CubeFactTable fact, String storageName, CubeQueryContext cubeql)
+    throws LensException {
     if ((cube == null) || (fact == null) || (storageName == null)) {
       throw new IllegalArgumentException("Cube,fact and storageName should be non null");
     }
@@ -137,12 +142,14 @@ public class StorageCandidate implements Candidate, CandidateTable {
       this.partWhereClauseFormat = new SimpleDateFormat(formatStr);
     }
     completenessPartCol = conf.get(CubeQueryConfUtil.COMPLETENESS_CHECK_PART_COL);
-    client = cubeql.getMetastoreClient();
     completenessThreshold = conf
       .getFloat(CubeQueryConfUtil.COMPLETENESS_THRESHOLD, CubeQueryConfUtil.DEFAULT_COMPLETENESS_THRESHOLD);
+    client = cubeql.getMetastoreClient();
+    startTime = client.getStorageTableStartDate(name, fact.getName());
+    endTime = client.getStorageTableEndDate(name, fact.getName());
   }
 
-  public StorageCandidate(StorageCandidate sc) {
+  public StorageCandidate(StorageCandidate sc) throws LensException {
     this(sc.getCube(), sc.getFact(), sc.getStorageName(), sc.getCubeql());
     // Copy update periods.
     for (UpdatePeriod updatePeriod : sc.getValidUpdatePeriods()) {
@@ -150,42 +157,104 @@ public class StorageCandidate implements Candidate, CandidateTable {
     }
   }
 
-  static boolean containsAny(Collection<String> srcSet, Collection<String> colSet) {
-    if (colSet == null || colSet.isEmpty()) {
-      return true;
+  private void setMissingExpressions(Set<Dimension> queriedDims) throws LensException {
+    setFromString(String.format("%s", getFromTable()));
+    setWhereString(joinWithAnd(
+        genWhereClauseWithDimPartitions(whereString, queriedDims), cubeql.getConf().getBoolean(
+            CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, CubeQueryConfUtil.DEFAULT_REPLACE_TIMEDIM_WITH_PART_COL)
+            ? getPostSelectionWhereClause() : null));
+    if (cubeql.getHavingAST() != null) {
+      queryAst.setHavingAST(MetastoreUtil.copyAST(cubeql.getHavingAST()));
     }
-    for (String column : colSet) {
-      if (srcSet.contains(column)) {
-        return true;
+  }
+
+  private String genWhereClauseWithDimPartitions(String originalWhere, Set<Dimension> queriedDims) {
+    StringBuilder whereBuf;
+    if (originalWhere != null) {
+      whereBuf = new StringBuilder(originalWhere);
+    } else {
+      whereBuf = new StringBuilder();
+    }
+
+    // add where clause for all dimensions
+    if (cubeql != null) {
+      boolean added = (originalWhere != null);
+      for (Dimension dim : queriedDims) {
+        CandidateDim cdim = dimsToQuery.get(dim);
+        String alias = cubeql.getAliasForTableName(dim.getName());
+        if (!cdim.isWhereClauseAdded() && !StringUtils.isBlank(cdim.getWhereClause())) {
+          appendWhereClause(whereBuf, StorageUtil.getWhereClause(cdim, alias), added);
+          added = true;
+        }
       }
     }
-    return false;
+    if (whereBuf.length() == 0) {
+      return null;
+    }
+    return whereBuf.toString();
   }
 
-  private void setMissingExpressions() throws LensException {
-    setFromString(String.format("%s", getFromTable()));
-    setWhereString(joinWithAnd(whereString, null));
-    if (cubeql.getHavingAST() != null) {
-      queryAst.setHavingAST(MetastoreUtil.copyAST(cubeql.getHavingAST()));
+  static void appendWhereClause(StringBuilder filterCondition, String whereClause, boolean hasMore) {
+    // Make sure we add AND only when there are already some conditions in where
+    // clause
+    if (hasMore && !filterCondition.toString().isEmpty() && !StringUtils.isBlank(whereClause)) {
+      filterCondition.append(" AND ");
+    }
+
+    if (!StringUtils.isBlank(whereClause)) {
+      filterCondition.append("(");
+      filterCondition.append(whereClause);
+      filterCondition.append(")");
     }
   }
 
+  protected String getPostSelectionWhereClause() throws LensException {
+    return null;
+  }
+
   public void setAnswerableMeasurePhraseIndices(int index) {
     answerableMeasurePhraseIndices.add(index);
   }
 
-  public String toHQL() throws LensException {
-    setMissingExpressions();
+  public String toHQL(Set<Dimension> queriedDims) throws LensException {
+    setMissingExpressions(queriedDims);
     // Check if the picked candidate is a StorageCandidate and in that case
     // update the selectAST with final alias.
     if (this == cubeql.getPickedCandidate()) {
       CandidateUtil.updateFinalAlias(queryAst.getSelectAST(), cubeql);
+      updateOrderByWithFinalAlias(queryAst.getOrderByAST(), queryAst.getSelectAST());
     }
     return CandidateUtil
       .buildHQLString(queryAst.getSelectString(), fromString, whereString, queryAst.getGroupByString(),
         queryAst.getOrderByString(), queryAst.getHavingString(), queryAst.getLimitValue());
   }
 
+  /**
+   * Update Orderby children with final alias used in select
+   *
+   * @param orderby
+   * @param select
+   */
+  private void updateOrderByWithFinalAlias(ASTNode orderby, ASTNode select) {
+    if (orderby == null) {
+      return;
+    }
+    for(Node orderbyNode : orderby.getChildren()) {
+      ASTNode orderBychild = (ASTNode) orderbyNode;
+      for(Node selectNode : select.getChildren()) {
+        ASTNode selectChild = (ASTNode) selectNode;
+        if (selectChild.getChildCount() == 2) {
+          if (HQLParser.getString((ASTNode) selectChild.getChild(0))
+              .equals(HQLParser.getString((ASTNode) orderBychild.getChild(0)))) {
+            ASTNode alias = new ASTNode((ASTNode) selectChild.getChild(1));
+            orderBychild.replaceChildren(0, 0, alias);
+            break;
+          }
+        }
+      }
+    }
+  }
+
   @Override
   public String getStorageString(String alias) {
     return storageName + " " + alias;
@@ -213,17 +282,6 @@ public class StorageCandidate implements Candidate, CandidateTable {
   }
 
   @Override
-  public Date getStartTime() {
-    // TODO union : get storage stat time and take max out of it
-    return fact.getStartTime();
-  }
-
-  @Override
-  public Date getEndTime() {
-    return fact.getEndTime();
-  }
-
-  @Override
   public double getCost() {
     return fact.weight();
   }
@@ -253,18 +311,19 @@ public class StorageCandidate implements Candidate, CandidateTable {
   /**
    * Gets FactPartitions for the given fact using the following logic
    *
-   * 1. Find the max update interval that will be used for the query. Lets assume time range is 15 Sep to 15 Dec and the
-   * fact has two storage with update periods as MONTHLY,DAILY,HOURLY. In this case the data for
-   * [15 sep - 1 oct)U[1 Dec - 15 Dec) will be answered by DAILY partitions and [1 oct - 1Dec) will be answered by
-   * MONTHLY partitions. The max interavl for this query will be MONTHLY.
+   * 1. Find the max update interval that will be used for the query. Let's assume the time
+   * range is 15 Sep to 15 Dec and the fact has two storages with update periods as MONTHLY,DAILY,HOURLY.
+   * In this case the data for [15 sep - 1 oct)U[1 Dec - 15 Dec) will be answered by DAILY partitions
+   * and [1 oct - 1 Dec) will be answered by MONTHLY partitions. The max interval for this query will be MONTHLY.
    *
    * 2.Prune Storgaes that do not fall in the queries time range.
    * {@link CubeMetastoreClient#isStorageTableCandidateForRange(String, Date, Date)}
    *
-   * 3. Iterate over max interavl . In out case it will give two months Oct and Nov. Find partitions for these two months.
-   * Check validity of FactPartitions for Oct and Nov via {@link #updatePartitionStorage(FactPartition)}.
-   * If the partition is missing, try getting partitions for the time range form other update periods (DAILY,HOURLY).This
-   * is achieved by calling getPartitions() recursively but passing only 2 update periods (DAILY,HOURLY)
+   * 3. Iterate over the max interval. In our case it will give two months, Oct and Nov. Find partitions for
+   * these two months. Check validity of FactPartitions for Oct and Nov
+   * via {@link #updatePartitionStorage(FactPartition)}.
+   * If the partition is missing, try getting partitions for the time range from other update periods (DAILY,HOURLY).
+   * This is achieved by calling getPartitions() recursively but passing only 2 update periods (DAILY,HOURLY)
    *
    * 4.If the monthly partitions are found, check for lookahead partitions and call getPartitions recursively for the
    * remaining time intervals i.e, [15 sep - 1 oct) and [1 Dec - 15 Dec)
@@ -296,7 +355,7 @@ public class StorageCandidate implements Candidate, CandidateTable {
 
     if (!client.isStorageTableCandidateForRange(name, fromDate, toDate)) {
       cubeql.addStoragePruningMsg(this,
-        new CandidateTablePruneCause(CandidateTablePruneCause.CandidateTablePruneCode.TIME_RANGE_NOT_ANSWERABLE));
+          new CandidateTablePruneCause(CandidateTablePruneCause.CandidateTablePruneCode.TIME_RANGE_NOT_ANSWERABLE));
       return false;
     } else if (!client.partColExists(name, partCol)) {
       log.info("{} does not exist in {}", partCol, name);
@@ -310,7 +369,7 @@ public class StorageCandidate implements Candidate, CandidateTable {
     Date floorToDate = DateUtil.getFloorDate(toDate, interval);
 
     int lookAheadNumParts = conf
-      .getInt(CubeQueryConfUtil.getLookAheadPTPartsKey(interval), CubeQueryConfUtil.DEFAULT_LOOK_AHEAD_PT_PARTS);
+        .getInt(CubeQueryConfUtil.getLookAheadPTPartsKey(interval), CubeQueryConfUtil.DEFAULT_LOOK_AHEAD_PT_PARTS);
 
     TimeRange.Iterable.Iterator iter = TimeRange.iterable(ceilFromDate, floorToDate, interval, 1).iterator();
     // add partitions from ceilFrom to floorTo
@@ -340,12 +399,12 @@ public class StorageCandidate implements Candidate, CandidateTable {
           // look-ahead
           // process time are present
           TimeRange.Iterable.Iterator processTimeIter = TimeRange.iterable(nextDt, lookAheadNumParts, interval, 1)
-            .iterator();
+              .iterator();
           while (processTimeIter.hasNext()) {
             Date pdt = processTimeIter.next();
             Date nextPdt = processTimeIter.peekNext();
             FactPartition processTimePartition = new FactPartition(processTimePartCol, pdt, interval, null,
-              partWhereClauseFormat);
+                partWhereClauseFormat);
             updatePartitionStorage(processTimePartition);
             if (processTimePartition.isFound()) {
               log.debug("Finer parts not required for look-ahead partition :{}", part);
@@ -359,15 +418,15 @@ public class StorageCandidate implements Candidate, CandidateTable {
                 // Get partitions for look ahead process time
                 log.debug("Looking for process time partitions between {} and {}", pdt, nextPdt);
                 Set<FactPartition> processTimeParts = getPartitions(
-                  TimeRange.getBuilder().fromDate(pdt).toDate(nextPdt).partitionColumn(processTimePartCol).build(),
-                  newset, true, failOnPartialData, missingPartitions);
+                    TimeRange.getBuilder().fromDate(pdt).toDate(nextPdt).partitionColumn(processTimePartCol).build(),
+                    newset, true, failOnPartialData, missingPartitions);
                 log.debug("Look ahead partitions: {}", processTimeParts);
                 TimeRange timeRange = TimeRange.getBuilder().fromDate(dt).toDate(nextDt).build();
                 for (FactPartition pPart : processTimeParts) {
                   log.debug("Looking for finer partitions in pPart: {}", pPart);
                   for (Date date : timeRange.iterable(pPart.getPeriod(), 1)) {
                     FactPartition innerPart = new FactPartition(partCol, date, pPart.getPeriod(), pPart,
-                      partWhereClauseFormat);
+                        partWhereClauseFormat);
                     updatePartitionStorage(innerPart);
                     innerPart.setFound(pPart.isFound());
                     if (innerPart.isFound()) {
@@ -408,9 +467,10 @@ public class StorageCandidate implements Candidate, CandidateTable {
       }
     }
     return
-      getPartitions(fromDate, ceilFromDate, partCol, partitions, updatePeriods, addNonExistingParts, failOnPartialData,
-        missingPartitions) && getPartitions(floorToDate, toDate, partCol, partitions, updatePeriods,
-        addNonExistingParts, failOnPartialData, missingPartitions);
+        getPartitions(fromDate, ceilFromDate, partCol, partitions, updatePeriods,
+            addNonExistingParts, failOnPartialData, missingPartitions)
+            && getPartitions(floorToDate, toDate, partCol, partitions, updatePeriods,
+              addNonExistingParts, failOnPartialData, missingPartitions);
   }
 
   /**
@@ -429,9 +489,8 @@ public class StorageCandidate implements Candidate, CandidateTable {
     // Check the measure tags.
     if (!evaluateMeasuresCompleteness(timeRange)) {
       log
-        .info("Fact table:{} has partitions with incomplete data: {} for given ranges: {}", fact, dataCompletenessMap,
-          cubeql.getTimeRanges());
-      cubeql.addStoragePruningMsg(this, incompletePartitions(dataCompletenessMap));
+        .info("Storage candidate:{} has partitions with incomplete data: {} for given ranges: {}", this,
+            dataCompletenessMap, cubeql.getTimeRanges());
       if (failOnPartialData) {
         return false;
       }
@@ -482,9 +541,11 @@ public class StorageCandidate implements Candidate, CandidateTable {
         break;
       }
     }
+    // Add all the partitions. participatingPartitions contains all the partitions for previous time ranges also.
+    this.participatingPartitions.addAll(rangeParts);
     numQueriedParts += rangeParts.size();
     if (!unsupportedTimeDims.isEmpty()) {
-      log.info("Not considering fact table:{} as it doesn't support time dimensions: {}", this.getFact(),
+      log.info("Not considering storage candidate:{} as it doesn't support time dimensions: {}", this,
         unsupportedTimeDims);
       cubeql.addStoragePruningMsg(this, timeDimNotSupported(unsupportedTimeDims));
       return false;
@@ -493,7 +554,7 @@ public class StorageCandidate implements Candidate, CandidateTable {
     // TODO union : Relook at this.
     nonExistingPartitions.addAll(nonExistingParts);
     if (rangeParts.size() == 0 || (failOnPartialData && !nonExistingParts.isEmpty())) {
-      log.info("No partitions for fallback range:{}", timeRange);
+      log.info("Not considering storage candidate:{} as no partitions for fallback range:{}", this, timeRange);
       return false;
     }
     String extraWhere = extraWhereClauseFallback.toString();
@@ -505,8 +566,6 @@ public class StorageCandidate implements Candidate, CandidateTable {
       rangeToWhere.put(parentTimeRange, rangeWriter
         .getTimeRangeWhereClause(cubeql, cubeql.getAliasForTableName(cubeql.getCube().getName()), rangeParts));
     }
-    // Add all the partitions. participatingPartitions contains all the partitions for previous time ranges also.
-    this.participatingPartitions.addAll(rangeParts);
     return true;
   }
 
@@ -559,7 +618,7 @@ public class StorageCandidate implements Candidate, CandidateTable {
               dataCompletenessMap.put(measureorExprFromTag, incompletePartition);
             }
             incompletePartition.put(formatter.format(completenessResult.getKey()), completenessResult.getValue());
-            isDataComplete = true;
+            isDataComplete = false;
           }
         }
       }
@@ -600,7 +659,7 @@ public class StorageCandidate implements Candidate, CandidateTable {
       ASTNode selectExpr = (ASTNode) queryAst.getSelectAST().getChild(currentChild);
       Set<String> exprCols = HQLParser.getColsInExpr(cubeql.getAliasForTableName(cubeql.getCube()), selectExpr);
       if (getColumns().containsAll(exprCols)) {
-        ASTNode aliasNode = HQLParser.findNodeByPath(selectExpr, Identifier);
+        ASTNode aliasNode = HQLParser.findNodeByPath(selectExpr, HiveParser.Identifier);
         String alias = cubeql.getSelectPhrases().get(i).getSelectAlias();
         if (aliasNode != null) {
           String queryAlias = aliasNode.getText();
@@ -666,9 +725,9 @@ public class StorageCandidate implements Candidate, CandidateTable {
 
   private String getFromTable() throws LensException {
     if (cubeql.isAutoJoinResolved()) {
-        return fromString;
+      return fromString;
     } else {
-        return cubeql.getQBFromString(this, getDimsToQuery());
+      return cubeql.getQBFromString(this, getDimsToQuery());
     }
   }
 
@@ -685,5 +744,4 @@ public class StorageCandidate implements Candidate, CandidateTable {
     }
     return ret;
   }
-
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index 57b4cf0..3029589 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -18,15 +18,7 @@
  */
 package org.apache.lens.cube.parse;
 
-//import static org.apache.lens.cube.metadata.MetastoreUtil.getFactOrDimtableStorageTableName;
-//import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.TIME_RANGE_NOT_ANSWERABLE;
-//import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.INVALID;
-//import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.UNSUPPORTED_STORAGE;
-//import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.NO_PARTITIONS;
-//import static org.apache.lens.cube.parse.CandidateTablePruneCause.missingPartitions;
-//import static org.apache.lens.cube.parse.CandidateTablePruneCause.noCandidateStorages;
-//import static org.apache.lens.cube.parse.StorageUtil.getFallbackRange;
-
+import static org.apache.lens.cube.parse.CandidateTablePruneCause.incompletePartitions;
 
 import java.util.*;
 
@@ -39,7 +31,6 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 
 import lombok.extern.slf4j.Slf4j;
-
 /**
  * Resolve storages and partitions of all candidate tables and prunes candidate tables with missing storages or
  * partitions.
@@ -57,8 +48,6 @@ class StorageTableResolver implements ContextRewriter {
   private final Map<String, Set<String>> nonExistingPartitions = new HashMap<>();
   CubeMetastoreClient client;
   private PHASE phase;
-  // TODO union : we do not need this. Remove the storage candidate
-  //private HashMap<CubeFactTable, Map<String, SkipStorageCause>> skipStorageCausesPerFact;
   private float completenessThreshold;
   private String completenessPartCol;
 
@@ -136,15 +125,15 @@ class StorageTableResolver implements ContextRewriter {
       for (TimeRange range : cubeql.getTimeRanges()) {
         isComplete &= candidate.evaluateCompleteness(range, range, failOnPartialData);
       }
-      if (!isComplete) {
+      if (failOnPartialData &&  !isComplete) {
         candidateIterator.remove();
-
+        log.info("Not considering candidate:{} as its data is not is not complete", candidate);
         Set<StorageCandidate> scSet = CandidateUtil.getStorageCandidates(candidate);
-        Set<String> missingPartitions;
         for (StorageCandidate sc : scSet) {
-          missingPartitions = CandidateUtil.getMissingPartitions(sc);
-          if (!missingPartitions.isEmpty()) {
-            cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.missingPartitions(missingPartitions));
+          if (!sc.getNonExistingPartitions().isEmpty()) {
+            cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.missingPartitions(sc.getNonExistingPartitions()));
+          } else if (!sc.getDataCompletenessMap().isEmpty()) {
+            cubeql.addStoragePruningMsg(sc, incompletePartitions(sc.getDataCompletenessMap()));
           }
         }
       }
@@ -179,10 +168,11 @@ class StorageTableResolver implements ContextRewriter {
         Map<String, CandidateTablePruneCode> skipStorageCauses = new HashMap<>();
         for (String storage : dimtable.getStorages()) {
           if (isStorageSupportedOnDriver(storage)) {
-            String tableName = MetastoreUtil.getFactOrDimtableStorageTableName(dimtable.getName(), storage).toLowerCase();
+            String tableName = MetastoreUtil.getFactOrDimtableStorageTableName(dimtable.getName(),
+                storage).toLowerCase();
             if (validDimTables != null && !validDimTables.contains(tableName)) {
               log.info("Not considering dim storage table:{} as it is not a valid dim storage", tableName);
-              skipStorageCauses.put(tableName,CandidateTablePruneCode.INVALID);
+              skipStorageCauses.put(tableName, CandidateTablePruneCode.INVALID);
               continue;
             }
 
@@ -278,21 +268,16 @@ class StorageTableResolver implements ContextRewriter {
         boolean partitionColumnExists = client.partColExists(storageTable, range.getPartitionColumn());
         valid = partitionColumnExists;
         if (!partitionColumnExists) {
-          //TODO union : handle prune cause below case.
           String timeDim = cubeql.getBaseCube().getTimeDimOfPartitionColumn(range.getPartitionColumn());
-          //          if (!sc.getFact().getColumns().contains(timeDim)) {
-          //           // Not a time dimension so no fallback required.
-          //          pruningCauses.add(TIMEDIM_NOT_SUPPORTED);
-          //        continue;
-          //       }
-          TimeRange fallBackRange = StorageUtil.getFallbackRange(range, sc.getFact().getCubeName(), cubeql);
+          TimeRange fallBackRange = StorageUtil.getFallbackRange(range, sc.getFact().getName(), cubeql);
           if (fallBackRange == null) {
             log.info("No partitions for range:{}. fallback range: {}", range, fallBackRange);
             pruningCauses.add(CandidateTablePruneCode.TIME_RANGE_NOT_ANSWERABLE);
             continue;
           }
-          valid = client
-            .isStorageTableCandidateForRange(storageTable, fallBackRange.getFromDate(), fallBackRange.getToDate());
+          valid = client.partColExists(storageTable, fallBackRange.getPartitionColumn())
+              && client.isStorageTableCandidateForRange(storageTable, fallBackRange.getFromDate(),
+                  fallBackRange.getToDate());
           if (!valid) {
             pruningCauses.add(CandidateTablePruneCode.TIME_RANGE_NOT_ANSWERABLE);
           }

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeChecker.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeChecker.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeChecker.java
index fe867c7..e37db8b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeChecker.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeChecker.java
@@ -36,7 +36,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 
-import com.google.common.collect.Lists;
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionCandidate.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionCandidate.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionCandidate.java
index 91276cd..d97e7b8 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionCandidate.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionCandidate.java
@@ -1,3 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
 package org.apache.lens.cube.parse;
 
 import java.util.*;
@@ -88,10 +106,10 @@ public class UnionCandidate implements Candidate {
     if (this.equals(candidate)) {
       return true;
     }
-
     for (Candidate child : childCandidates) {
-      if (child.contains((candidate)))
+      if (child.contains((candidate))) {
         return true;
+      }
     }
     return false;
   }
@@ -261,4 +279,4 @@ public class UnionCandidate implements Candidate {
     builder.partitionColumn(timeRange.getPartitionColumn());
     return builder;
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java
index eb0e545..daf3daf 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java
@@ -19,22 +19,29 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.parse.HQLParser.*;
+
+import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
+
+import java.util.*;
+
+import org.apache.lens.cube.metadata.Dimension;
+import org.apache.lens.cube.metadata.MetastoreUtil;
+import org.apache.lens.server.api.error.LensException;
 
-import org.antlr.runtime.CommonToken;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.lens.cube.metadata.MetastoreUtil;
-import org.apache.lens.server.api.error.LensException;
 
-import java.util.*;
-
-import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
-import static org.apache.lens.cube.parse.HQLParser.*;
+import org.antlr.runtime.CommonToken;
 
 import lombok.extern.slf4j.Slf4j;
 
+/**
+ * Utility class to write union query. Given any complex Join or Union Candidate,
+ * this class rewrites union query for all the participating StorageCandidates.
+ */
 @Slf4j
 public class UnionQueryWriter {
 
@@ -54,7 +61,7 @@ public class UnionQueryWriter {
     storageCandidates = CandidateUtil.getStorageCandidates(cand);
   }
 
-  public String toHQL() throws LensException {
+  public String toHQL(Map<StorageCandidate, Set<Dimension>> factDimMap) throws LensException {
     StorageCandidate firstCandidate = storageCandidates.iterator().next();
     // Set the default queryAST for the outer query
     queryAst = DefaultQueryAST.fromStorageCandidate(firstCandidate,
@@ -65,7 +72,7 @@ public class UnionQueryWriter {
     processGroupByAST();
     processOrderByAST();
     CandidateUtil.updateFinalAlias(queryAst.getSelectAST(), cubeql);
-    return CandidateUtil.buildHQLString(queryAst.getSelectString(), getFromString(), null,
+    return CandidateUtil.buildHQLString(queryAst.getSelectString(), getFromString(factDimMap), null,
         queryAst.getGroupByString(), queryAst.getOrderByString(),
         queryAst.getHavingString(), queryAst.getLimitValue());
   }
@@ -107,7 +114,7 @@ public class UnionQueryWriter {
    * @throws LensException
    */
   private ASTNode processHavingAST(ASTNode innerAst, AliasDecider aliasDecider, StorageCandidate sc)
-      throws LensException {
+    throws LensException {
     if (cubeql.getHavingAST() != null) {
       ASTNode havingCopy = MetastoreUtil.copyAST(cubeql.getHavingAST());
       Set<ASTNode> havingAggChildrenASTs = new LinkedHashSet<>();
@@ -121,7 +128,7 @@ public class UnionQueryWriter {
   }
 
   /**
-   * Update havingAST with proper alias name projected.
+   * Update outer havingAST with proper alias name projected.
    *
    * @param node
    * @return
@@ -131,9 +138,9 @@ public class UnionQueryWriter {
         && (HQLParser.isAggregateAST(node))) {
       if (innerToOuterSelectASTs.containsKey(new HQLParser.HashableASTNode(node))
           || innerToOuterHavingASTs.containsKey(new HQLParser.HashableASTNode(node))) {
-        ASTNode expr = innerToOuterSelectASTs.containsKey(new HQLParser.HashableASTNode(node)) ?
-            innerToOuterSelectASTs.get(new HQLParser.HashableASTNode(node)) :
-            innerToOuterHavingASTs.get(new HQLParser.HashableASTNode(node));
+        ASTNode expr = innerToOuterSelectASTs.containsKey(new HQLParser.HashableASTNode(node))
+            ? innerToOuterSelectASTs.get(new HQLParser.HashableASTNode(node))
+            : innerToOuterHavingASTs.get(new HQLParser.HashableASTNode(node));
         node.getParent().setChild(0, expr);
       }
     }
@@ -190,15 +197,18 @@ public class UnionQueryWriter {
     return outerExpression;
   }
 
-  private ASTNode getDefaultNode(ASTNode aliasNode) throws LensException {
-    ASTNode defaultNode = getSelectExprAST();
-    defaultNode.addChild(HQLParser.parseExpr(DEFAULT_MEASURE));
-    defaultNode.addChild(aliasNode);
-    return defaultNode;
-  }
-
+  /**
+   * Get the select expression. In case the node is default, returns "0.0" with an alias;
+   * otherwise returns the select phrase with an alias.
+   *
+   * @param nodeWithoutAlias
+   * @param aliasNode
+   * @param isDefault
+   * @return
+   * @throws LensException
+   */
   private ASTNode getSelectExpr(ASTNode nodeWithoutAlias, ASTNode aliasNode, boolean isDefault)
-      throws LensException {
+    throws LensException {
     ASTNode node = getSelectExprAST();
     if (nodeWithoutAlias == null && isDefault) {
       node.addChild(HQLParser.parseExpr(DEFAULT_MEASURE));
@@ -215,6 +225,15 @@ public class UnionQueryWriter {
     return new ASTNode(new CommonToken(HiveParser.TOK_SELEXPR, "TOK_SELEXPR"));
   }
 
+
+  /**
+   * Get the aggregate node for the SelectPhrase index. A given measure might not be answerable
+   * for a StorageCandidate. In that case get the non-default aggregate node, which ideally is not "0.0",
+   * from another storage candidate.
+   *
+   * @param position
+   * @return
+   */
   private ASTNode getAggregateNodesExpression(int position) {
     ASTNode node = null;
     for (StorageCandidate sc : storageCandidates) {
@@ -226,21 +245,33 @@ public class UnionQueryWriter {
     return MetastoreUtil.copyAST(node);
   }
 
+  /**
+   * Check if an ASTNode is answerable by a StorageCandidate.
+   * @param sc
+   * @param node
+   * @return
+   */
   private boolean isNodeAnswerableForStorageCandidate(StorageCandidate sc, ASTNode node) {
     Set<String> cols = new LinkedHashSet<>();
     getAllColumnsOfNode(node, cols);
     if (!sc.getColumns().containsAll(cols)) {
       return true;
     }
-  return false;
+    return false;
   }
 
-  private ASTNode setDefaultValueInExprForAggregateNodes(ASTNode node, StorageCandidate sc)
-      throws LensException {
+  /**
+   * Set the default value "0.0" in the non answerable aggregate expressions.
+   * @param node
+   * @param sc
+   * @return
+   * @throws LensException
+   */
+  private ASTNode setDefaultValueInExprForAggregateNodes(ASTNode node, StorageCandidate sc) throws LensException {
     if (HQLParser.isAggregateAST(node)
         && isNodeAnswerableForStorageCandidate(sc, node)) {
-      node.setChild(1, getSelectExpr(null, null, true) );
-      }
+      node.setChild(1, getSelectExpr(null, null, true));
+    }
     for (int i = 0; i < node.getChildCount(); i++) {
       ASTNode child = (ASTNode) node.getChild(i);
       setDefaultValueInExprForAggregateNodes(child, sc);
@@ -248,15 +279,111 @@ public class UnionQueryWriter {
     return node;
   }
 
-
   private boolean isAggregateFunctionUsedInAST(ASTNode node) {
-      if (HQLParser.isAggregateAST(node)
-          || HQLParser.hasAggregate(node)) {
+    if (HQLParser.isAggregateAST(node)
+        || HQLParser.hasAggregate(node)) {
+      return true;
+    }
+    return false;
+  }
+
+  private boolean isNodeDefault(ASTNode node) {
+    if (HQLParser.isAggregateAST((ASTNode) node.getChild(0))) {
+      if (HQLParser.getString((ASTNode) node.getChild(0).getChild(1)).equals(DEFAULT_MEASURE)) {
         return true;
       }
+    }
     return false;
   }
 
+  private List<ASTNode> getProjectedNonDefaultPhrases() {
+    List<ASTNode> phrases = new ArrayList<>();
+    int selectPhraseCount = cubeql.getSelectPhrases().size();
+    for (int i = 0; i < selectPhraseCount; i++) {
+      for (StorageCandidate sc : storageCandidates) {
+        ASTNode selectAST = sc.getQueryAst().getSelectAST();
+        if (isNodeDefault((ASTNode) selectAST.getChild(i))) {
+          continue;
+        } else {
+          phrases.add((ASTNode) selectAST.getChild(i));
+          break;
+        }
+      }
+    }
+    return phrases;
+  }
+
+  private void removeRedundantProjectedPhrases() {
+    List<ASTNode> phrases = getProjectedNonDefaultPhrases();
+    List<String> phrasesWithoutAlias = new ArrayList<>();
+    // populate all phrases without alias
+    for (ASTNode node : phrases) {
+      phrasesWithoutAlias.add(HQLParser.getString((ASTNode) node.getChild(0)));
+    }
+    Map<String, List<Integer>> phraseCountMap = new HashMap<>();
+    Map<String, List<String>> aliasMap = new HashMap<>();
+    for (int i = 0; i < phrasesWithoutAlias.size(); i++) {
+      String phrase = phrasesWithoutAlias.get(i);
+      if (phraseCountMap.containsKey(phrase)) {
+        phraseCountMap.get(phrase).add(i);
+      } else {
+        List<Integer> indices = new ArrayList<>();
+        indices.add(i);
+        phraseCountMap.put(phrase, indices);
+      }
+    }
+    for (List<Integer> values : phraseCountMap.values()) {
+      if (values.size() > 1) {
+        String aliasToKeep = HQLParser.findNodeByPath((ASTNode)
+            phrases.get(values.get(0)), Identifier).toString();
+        ArrayList<String> dupAliases = new ArrayList<>();
+        for (int i : values.subList(1, values.size())) {
+          dupAliases.add(HQLParser.findNodeByPath((ASTNode)
+              phrases.get(i), Identifier).toString());
+        }
+        aliasMap.put(aliasToKeep, dupAliases);
+      }
+    }
+
+    for (String col : phraseCountMap.keySet()) {
+      if (phraseCountMap.get(col).size() > 1) {
+        List<Integer> childenToDelete = phraseCountMap.get(col).
+            subList(1, phraseCountMap.get(col).size());
+        int counter = 0;
+        for (int i : childenToDelete) {
+          for (StorageCandidate sc : storageCandidates) {
+            sc.getQueryAst().getSelectAST().deleteChild(i - counter);
+          }
+          counter++;
+        }
+      }
+    }
+    updateOuterSelectDuplicateAliases(queryAst.getSelectAST(), aliasMap);
+  }
+
+  public void updateOuterSelectDuplicateAliases(ASTNode node,
+      Map<String, List<String>> aliasMap) {
+    if (node.getToken().getType() == HiveParser.DOT) {
+      String table = HQLParser.findNodeByPath(node, TOK_TABLE_OR_COL, Identifier).toString();
+      String col = node.getChild(1).toString();
+      for (Map.Entry<String, List<String>> entry : aliasMap.entrySet()) {
+        if (entry.getValue().contains(col)) {
+          try {
+            node.setChild(1, HQLParser.parseExpr(entry.getKey()));
+          } catch (LensException e) {
+            log.error("Unable to parse select expression: {}.", entry.getKey());
+          }
+        }
+
+      }
+    }
+    for (int i = 0; i < node.getChildCount(); i++) {
+      ASTNode child = (ASTNode) node.getChild(i);
+      updateOuterSelectDuplicateAliases(child, aliasMap);
+    }
+  }
+
+
   /**
    * Set the default value for the non queriable measures. If a measure is not
    * answerable from a StorageCandidate set it as 0.0
@@ -267,12 +394,14 @@ public class UnionQueryWriter {
     for (int i = 0; i < cubeql.getSelectPhrases().size(); i++) {
       SelectPhraseContext phrase = cubeql.getSelectPhrases().get(i);
       ASTNode aliasNode = new ASTNode(new CommonToken(Identifier, phrase.getSelectAlias()));
+      // Select phrase is dimension
       if (!phrase.hasMeasures(cubeql)) {
         for (StorageCandidate sc : storageCandidates) {
           ASTNode exprWithOutAlias = (ASTNode) sc.getQueryAst().getSelectAST().getChild(i).getChild(0);
           storageCandidateToSelectAstMap.get(sc.toString()).
               addChild(getSelectExpr(exprWithOutAlias, aliasNode, false));
         }
+        // Select phrase is measure
       } else if (!phrase.getQueriedMsrs().isEmpty()) {
         for (StorageCandidate sc : storageCandidates) {
           if (sc.getAnswerableMeasurePhraseIndices().contains(phrase.getPosition())) {
@@ -290,6 +419,7 @@ public class UnionQueryWriter {
                 addChild(getSelectExpr(resolvedExprNode, aliasNode, false));
           }
         }
+        // Select phrase is expression
       } else {
         for (StorageCandidate sc : storageCandidates) {
           if (phrase.isEvaluable(cubeql, sc)
@@ -312,6 +442,11 @@ public class UnionQueryWriter {
     }
   }
 
+  /**
+   * Update Select and Having clause of outer query.
+   *
+   * @throws LensException
+   */
   private void processSelectAndHavingAST() throws LensException {
     ASTNode outerSelectAst = new ASTNode(queryAst.getSelectAST());
     DefaultAliasDecider aliasDecider = new DefaultAliasDecider();
@@ -329,8 +464,18 @@ public class UnionQueryWriter {
       aliasDecider.setCounter(selectAliasCounter);
       processHavingAST(sc.getQueryAst().getSelectAST(), aliasDecider, sc);
     }
+    removeRedundantProjectedPhrases();
   }
 
+  /**
+   * Get the inner and outer AST with alias for each child of StorageCandidate
+   *
+   * @param sc
+   * @param outerSelectAst
+   * @param innerSelectAST
+   * @param aliasDecider
+   * @throws LensException
+   */
   private void processSelectExpression(StorageCandidate sc, ASTNode outerSelectAst, ASTNode innerSelectAST,
       AliasDecider aliasDecider) throws LensException {
     //ASTNode selectAST = sc.getQueryAst().getSelectAST();
@@ -361,22 +506,21 @@ public class UnionQueryWriter {
   }
 
   /*
-
-Perform a DFS on the provided AST, and Create an AST of similar structure with changes specific to the
-inner query - outer query dynamics. The resultant AST is supposed to be used in outer query.
-
-Base cases:
- 1. ast is null => null
- 2. ast is aggregate_function(table.column) => add aggregate_function(table.column) to inner select expressions,
-          generate alias, return aggregate_function(cube.alias). Memoize the mapping
-          aggregate_function(table.column) => aggregate_function(cube.alias)
-          Assumption is aggregate_function is transitive i.e. f(a,b,c,d) = f(f(a,b), f(c,d)). SUM, MAX, MIN etc
-          are transitive, while AVG, COUNT etc are not. For non-transitive aggregate functions, the re-written
-          query will be incorrect.
- 3. ast has aggregates - iterate over children and add the non aggregate nodes as is and recursively get outer ast
- for aggregate.
- 4. If no aggregates, simply select its alias in outer ast.
- 5. If given ast is memorized as mentioned in the above cases, return the mapping.
+  Perform a DFS on the provided AST, and Create an AST of similar structure with changes specific to the
+  inner query - outer query dynamics. The resultant AST is supposed to be used in outer query.
+
+  Base cases:
+   1. ast is null => null
+   2. ast is aggregate_function(table.column) => add aggregate_function(table.column) to inner select expressions,
+            generate alias, return aggregate_function(cube.alias). Memoize the mapping
+            aggregate_function(table.column) => aggregate_function(cube.alias)
+            Assumption is aggregate_function is transitive i.e. f(a,b,c,d) = f(f(a,b), f(c,d)). SUM, MAX, MIN etc
+            are transitive, while AVG, COUNT etc are not. For non-transitive aggregate functions, the re-written
+            query will be incorrect.
+   3. ast has aggregates - iterate over children and add the non aggregate nodes as is and recursively get outer ast
+   for aggregate.
+   4. If no aggregates, simply select its alias in outer ast.
+   5. If given ast is memorized as mentioned in the above cases, return the mapping.
  */
   private ASTNode getOuterAST(ASTNode astNode, ASTNode innerSelectAST,
       AliasDecider aliasDecider, StorageCandidate sc, boolean isSelectAst) throws LensException {
@@ -402,7 +546,7 @@ Base cases:
           if (hasAggregate(childAST) && sc.getColumns().containsAll(msrCols)) {
             outerAST.addChild(getOuterAST(childAST, innerSelectAST, aliasDecider, sc, isSelectAst));
           } else if (hasAggregate(childAST) && !sc.getColumns().containsAll(msrCols)) {
-            childAST.replaceChildren(1, 1, getDefaultNode(null));
+            childAST.replaceChildren(1, 1,  getSelectExpr(null, null, true));
             outerAST.addChild(getOuterAST(childAST, innerSelectAST, aliasDecider, sc, isSelectAst));
           } else {
             outerAST.addChild(childAST);
@@ -456,6 +600,14 @@ Base cases:
     return outerAST;
   }
 
+  /**
+   * The group-by AST has dimension-only columns, all of which should already have been
+   * projected. Get the aliases of the projected columns and add them to the group-by clause.
+   *
+   * @param astNode
+   * @return
+   * @throws LensException
+   */
 
   private ASTNode processGroupByExpression(ASTNode astNode) throws LensException {
     ASTNode outerExpression = new ASTNode(astNode);
@@ -469,12 +621,21 @@ Base cases:
     return outerExpression;
   }
 
-  private void processHavingExpression(ASTNode innerSelectAst,Set<ASTNode> havingAggASTs,
+  /**
+   * Process the having clause; if a column is not projected, add it
+   * to the projected columns of the inner select AST.
+   *
+   * @param innerSelectAst
+   * @param havingAggASTs
+   * @param aliasDecider
+   * @param sc
+   * @throws LensException
+   */
+
+  private void processHavingExpression(ASTNode innerSelectAst, Set<ASTNode> havingAggASTs,
       AliasDecider aliasDecider, StorageCandidate sc) throws LensException {
     // iterate over all children of the ast and get outer ast corresponding to it.
     for (ASTNode child : havingAggASTs) {
-      //ASTNode node = MetastoreUtil.copyAST(child);
-      //setDefaultValueInExprForAggregateNodes(node, sc);
       if (!innerToOuterSelectASTs.containsKey(new HQLParser.HashableASTNode(child))) {
         getOuterAST(child, innerSelectAst, aliasDecider, sc, false);
       }
@@ -483,6 +644,7 @@ Base cases:
 
   /**
    * Gets all aggreage nodes used in having
+   *
    * @param node
    * @param havingClauses
    * @return
@@ -498,6 +660,13 @@ Base cases:
     return havingClauses;
   }
 
+  /**
+   * Get columns used in ASTNode
+   *
+   * @param node
+   * @param msrs
+   * @return
+   */
   private Set<String> getAllColumnsOfNode(ASTNode node, Set<String> msrs) {
     if (node.getToken().getType() == HiveParser.DOT) {
       String table = HQLParser.findNodeByPath(node, TOK_TABLE_OR_COL, Identifier).toString();
@@ -513,14 +682,16 @@ Base cases:
   /**
    * Gets from string of the ouer query, this is a union query of all
    * StorageCandidates participated.
+   *
    * @return
    * @throws LensException
    */
-  private String getFromString() throws LensException {
+  private String getFromString(Map<StorageCandidate, Set<Dimension>> factDimMap) throws LensException {
     StringBuilder from = new StringBuilder();
     List<String> hqlQueries = new ArrayList<>();
     for (StorageCandidate sc : storageCandidates) {
-      hqlQueries.add(" ( " + sc.toHQL() + " ) ");
+      Set<Dimension> queriedDims = factDimMap.get(sc);
+      hqlQueries.add(sc.toHQL(queriedDims));
     }
     return from.append(" ( ")
         .append(StringUtils.join(" UNION ALL ", hqlQueries))

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
index 2bf3159..b5b0b30 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
@@ -169,7 +169,6 @@ public class AutoJoinContext {
     joinPathFromColumns.remove(dim);
   }
 
-  //TODO union: use StaorgeCandidate
   public String getFromString(String fromTable, StorageCandidate sc, Set<Dimension> qdims,
     Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext cubeql, QueryAST ast) throws LensException {
     String fromString = fromTable;
@@ -348,7 +347,6 @@ public class AutoJoinContext {
     return allPaths;
   }
 
-  //TODO union: use Set<StorageCandidate>
   /**
    * Prunes the join chains defined in Cube whose starting column is not there in any of the candidate facts.
    * Same is done in case of join paths defined in Dimensions.

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java b/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java
index 928a2cb..f4049f5 100644
--- a/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java
+++ b/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java
@@ -56,7 +56,6 @@ public final class RewriterPlan extends DriverQueryPlan {
           }
         }
       }
-      //TODO union: updated code to work on picked Candidate
       if (ctx.getPickedCandidate() != null) {
         for (StorageCandidate sc : CandidateUtil.getStorageCandidates(ctx.getPickedCandidate())) {
           addTablesQueried(sc.getAliasForTable(""));

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 9878158..194ab7c 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -680,11 +680,12 @@ public class CubeTestSetup {
         "Not null cityid Expr", "case when cityid is null then 0 else cityid end"));
     // union join context
     exprs.add(new ExprColumn(new FieldSchema(prefix + "notnullcityid", "int", prefix + "Not null cityid"),
-        prefix + "Not null cityid Expr", "case when union_join_ctx_cityid is null then 0 else union_join_ctx_cityid end"));
+        prefix + "Not null cityid Expr", "case when union_join_ctx_cityid is null then 0 "
+        + "else union_join_ctx_cityid end"));
     exprs.add(new ExprColumn(new FieldSchema(prefix + "sum_msr1_msr2", "int", prefix + "sum of msr1 and msr2"),
         prefix + "sum of msr1 and msr2", "sum(union_join_ctx_msr1) + sum(union_join_ctx_msr2)"));
-   exprs.add(new ExprColumn(new FieldSchema(prefix + "msr1_greater_than_100", "int", prefix + "msr1 greater than 100"),
-       prefix + "msr1 greater than 100", "case when sum(union_join_ctx_msr1) > 100 then \"high\" else \"low\" end"));
+    exprs.add(new ExprColumn(new FieldSchema(prefix + "msr1_greater_than_100", "int", prefix + "msr1 greater than 100"),
+        prefix + "msr1 greater than 100", "case when sum(union_join_ctx_msr1) > 100 then \"high\" else \"low\" end"));
     exprs.add(new ExprColumn(new FieldSchema(prefix + "non_zero_msr2_sum", "int", prefix + "non zero msr2 sum"),
         prefix + "non zero msr2 sum", "sum(case when union_join_ctx_msr2 > 0 then union_join_ctx_msr2 else 0 end)"));
 

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
index f467755..1e5d05f 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
@@ -108,8 +108,8 @@ public class TestAggregateResolver extends TestQueryRewrite {
           + "from ", null, "group by testcube.cityid",
           getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
     String expectedq4 =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, sum(testCube.msr2) as `sum(testCube.msr2)` " +
-          "from ", null, "group by testcube.cityid having" + " sum(testCube.msr2) > 100",
+      getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, sum(testCube.msr2) as `sum(testCube.msr2)` "
+          + "from ", null, "group by testcube.cityid having" + " sum(testCube.msr2) > 100",
         getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
     String expectedq5 =
       getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, sum(testCube.msr2) as `msr2` from ", null,
@@ -155,8 +155,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
       compareQueries(hql, expected[i]);
     }
     aggregateFactSelectionTests(conf);
-    //TODO union : Fix after CandidateFact deleted
-    //rawFactSelectionTests(getConfWithStorages("C1,C2"));
+    rawFactSelectionTests(getConfWithStorages("C1,C2"));
   }
 
   @Test
@@ -177,7 +176,8 @@ public class TestAggregateResolver extends TestQueryRewrite {
     String query2 = "SELECT count (distinct testcube.cityid) from testcube where " + TWO_DAYS_RANGE;
     String hQL2 = rewrite(query2, conf);
     String expectedQL2 =
-      getExpectedQuery(cubeName, "SELECT count (distinct testcube.cityid) as `count(distinct testcube.cityid)`" + " from ", null, null,
+      getExpectedQuery(cubeName, "SELECT count (distinct testcube.cityid) as `count(distinct testcube.cityid)`"
+          + " from ", null, null,
         getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
     compareQueries(hQL2, expectedQL2);
 
@@ -210,8 +210,6 @@ public class TestAggregateResolver extends TestQueryRewrite {
 
   }
 
-  //TODO union : Fix after CandidateFact deleted
-  /*
   @Test
   public void testAggregateResolverOff() throws ParseException, LensException {
     Configuration conf2 = getConfWithStorages("C1,C2");
@@ -222,11 +220,12 @@ public class TestAggregateResolver extends TestQueryRewrite {
     String query = "SELECT cityid, testCube.msr2 FROM testCube WHERE " + TWO_DAYS_RANGE;
     CubeQueryContext cubeql = rewriteCtx(query, conf2);
     String hQL = cubeql.toHQL();
-    Assert.assertEquals(1, cubeql.getCandidateFacts().size());
-    CandidateFact candidateFact = cubeql.getCandidateFacts().iterator().next();
-    Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
+    Assert.assertEquals(1, cubeql.getCandidates().size());
+    Candidate candidate = cubeql.getCandidates().iterator().next();
+    Assert.assertTrue(candidate instanceof StorageCandidate);
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) candidate).getName().toLowerCase());
     String expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid, testCube.msr2 from ", null, null,
+      getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, testCube.msr2 as `msr2` from ", null, null,
         getWhereForHourly2days("c1_testfact2_raw"));
     compareQueries(hQL, expectedQL);
     conf2.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
@@ -234,7 +233,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     conf2.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1,C2");
     rawFactSelectionTests(conf2);
   }
-*/
+
   private void aggregateFactSelectionTests(Configuration conf) throws ParseException, LensException {
     String query = "SELECT count(distinct cityid) from testcube where " + TWO_DAYS_RANGE;
     CubeQueryContext cubeql = rewriteCtx(query, conf);
@@ -257,8 +256,8 @@ public class TestAggregateResolver extends TestQueryRewrite {
     cubeql = rewriteCtx(query, conf);
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, sum(testCube.msr2) as `sum(testCube.msr2)` " +
-          "from ", null, "group by testcube.cityid", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, sum(testCube.msr2) as `sum(testCube.msr2)` "
+          + "from ", null, "group by testcube.cityid", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
     compareQueries(hQL, expectedQL);
 
     query = "SELECT cityid, sum(testCube.msr2) m2 FROM testCube WHERE " + TWO_DAYS_RANGE + " order by m2";
@@ -273,166 +272,183 @@ public class TestAggregateResolver extends TestQueryRewrite {
     cubeql = rewriteCtx(query, conf);
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, sum(testCube.msr2) as `sum(testCube.msr2)` " +
-          "from ", null, "group by testcube.cityid having max(testcube.msr3) > 100",
+      getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, sum(testCube.msr2) as `sum(testCube.msr2)` "
+          + "from ", null, "group by testcube.cityid having max(testcube.msr3) > 100",
         getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
     compareQueries(hQL, expectedQL);
   }
-  //TODO union : Fix after CandidateFact deleted
-  /*
+
   private void rawFactSelectionTests(Configuration conf) throws ParseException, LensException {
     // Check a query with non default aggregate function
     String query = "SELECT cityid, avg(testCube.msr2) FROM testCube WHERE " + TWO_DAYS_RANGE;
     CubeQueryContext cubeql = rewriteCtx(query, conf);
     String hQL = cubeql.toHQL();
-    Assert.assertEquals(1, cubeql.getCandidateFacts().size());
-    CandidateFact candidateFact = cubeql.getCandidateFacts().iterator().next();
-    Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
+    Assert.assertEquals(1, cubeql.getCandidates().size());
+    Candidate candidate = cubeql.getCandidates().iterator().next();
+    Assert.assertTrue(candidate instanceof StorageCandidate);
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) candidate).getName().toLowerCase());
     String expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid, avg(testCube.msr2) from ", null,
-        "group by testcube.cityid", getWhereForHourly2days("c1_testfact2_raw"));
+      getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, avg(testCube.msr2) as `avg(testCube.msr2)` "
+          + "from ", null, "group by testcube.cityid", getWhereForHourly2days("c1_testfact2_raw"));
     compareQueries(hQL, expectedQL);
 
     // query with measure in a where clause
     query = "SELECT cityid, sum(testCube.msr2) FROM testCube WHERE testCube.msr1 < 100 and " + TWO_DAYS_RANGE;
     cubeql = rewriteCtx(query, conf);
-    Assert.assertEquals(1, cubeql.getCandidateFacts().size());
-    candidateFact = cubeql.getCandidateFacts().iterator().next();
-    Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
+    Assert.assertEquals(1, cubeql.getCandidates().size());
+    candidate = cubeql.getCandidates().iterator().next();
+    Assert.assertTrue(candidate instanceof StorageCandidate);
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) candidate).getName().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid, sum(testCube.msr2) from ", "testcube.msr1 < 100",
-        "group by testcube.cityid", getWhereForHourly2days("c1_testfact2_raw"));
+      getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, sum(testCube.msr2) as `sum(testCube.msr2)` "
+          + "from ", "testcube.msr1 < 100", "group by testcube.cityid", getWhereForHourly2days("c1_testfact2_raw"));
     compareQueries(hQL, expectedQL);
 
     query = "SELECT cityid, testCube.msr2 FROM testCube WHERE testCube.msr2 < 100 and " + TWO_DAYS_RANGE;
     cubeql = rewriteCtx(query, conf);
-    Assert.assertEquals(1, cubeql.getCandidateFacts().size());
-    candidateFact = cubeql.getCandidateFacts().iterator().next();
-    Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
+    Assert.assertEquals(1, cubeql.getCandidates().size());
+    candidate = cubeql.getCandidates().iterator().next();
+    Assert.assertTrue(candidate instanceof StorageCandidate);
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) candidate).getName().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid, testCube.msr2 from ", "testcube.msr2 < 100", null,
-        getWhereForHourly2days("c1_testfact2_raw"));
+      getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, testCube.msr2 as `msr2` from ",
+          "testcube.msr2 < 100", null, getWhereForHourly2days("c1_testfact2_raw"));
     compareQueries(hQL, expectedQL);
 
     query = "SELECT cityid, sum(testCube.msr2) FROM testCube WHERE " + TWO_DAYS_RANGE + " group by testCube.msr1";
     cubeql = rewriteCtx(query, conf);
-    Assert.assertEquals(1, cubeql.getCandidateFacts().size());
-    candidateFact = cubeql.getCandidateFacts().iterator().next();
-    Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
+    Assert.assertEquals(1, cubeql.getCandidates().size());
+    candidate = cubeql.getCandidates().iterator().next();
+    Assert.assertTrue(candidate instanceof StorageCandidate);
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) candidate).getName().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid, sum(testCube.msr2) from ", null,
-        " group by testCube.msr1", getWhereForHourly2days("c1_testfact2_raw"));
+      getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, sum(testCube.msr2)  as `sum(testCube.msr2)` "
+          + "from ", null, " group by testCube.msr1", getWhereForHourly2days("c1_testfact2_raw"));
     compareQueries(hQL, expectedQL);
 
     query = "SELECT cityid, sum(testCube.msr2) FROM testCube WHERE " + TWO_DAYS_RANGE + " group by testCube.msr3";
     cubeql = rewriteCtx(query, conf);
-    Assert.assertEquals(1, cubeql.getCandidateFacts().size());
-    candidateFact = cubeql.getCandidateFacts().iterator().next();
-    Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
+    Assert.assertEquals(1, cubeql.getCandidates().size());
+    candidate = cubeql.getCandidates().iterator().next();
+    Assert.assertTrue(candidate instanceof StorageCandidate);
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) candidate).getName().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid, sum(testCube.msr2) from ", null,
-        " group by testCube.msr3", getWhereForHourly2days("c1_testfact2_raw"));
+      getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, sum(testCube.msr2) as `sum(testCube.msr2)` "
+          + "from ", null, " group by testCube.msr3", getWhereForHourly2days("c1_testfact2_raw"));
     compareQueries(hQL, expectedQL);
 
     query = "SELECT cityid, sum(testCube.msr2) FROM testCube WHERE " + TWO_DAYS_RANGE + " order by testCube.msr1";
     cubeql = rewriteCtx(query, conf);
-    Assert.assertEquals(1, cubeql.getCandidateFacts().size());
-    candidateFact = cubeql.getCandidateFacts().iterator().next();
-    Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
+    Assert.assertEquals(1, cubeql.getCandidates().size());
+    candidate = cubeql.getCandidates().iterator().next();
+    Assert.assertTrue(candidate instanceof StorageCandidate);
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) candidate).getName().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid, sum(testCube.msr2) from ", null,
-        " group by testcube.cityid order by testcube.msr1 asc", getWhereForHourly2days("c1_testfact2_raw"));
+      getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, sum(testCube.msr2)  as `sum(testCube.msr2)` "
+          + "from ", null, " group by testcube.cityid order by testcube.msr1 asc",
+          getWhereForHourly2days("c1_testfact2_raw"));
     compareQueries(hQL, expectedQL);
 
     query = "SELECT cityid, sum(testCube.msr2) FROM testCube WHERE " + TWO_DAYS_RANGE + " order by testCube.msr3";
     cubeql = rewriteCtx(query, conf);
-    Assert.assertEquals(1, cubeql.getCandidateFacts().size());
-    candidateFact = cubeql.getCandidateFacts().iterator().next();
-    Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
+    Assert.assertEquals(1, cubeql.getCandidates().size());
+    candidate = cubeql.getCandidates().iterator().next();
+    Assert.assertTrue(candidate instanceof StorageCandidate);
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) candidate).getName().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid, sum(testCube.msr2) from ", null,
-        " group by testcube.cityid order by testcube.msr3 asc", getWhereForHourly2days("c1_testfact2_raw"));
+      getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, sum(testCube.msr2) as `sum(testCube.msr2)` "
+          + "from ", null, " group by testcube.cityid order by testcube.msr3 asc",
+          getWhereForHourly2days("c1_testfact2_raw"));
     compareQueries(hQL, expectedQL);
 
     query = "SELECT distinct cityid, round(testCube.msr2) from testCube where " + TWO_DAYS_RANGE;
     cubeql = rewriteCtx(query, conf);
-    Assert.assertEquals(1, cubeql.getCandidateFacts().size());
-    candidateFact = cubeql.getCandidateFacts().iterator().next();
-    Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
+    Assert.assertEquals(1, cubeql.getCandidates().size());
+    candidate = cubeql.getCandidates().iterator().next();
+    Assert.assertTrue(candidate instanceof StorageCandidate);
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) candidate).getName().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT distinct testcube.cityid, round(testCube.msr2) from ", null, null,
-        getWhereForHourly2days("c1_testfact2_raw"));
+      getExpectedQuery(cubeName, "SELECT distinct testcube.cityid as `cityid`, round(testCube.msr2)  "
+          + "as `round(testCube.msr2)` from ", null, null, getWhereForHourly2days("c1_testfact2_raw"));
     compareQueries(hQL, expectedQL);
 
     query = "SELECT cityid, count(distinct(testCube.msr2)) from testCube where " + TWO_DAYS_RANGE;
     cubeql = rewriteCtx(query, conf);
-    Assert.assertEquals(1, cubeql.getCandidateFacts().size());
-    candidateFact = cubeql.getCandidateFacts().iterator().next();
-    Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
+    Assert.assertEquals(1, cubeql.getCandidates().size());
+    candidate = cubeql.getCandidates().iterator().next();
+    Assert.assertTrue(candidate instanceof StorageCandidate);
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) candidate).getName().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid, count(distinct testCube.msr2) from ", null,
-        "group by testcube.cityid", getWhereForHourly2days("c1_testfact2_raw"));
+      getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, count(distinct testCube.msr2) "
+          + "as `count(distinct(testCube.msr2))` from ", null, "group by testcube.cityid",
+          getWhereForHourly2days("c1_testfact2_raw"));
     compareQueries(hQL, expectedQL);
 
     // query with no default aggregate measure
     query = "SELECT cityid, round(testCube.msr1) from testCube where " + TWO_DAYS_RANGE;
     cubeql = rewriteCtx(query, conf);
-    Assert.assertEquals(1, cubeql.getCandidateFacts().size());
-    candidateFact = cubeql.getCandidateFacts().iterator().next();
-    Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
+    Assert.assertEquals(1, cubeql.getCandidates().size());
+    candidate = cubeql.getCandidates().iterator().next();
+    Assert.assertTrue(candidate instanceof StorageCandidate);
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) candidate).getName().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid, round(testCube.msr1) from ", null, null,
-        getWhereForHourly2days("c1_testfact2_raw"));
+      getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, round(testCube.msr1) as `round(testCube.msr1)` "
+          + "from ", null, null, getWhereForHourly2days("c1_testfact2_raw"));
     compareQueries(hQL, expectedQL);
 
     query = "SELECT distinct cityid, round(testCube.msr1) from testCube where " + TWO_DAYS_RANGE;
     cubeql = rewriteCtx(query, conf);
-    Assert.assertEquals(1, cubeql.getCandidateFacts().size());
-    candidateFact = cubeql.getCandidateFacts().iterator().next();
-    Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
+    Assert.assertEquals(1, cubeql.getCandidates().size());
+    candidate = cubeql.getCandidates().iterator().next();
+    Assert.assertTrue(candidate instanceof StorageCandidate);
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) candidate).getName().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT distinct testcube.cityid, round(testCube.msr1) from ", null, null,
-        getWhereForHourly2days("c1_testfact2_raw"));
+      getExpectedQuery(cubeName, "SELECT distinct testcube.cityid as `cityid`, round(testCube.msr1) "
+          + "as `round(testCube.msr1)` from ", null, null, getWhereForHourly2days("c1_testfact2_raw"));
     compareQueries(hQL, expectedQL);
 
     query = "SELECT cityid, count(distinct(testCube.msr1)) from testCube where " + TWO_DAYS_RANGE;
     cubeql = rewriteCtx(query, conf);
-    Assert.assertEquals(1, cubeql.getCandidateFacts().size());
-    candidateFact = cubeql.getCandidateFacts().iterator().next();
-    Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
+    Assert.assertEquals(1, cubeql.getCandidates().size());
+    candidate = cubeql.getCandidates().iterator().next();
+    Assert.assertTrue(candidate instanceof StorageCandidate);
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) candidate).getName().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid, count(distinct testCube.msr1) from ", null,
-        "group by testcube.cityid", getWhereForHourly2days("c1_testfact2_raw"));
+      getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, count(distinct testCube.msr1) "
+          + "as ` count(distinct testCube.msr1)` from ", null, "group by testcube.cityid",
+          getWhereForHourly2days("c1_testfact2_raw"));
     compareQueries(hQL, expectedQL);
 
     query = "SELECT cityid, sum(testCube.msr1) from testCube where " + TWO_DAYS_RANGE;
     cubeql = rewriteCtx(query, conf);
-    Assert.assertEquals(1, cubeql.getCandidateFacts().size());
-    candidateFact = cubeql.getCandidateFacts().iterator().next();
-    Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
+    Assert.assertEquals(1, cubeql.getCandidates().size());
+    candidate = cubeql.getCandidates().iterator().next();
+    Assert.assertTrue(candidate instanceof StorageCandidate);
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) candidate).getName().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid, sum(testCube.msr1) from ", null,
-        "group by testcube.cityid", getWhereForHourly2days("c1_testfact2_raw"));
+      getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, sum(testCube.msr1) as `sum(testCube.msr1)` "
+          + "from ", null, "group by testcube.cityid",
+          getWhereForHourly2days("c1_testfact2_raw"));
     compareQueries(hQL, expectedQL);
     query = "SELECT cityid, sum(testCube.msr2) FROM testCube WHERE " + TWO_DAYS_RANGE + " having max(msr1) > 100";
     cubeql = rewriteCtx(query, conf);
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid, sum(testCube.msr2) from ", null,
-        "group by testcube.cityid having max(testcube.msr1) > 100", getWhereForHourly2days("c1_testfact2_raw"));
+      getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, sum(testCube.msr2) as `sum(testCube.msr2)` "
+          + "from ", null, "group by testcube.cityid having max(testcube.msr1) > 100",
+          getWhereForHourly2days("c1_testfact2_raw"));
     compareQueries(hQL, expectedQL);
   }
-  */
 }


[2/4] lens git commit: Deleted deprecated classes, Fixed Checkstyle violations, Fixed test cases, Fixed duplicate projections

Posted by pu...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index b367214..8a559e2 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -31,8 +31,6 @@ import static org.apache.hadoop.hive.ql.parse.HiveParser.KW_AND;
 import static org.testng.Assert.*;
 
 import java.util.*;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 
 import org.apache.lens.api.error.ErrorCollectionFactory;
 import org.apache.lens.cube.error.LensCubeErrorCode;
@@ -52,7 +50,6 @@ import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
 import com.google.common.base.Splitter;
-import com.google.common.collect.Sets;
 import lombok.Getter;
 
 public class TestBaseCubeQueries extends TestQueryRewrite {
@@ -72,73 +69,74 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
 
   @Test
   public void testNoCandidateFactAvailableExceptionCompareTo() throws Exception {
-    //maxCause : COLUMN_NOT_FOUND, Ordinal : 9
+    //maxCause : COLUMN_NOT_FOUND
     NoCandidateFactAvailableException ne1 =(NoCandidateFactAvailableException)
-            getLensExceptionInRewrite("select dim1, test_time_dim, msr3, msr13 from basecube where "
-            + TWO_DAYS_RANGE, conf);
-    //maxCause : FACT_NOT_AVAILABLE_IN_RANGE, Ordinal : 1
+      getLensExceptionInRewrite("select dim1, test_time_dim, msr3, msr13 from basecube where "
+        + TWO_DAYS_RANGE, conf);
+    //maxCause : COLUMN_NOT_FOUND
     NoCandidateFactAvailableException ne2 = (NoCandidateFactAvailableException)
-            getLensExceptionInRewrite("select dim1 from " + cubeName + " where " + LAST_YEAR_RANGE, getConf());
-    assertEquals(ne1.compareTo(ne2), 8);
+      getLensExceptionInRewrite("select dim1 from " + cubeName + " where " + LAST_YEAR_RANGE, getConf());
+    assertEquals(ne1.compareTo(ne2), 0);
   }
 
   @Test
   public void testColumnErrors() throws Exception {
     LensException e;
-
-//    e = getLensExceptionInRewrite("select msr11 + msr2 from basecube" + " where " + TWO_DAYS_RANGE, conf);
-//    e.buildLensErrorResponse(new ErrorCollectionFactory().createErrorCollection(), null, "testid");
-//    assertEquals(e.getErrorCode(),
-//      LensCubeErrorCode.NO_FACT_HAS_COLUMN.getLensErrorInfo().getErrorCode());
-//    assertTrue(e.getMessage().contains("msr11"), e.getMessage());
-//    assertTrue(e.getMessage().contains("msr2"), e.getMessage());
+    e = getLensExceptionInRewrite("select msr11 + msr2 from basecube" + " where " + TWO_DAYS_RANGE, conf);
+    e.buildLensErrorResponse(new ErrorCollectionFactory().createErrorCollection(), null, "testid");
+    assertEquals(e.getErrorCode(),
+      LensCubeErrorCode.NO_FACT_HAS_COLUMN.getLensErrorInfo().getErrorCode());
+    assertTrue(e.getMessage().contains("msr11"), e.getMessage());
+    assertTrue(e.getMessage().contains("msr2"), e.getMessage());
     // no fact has the all the dimensions queried
     e = getLensExceptionInRewrite("select dim1, test_time_dim, msr3, msr13 from basecube where "
       + TWO_DAYS_RANGE, conf);
     assertEquals(e.getErrorCode(),
         LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
-    // TODO union :  Commented below line. With the new changes We are keeping only one
-    // TODO union : datastrucucture for candidates. Hence pruning candidateSet using Candidate is not happening.
-    // TODO union : Exception is thrown in later part of rewrite.
     NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) e;
-    PruneCauses.BriefAndDetailedError pruneCauses = ne.getJsonMessage();
-    String regexp = String.format(CandidateTablePruneCause.CandidateTablePruneCode.COLUMN_NOT_FOUND.errorFormat,
-      "Column Sets: (.*?)", "queriable together");
-    Matcher matcher = Pattern.compile(regexp).matcher(pruneCauses.getBrief());
-    assertTrue(matcher.matches(), pruneCauses.getBrief());
-    assertEquals(matcher.groupCount(), 1);
-    String columnSetsStr = matcher.group(1);
-    assertNotEquals(columnSetsStr.indexOf("test_time_dim"), -1, columnSetsStr);
-    assertNotEquals(columnSetsStr.indexOf("msr3, msr13"), -1);
-
-    /**
-     * Verifying the BriefAndDetailedError:
-     * 1. Check for missing columns(COLUMN_NOT_FOUND)
-     *    and check the respective tables for each COLUMN_NOT_FOUND
-     * 2. check for ELEMENT_IN_SET_PRUNED
-     *
-     */
-    boolean columnNotFound = false;
-    List<String> testTimeDimFactTables = Arrays.asList("c1_testfact3_raw_base",
-        "c1_testfact5_base", "c1_testfact6_base", "c1_testfact1_raw_base",
-        "c1_testfact4_raw_base", "c1_testfact3_base");
-    List<String> factTablesForMeasures = Arrays.asList(
-        "c2_testfact2_base","c2_testfact_deprecated","c1_union_join_ctx_fact1","c1_union_join_ctx_fact2",
-        "c1_union_join_ctx_fact3","c1_union_join_ctx_fact5","c1_testfact2_base",
-        "c1_union_join_ctx_fact6","c1_testfact2_raw_base","c1_testfact5_raw_base",
-        "c3_testfact_deprecated","c1_testfact_deprecated","c4_testfact_deprecated",
-        "c3_testfact2_base","c4_testfact2_base");
-    for (Map.Entry<String, List<CandidateTablePruneCause>> entry : pruneCauses.getDetails().entrySet()) {
-      if (entry.getValue().contains(CandidateTablePruneCause.columnNotFound("test_time_dim"))) {
-        columnNotFound = true;
-        compareStrings(testTimeDimFactTables, entry);
-      }
-      if (entry.getValue().contains(CandidateTablePruneCause.columnNotFound("msr3", "msr13"))) {
-        columnNotFound = true;
-        compareStrings(factTablesForMeasures, entry);
-      }
-    }
-    Assert.assertTrue(columnNotFound);
+    //ne.briefAndDetailedError.getBriefCause()
+    //ne.getJsonMessage().brief
+    assertTrue(CandidateTablePruneCode.UNSUPPORTED_STORAGE.errorFormat.equals(ne.getJsonMessage().getBrief()));
+//    PruneCauses.BriefAndDetailedError pruneCauses = ne.getJsonMessage();
+//    String regexp = String.format(CandidateTablePruneCode.UNSUPPORTED_STORAGE.errorFormat,
+//      "Column Sets: (.*?)", "queriable together");
+//    Matcher matcher = Pattern.compile(regexp).matcher(pruneCauses.getBrief());
+//    assertTrue(matcher.matches(), pruneCauses.getBrief());
+//    assertEquals(matcher.groupCount(), 1);
+//    String columnSetsStr = matcher.group(1);
+//    assertNotEquals(columnSetsStr.indexOf("test_time_dim"), -1, columnSetsStr);
+//    assertNotEquals(columnSetsStr.indexOf("msr3, msr13"), -1);
+//
+//    /**
+//     * Verifying the BriefAndDetailedError:
+//     * 1. Check for missing columns(COLUMN_NOT_FOUND)
+//     *    and check the respective tables for each COLUMN_NOT_FOUND
+//     * 2. check for ELEMENT_IN_SET_PRUNED
+//     *
+//     */
+//    boolean columnNotFound = false;
+//    List<String> testTimeDimFactTables = Arrays.asList("c1_testfact3_raw_base",
+//        "c1_testfact5_base", "c1_testfact6_base", "c1_testfact1_raw_base",
+//        "c1_testfact4_raw_base", "c1_testfact3_base");
+//    List<String> factTablesForMeasures = Arrays.asList(
+//        "c2_testfact2_base","c2_testfact_deprecated","c1_union_join_ctx_fact1","c1_union_join_ctx_fact2",
+//        "c1_union_join_ctx_fact3","c1_union_join_ctx_fact5","c1_testfact2_base",
+//        "c1_union_join_ctx_fact6","c1_testfact2_raw_base","c1_testfact5_raw_base",
+//        "c3_testfact_deprecated","c1_testfact_deprecated","c4_testfact_deprecated",
+//        "c3_testfact2_base","c4_testfact2_base");
+//    for (Map.Entry<String, List<CandidateTablePruneCause>> entry : pruneCauses.getDetails().entrySet()) {
+//      if (entry.getValue().contains(CandidateTablePruneCause.columnNotFound(
+//          CandidateTablePruneCode.COLUMN_NOT_FOUND, "test_time_dim"))) {
+//        columnNotFound = true;
+//        compareStrings(testTimeDimFactTables, entry);
+//      }
+//      if (entry.getValue().contains(CandidateTablePruneCause.columnNotFound(
+//          CandidateTablePruneCode.COLUMN_NOT_FOUND, "msr3", "msr13"))) {
+//        columnNotFound = true;
+//        compareStrings(factTablesForMeasures, entry);
+//      }
+//    }
+//    Assert.assertTrue(columnNotFound);
  //   assertEquals(pruneCauses.getDetails().get("testfact1_base"),
  //     Arrays.asList(new CandidateTablePruneCause(CandidateTablePruneCode.ELEMENT_IN_SET_PRUNED)));
   }
@@ -190,72 +188,64 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
           " group by basecube.dim1", getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
     compareQueries(hqlQuery, expected);
   }
-  // TODO union : Fix after CandidateFact deleted
-  /*
+
   @Test
   public void testMultiFactQueryWithNoDimensionsSelected() throws Exception {
     CubeQueryContext ctx = rewriteCtx("select roundedmsr2, msr12 from basecube" + " where " + TWO_DAYS_RANGE, conf);
-    Set<String> candidateFacts = new HashSet<String>();
-    for (CandidateFact cfact : ctx.getCandidateFacts()) {
-      candidateFacts.add(cfact.getName().toLowerCase());
+    Set<String> storageCandidates = new HashSet<String>();
+    Set<StorageCandidate> scSet = CandidateUtil.getStorageCandidates(ctx.getCandidates());
+    for (StorageCandidate sc : scSet) {
+      storageCandidates.add(sc.getName());
     }
-    Assert.assertTrue(candidateFacts.contains("testfact1_base"));
-    Assert.assertTrue(candidateFacts.contains("testfact2_base"));
+    Assert.assertTrue(storageCandidates.contains("c1_testfact1_base"));
+    Assert.assertTrue(storageCandidates.contains("c1_testfact2_base"));
     String hqlQuery = ctx.toHQL();
     String expected1 =
-      getExpectedQuery(cubeName, "select sum(basecube.msr12) as `msr12` FROM ", null,
+      getExpectedQuery(cubeName, "SELECT sum(0.0) as `alias0`, sum((basecube.msr12)) as `alias1` FROM ", null,
         null, getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
     String expected2 =
-      getExpectedQuery(cubeName, "select round(sum(basecube.msr2)/1000) as `roundedmsr2` FROM ", null,
+      getExpectedQuery(cubeName, "SELECT sum((basecube.msr2)) as `alias0`, sum(0.0) as `alias1` FROM ", null,
         null, getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
     String lower = hqlQuery.toLowerCase();
-    assertTrue(lower.startsWith("select mq2.roundedmsr2 roundedmsr2, mq1.msr12 msr12 from ")
-      || lower.startsWith("select mq1.roundedmsr2 roundedmsr2, mq2.msr12 msr12 from "), hqlQuery);
-    assertTrue(lower.contains("mq1 full outer join") && lower.endsWith("mq2"), hqlQuery);
-    assertFalse(lower.contains("mq2 on"), hqlQuery);
-    assertFalse(lower.contains("<=>"), hqlQuery);
+    assertTrue(lower.startsWith("select round((sum((basecube.alias0)) / 1000)) as `roundedmsr2`, "
+        + "sum((basecube.alias1)) as `msr12` from "), hqlQuery);
+    assertFalse(lower.contains("UNION ALL"), hqlQuery);
   }
-*/
 
-  // TODO union : Fix after CandidateFact deleted
-  /*
   @Test
   public void testMoreThanTwoFactQueryWithNoDimensionsSelected() throws Exception {
     CubeQueryContext ctx = rewriteCtx("select roundedmsr2, msr14, msr12 from basecube" + " where " + TWO_DAYS_RANGE,
       conf);
-    Set<String> candidateFacts = new HashSet<String>();
-    for (CandidateFact cfact : ctx.getCandidateFacts()) {
-      candidateFacts.add(cfact.getName().toLowerCase());
+    Set<String> storageCandidates = new HashSet<String>();
+    Set<StorageCandidate> scSet = CandidateUtil.getStorageCandidates(ctx.getCandidates());
+    for (StorageCandidate sc : scSet) {
+      storageCandidates.add(sc.getName());
     }
-    Assert.assertEquals(candidateFacts.size(), 3);
-    Assert.assertTrue(candidateFacts.contains("testfact1_base"));
-    Assert.assertTrue(candidateFacts.contains("testfact2_base"));
-    Assert.assertTrue(candidateFacts.contains("testfact3_base"));
+    Assert.assertEquals(storageCandidates.size(), 3);
+    Assert.assertTrue(storageCandidates.contains("c1_testfact1_base"));
+    Assert.assertTrue(storageCandidates.contains("c1_testfact2_base"));
+    Assert.assertTrue(storageCandidates.contains("c1_testfact3_base"));
     String hqlQuery = ctx.toHQL();
-    String expected1 = getExpectedQuery(cubeName, "select sum(basecube.msr12) as `msr12` FROM ", null, null,
+    String expected1 = getExpectedQuery(cubeName, "SELECT sum(0.0) as `alias0`, count(0.0) as `alias1`, "
+        + "sum((basecube.msr12)) as `alias2` FROM ", null, null,
       getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
-    String expected2 = getExpectedQuery(cubeName, "select round(sum(basecube.msr2)/1000) as `roundedmsr2` FROM ", null,
+    String expected2 = getExpectedQuery(cubeName, "SELECT sum((basecube.msr2)) as `alias0`, count(0.0) as `alias1`, "
+        + "sum(0.0) as `alias2` FROM ", null,
       null, getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
-    String expected3 = getExpectedQuery(cubeName, "select count((basecube.msr14)) as `msr14` FROM ", null, null,
+    String expected3 = getExpectedQuery(cubeName, "SELECT sum(0.0) as `alias0`, count((basecube.msr14)) as `alias1`, "
+        + "sum(0.0) as `alias2` FROM ", null, null,
       getWhereForDailyAndHourly2days(cubeName, "C1_testFact3_BASE"));
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
     compareContains(expected3, hqlQuery);
     String lower = hqlQuery.toLowerCase();
-    assertTrue(lower.startsWith("select mq1.roundedmsr2 roundedmsr2, mq3.msr14 msr14, mq2.msr12 msr12 from ") || lower
-      .startsWith("select mq3.roundedmsr2 roundedmsr2, mq1.msr14 msr14, mq2.msr12 msr12 from ") || lower
-      .startsWith("select mq2.roundedmsr2 roundedmsr2, mq3.msr14 msr14, mq1.msr12 msr12 from ") || lower
-      .startsWith("select mq3.roundedmsr2 roundedmsr2, mq2.msr14 msr14, mq1.msr12 msr12 from ") || lower
-      .startsWith("select mq1.roundedmsr2 roundedmsr2, mq2.msr14 msr14, mq3.msr12 msr12 from ") || lower
-      .startsWith("select mq2.roundedmsr2 roundedmsr2, mq1.msr14 msr14, mq3.msr12 msr12 from "), hqlQuery);
-    assertTrue(lower.contains("mq1 full outer join") && lower.endsWith("mq3"));
-    assertFalse(lower.contains("mq3 on"), hqlQuery);
-    assertFalse(lower.contains("mq2 on"), hqlQuery);
-    assertFalse(lower.contains("<=>"), hqlQuery);
+    assertTrue(lower.startsWith("select round((sum((basecube.alias0)) / 1000)) as `roundedmsr2`, "
+        + "count((basecube.alias1)) as `msr14`, sum((basecube.alias2)) as `msr12` from"), hqlQuery);
+    assertTrue(lower.contains("union all"));
   }
-*/
+
   @Test
   public void testMultiFactQueryWithSingleCommonDimension() throws Exception {
     String hqlQuery = rewrite("select dim1, roundedmsr2, msr12 from basecube" + " where " + TWO_DAYS_RANGE, conf);
@@ -281,8 +271,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     Configuration tConf = new Configuration(conf);
     tConf.setBoolean(CubeQueryConfUtil.LIGHTEST_FACT_FIRST, true);
     String hqlQuery = rewrite("select dim1, roundedmsr2, msr12 from basecube" + " where " + TWO_DAYS_RANGE, tConf);
-    String expected1 = getExpectedQuery(cubeName, "SELECT (basecube.dim1) as `alias0`, sum(0.0) as `alias1`, " +
-          "sum((basecube.msr12)) as `alias2` FROM ", null, " group by basecube.dim1",
+    String expected1 = getExpectedQuery(cubeName, "SELECT (basecube.dim1) as `alias0`, sum(0.0) as `alias1`, "
+        + "sum((basecube.msr12)) as `alias2` FROM ", null, " group by basecube.dim1",
         getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
     String expected2 = getExpectedQuery(cubeName, "SELECT (basecube.dim1) as `alias0`, sum((basecube.msr2)) "
         + "as `alias1`, sum(0.0) as `alias2` FROM ", null, " group by basecube.dim1",
@@ -290,8 +280,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
     String lower = hqlQuery.toLowerCase();
-    assertTrue(lower.startsWith("select (basecube.alias0) as `dim1`, round((sum((basecube.alias1)) / 1000)) " +
-        "as `roundedmsr2`, sum((basecube.alias2)) as `msr12` from"), hqlQuery);
+    assertTrue(lower.startsWith("select (basecube.alias0) as `dim1`, round((sum((basecube.alias1)) / 1000)) "
+        + "as `roundedmsr2`, sum((basecube.alias2)) as `msr12` from"), hqlQuery);
     assertTrue(hqlQuery.contains("UNION ALL") && hqlQuery.endsWith("GROUP BY (basecube.alias0)"),
       hqlQuery);
   }
@@ -354,8 +344,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
             + "sum((basecube.msr2)) as `alias3`, max(0.0) as `alias4`, max((basecube.msr3)) as `alias5` FROM ", null,
         " group by basecube.dim1, (basecube.d_time)", getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
     String expected3 =
-      getExpectedQuery(cubeName, "SELECT (basecube.dim1) as `alias0`, (basecube.d_time) as `alias1`, sum(0.0) " +
-          "as `alias2`, sum(0.0) as `alias3`, max((basecube.msr13)) as `alias4`, max(0.0) as `alias5` FROM ", null,
+      getExpectedQuery(cubeName, "SELECT (basecube.dim1) as `alias0`, (basecube.d_time) as `alias1`, sum(0.0) "
+          + "as `alias2`, sum(0.0) as `alias3`, max((basecube.msr13)) as `alias4`, max(0.0) as `alias5` FROM ", null,
         " group by basecube.dim1, (basecube.d_time)", getWhereForDailyAndHourly2days(cubeName, "c1_testfact3_base"));
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
@@ -403,8 +393,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(
-      "select (basecube.alias0) as `dim1`, (basecube.alias1) as `msr11`, " +
-          "(basecube.alias2) as `roundedmsr2` from"), hqlQuery);
+      "select (basecube.alias0) as `dim1`, (basecube.alias1) as `msr11`, "
+          + "(basecube.alias2) as `roundedmsr2` from"), hqlQuery);
     assertTrue(hqlQuery.contains("UNION ALL") && hqlQuery.endsWith("as basecube"),
       hqlQuery);
   }
@@ -463,8 +453,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
           + "sum(0.0) as `alias2` FROM ", null, " group by basecube.dim1",
           getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
     String expected2 =
-      getExpectedQuery(cubeName, "SELECT (basecube.dim1) as `alias0`, sum(0.0) as `alias1`, sum((basecube.msr2)) " +
-          "as `alias2` FROM ", null, " group by basecube.dim1",
+      getExpectedQuery(cubeName, "SELECT (basecube.dim1) as `alias0`, sum(0.0) as `alias1`, sum((basecube.msr2)) "
+          + "as `alias2` FROM ", null, " group by basecube.dim1",
           getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
@@ -486,14 +476,14 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
           + "sum(0.0) as `alias2` FROM", null, " group by basecube.dim1",
           getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
     String expected2 =
-      getExpectedQuery(cubeName, "SELECT (basecube.dim1) as `alias0`, sum(0.0) as `alias1`, sum((basecube.msr2)) " +
-          "as `alias2` FROM", null,
+      getExpectedQuery(cubeName, "SELECT (basecube.dim1) as `alias0`, sum(0.0) as `alias1`, sum((basecube.msr2)) "
+          + "as `alias2` FROM", null,
           " group by basecube.dim1", getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(
-        "select (basecube.alias0) as `d1`, sum((basecube.alias1)) as `my msr12`, round((sum((basecube.alias2)) / 1000)) " +
-            "as `roundedmsr2` from"), hqlQuery);
+        "select (basecube.alias0) as `d1`, sum((basecube.alias1)) as `my msr12`, "
+            + "round((sum((basecube.alias2)) / 1000)) as `roundedmsr2` from"), hqlQuery);
     assertTrue(hqlQuery.contains("UNION ALL") && hqlQuery.endsWith("GROUP BY (basecube.alias0)"),
       hqlQuery);
   }
@@ -515,7 +505,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith("select (basecube.alias0) as `reverse(dim1)`, (basecube.alias1) "
-        + "as `ltrim(dim1)`, sum((basecube.alias2)) as `msr12`, round((sum((basecube.alias3)) / 1000)) as `roundedmsr2` from"),
+        + "as `ltrim(dim1)`, sum((basecube.alias2)) as `msr12`, round((sum((basecube.alias3)) / 1000)) "
+        + "as `roundedmsr2` from"),
       hqlQuery);
     assertTrue(hqlQuery.contains("UNION ALL")
       && hqlQuery.endsWith("GROUP BY (basecube.alias0), (basecube.alias1)"), hqlQuery);
@@ -553,8 +544,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
           + " avg(0.0) as `alias2` FROM  ", null, " group by basecube.dim1",
           getWhereForHourly2days(cubeName, "C1_testfact2_raw_base"));
     String expected2 =
-      getExpectedQuery(cubeName, "SELECT (basecube.dim1) as `alias0`, avg(0.0) as `alias1`, avg((basecube.msr2)) " +
-          "as `alias2` FROM ", null, " group by basecube.dim1",
+      getExpectedQuery(cubeName, "SELECT (basecube.dim1) as `alias0`, avg(0.0) as `alias1`, avg((basecube.msr2)) "
+          + "as `alias2` FROM ", null, " group by basecube.dim1",
           getWhereForHourly2days(cubeName, "C1_testfact1_raw_base"));
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
@@ -615,8 +606,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     // query with denorm variable
     String hqlQuery = rewrite("select dim2, msr13, roundedmsr2 from basecube where dim2 == 10 and " + TWO_DAYS_RANGE,
       conf);
-    String expected1 = getExpectedQuery(cubeName, "SELECT (dim2chain.id) as `alias0`, max((basecube.msr13)) " +
-        "as `alias1`, sum(0.0) as `alias2` FROM ", " JOIN " + getDbName()
+    String expected1 = getExpectedQuery(cubeName, "SELECT (dim2chain.id) as `alias0`, max((basecube.msr13)) "
+        + "as `alias1`, sum(0.0) as `alias2` FROM ", " JOIN " + getDbName()
         + "c1_testdim2tbl dim2chain ON basecube.dim12 = "
         + " dim2chain.id and (dim2chain.dt = 'latest') ", "dim2chain.id == 10", " group by dim2chain.id", null,
       getWhereForHourly2days(cubeName, "C1_testFact3_RAW_BASE"));
@@ -627,12 +618,12 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(
-      "select (basecube.alias0) as `dim2`, max((basecube.alias1)) as `msr13`, " +
-          "round((sum((basecube.alias2)) / 1000)) as `roundedmsr2` from"), hqlQuery);
+      "select (basecube.alias0) as `dim2`, max((basecube.alias1)) as `msr13`, "
+          + "round((sum((basecube.alias2)) / 1000)) as `roundedmsr2` from"), hqlQuery);
     assertTrue(hqlQuery.contains("UNION ALL") && hqlQuery.endsWith("GROUP BY (basecube.alias0)"),
       hqlQuery);
   }
-  //TODO union : Wrong fact picked
+
   @Test
   public void testMultiFactQueryWithExpressionInvolvingDenormVariable() throws Exception {
     // query with expression
@@ -642,8 +633,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
         "select booleancut, round(sum(msr2)/1000), avg(msr13 + msr14) from basecube where " + TWO_DAYS_RANGE,
         conf);
     String expected1 =
-      getExpectedQuery(cubeName, "SELECT (((basecube.dim1) != 'x') and ((dim2chain.id) != 10)) as `alias0`, " +
-          "sum(0.0) as `alias1`, avg(((basecube.msr13) + (basecube.msr14))) as `alias2` FROM ", " JOIN "
+      getExpectedQuery(cubeName, "SELECT (((basecube.dim1) != 'x') and ((dim2chain.id) != 10)) as `alias0`, "
+          + "sum(0.0) as `alias1`, avg(((basecube.msr13) + (basecube.msr14))) as `alias2` FROM ", " JOIN "
           + getDbName() + "c1_testdim2tbl dim2chain ON basecube.dim12 = "
           + " dim2chain.id and (dim2chain.dt = 'latest') ", null,
         " group by basecube.dim1 != 'x' AND dim2chain.id != 10", null,
@@ -672,8 +663,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
         "select booleancut, round(sum(msr2)/1000), avg(msr13 + msr14) from basecube where booleancut == 'true' and "
           + TWO_DAYS_RANGE, conf);
     String expected1 =
-      getExpectedQuery(cubeName, "SELECT (((basecube.dim1) != 'x') and ((dim2chain.id) != 10)) as `alias0`, " +
-          "sum(0.0) as `alias1`, avg(((basecube.msr13) + (basecube.msr14))) as `alias2` FROM ", " JOIN " + getDbName()
+      getExpectedQuery(cubeName, "SELECT (((basecube.dim1) != 'x') and ((dim2chain.id) != 10)) as `alias0`, "
+          + "sum(0.0) as `alias1`, avg(((basecube.msr13) + (basecube.msr14))) as `alias2` FROM ", " JOIN " + getDbName()
           + "c1_testdim2tbl dim2chain ON basecube.dim12 = " + " dim2chain.id and (dim2chain.dt = 'latest') ",
         "(basecube.dim1 != 'x' AND dim2chain.id != 10) == true",
         " group by basecube.dim1 != 'x' AND dim2chain.id != 10", null,
@@ -686,9 +677,9 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
         getWhereForHourly2days(cubeName, "C1_testfact1_raw_base"));
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
-    assertTrue(hqlQuery.toLowerCase().startsWith("select (basecube.alias0) as `booleancut`, " +
-        "round((sum((basecube.alias1)) / 1000)) as `round((sum(msr2) / 1000))`, " +
-        "avg((basecube.alias2)) as `avg((msr13 + msr14))` from"), hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith("select (basecube.alias0) as `booleancut`, "
+        + "round((sum((basecube.alias1)) / 1000)) as `round((sum(msr2) / 1000))`, "
+        + "avg((basecube.alias2)) as `avg((msr13 + msr14))` from"), hqlQuery);
     assertTrue(hqlQuery.contains("UNION ALL")
         && hqlQuery.endsWith("GROUP BY (basecube.alias0)"),
       hqlQuery);
@@ -702,11 +693,11 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
       rewrite("select booleancut, round(sum(msr2)/1000), msr13 from basecube where " + TWO_DAYS_RANGE, tconf);
     String expected1 =
       getExpectedQuery(cubeName, "SELECT (basecube.booleancut) as `alias0`, sum(0.0) as `alias1`, "
-          + "max((basecube.msr13)) as `alias2` FROM", null, " " +
-          "group by basecube.booleancut", getWhereForDailyAndHourly2days(cubeName, "C1_testfact6_base"));
+          + "max((basecube.msr13)) as `alias2` FROM", null, " "
+          + "group by basecube.booleancut", getWhereForDailyAndHourly2days(cubeName, "C1_testfact6_base"));
     String expected2 =
-      getExpectedQuery(cubeName, "SELECT (basecube.booleancut) as `alias0`, sum((basecube.msr2)) as `alias1`, " +
-          "max(0.0) as `alias2` FROM ", null, " group by basecube.booleancut",
+      getExpectedQuery(cubeName, "SELECT (basecube.booleancut) as `alias0`, sum((basecube.msr2)) as `alias1`, "
+          + "max(0.0) as `alias2` FROM ", null, " group by basecube.booleancut",
         getWhereForDailyAndHourly2days(cubeName, "C1_testfact5_base"));
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
@@ -733,8 +724,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
         getWhereForHourly2days(cubeName, "c1_testfact1_raw_base"));
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
-    assertTrue(hqlQuery.toLowerCase().startsWith("select sum((basecube.alias0)) as `case_expr`, sum((basecube.alias1)) "
-        + "as `sum(msr1)` from "), hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith("select sum((basecube.alias0)) as `case_expr`, "
+        + "sum((basecube.alias1)) as `sum(msr1)` from "), hqlQuery);
     assertTrue(hqlQuery.contains("UNION ALL") && hqlQuery.endsWith("basecube"), hqlQuery);
   }
 
@@ -766,7 +757,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
         + "where " + TWO_DAYS_RANGE, tconf);
     String expected1 =
       getExpectedQuery(cubeName, "SELECT (basecube.dim1) as `alias0`, sum(case  when ((basecube.dim13) = 'x') "
-          + "then (basecube.msr12) else 0 end) as `alias1`, sum(0.0) as `alias2` FROM ", null, " group by basecube.dim1 ",
+          + "then (basecube.msr12) else 0 end) as `alias1`, sum(0.0) as `alias2` FROM ", null,
+          " group by basecube.dim1 ",
         getWhereForHourly2days(cubeName, "C1_testfact2_raw_base"));
     String expected2 =
       getExpectedQuery(cubeName, "SELECT (basecube.dim1) as `alias0`, sum(0.0) as `alias1`, sum((basecube.msr1)) "
@@ -775,8 +767,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(
-      "select (basecube.alias0) as `dim1`, sum((basecube.alias1)) as `case_expr`, " +
-          "sum((basecube.alias2)) as `sum(msr1)` from"), hqlQuery);
+      "select (basecube.alias0) as `dim1`, sum((basecube.alias1)) as `case_expr`, "
+          + "sum((basecube.alias2)) as `sum(msr1)` from"), hqlQuery);
     assertTrue(hqlQuery.contains("UNION ALL") && hqlQuery.endsWith("GROUP BY (basecube.alias0)"),
       hqlQuery);
   }
@@ -787,7 +779,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String hqlQuery =
       rewrite("select sum(case when dim13 = 'x' then msr12 else 0 end) as case_expr, sum(msr1) from basecube "
         + "where "
-        + TWO_DAYS_RANGE + " having sum(case when dim13 = 'x' then msr12 else 0 end) > 100 and sum(msr1) > 500", tconf);
+        + TWO_DAYS_RANGE + " having sum(case when dim13 = 'x' then msr12 else 0 end) > 100 "
+        + "and sum(msr1) > 500", tconf);
     String expected1 =
       getExpectedQuery(cubeName, "SELECT sum(case  when ((basecube.dim13) = 'x') then (basecube.msr12) else 0 end) "
           + "as `alias0`, sum(0.0) as `alias1` FROM ", null, "",
@@ -808,11 +801,11 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     Configuration tconf = new Configuration(conf);
     String hqlQuery =
       rewrite("select dim1, sum(case when dim13 = 'x' then msr12 else 0 end) as case_expr, sum(msr1) from basecube "
-        + "where "
-        + TWO_DAYS_RANGE + " having sum(case when dim13 = 'x' then msr12 else 0 end) > 100 and sum(msr1) > 500", tconf);
+        + "where " + TWO_DAYS_RANGE + " having sum(case when dim13 = 'x' then msr12 else 0 end) > 100 "
+          + "and sum(msr1) > 500", tconf);
     String expected1 =
-      getExpectedQuery(cubeName, "SELECT (basecube.dim1) as `alias0`, sum(case  when ((basecube.dim13) = 'x') then " +
-          "(basecube.msr12) else 0 end) as `alias1`, sum(0.0) as `alias2` FROM", null, " group by basecube.dim1",
+      getExpectedQuery(cubeName, "SELECT (basecube.dim1) as `alias0`, sum(case  when ((basecube.dim13) = 'x') then "
+          + "(basecube.msr12) else 0 end) as `alias1`, sum(0.0) as `alias2` FROM", null, " group by basecube.dim1",
           getWhereForHourly2days(cubeName, "C1_testfact2_raw_base"));
     String expected2 =
       getExpectedQuery(cubeName, "SELECT (basecube.dim1) as `alias0`, sum(0.0) as `alias1`, sum((basecube.msr1)) "
@@ -827,8 +820,6 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
         && hqlQuery.endsWith("HAVING ((sum((basecube.alias1)) > 100) and (sum((basecube.alias2)) > 500))"), hqlQuery);
   }
 
-  // TODO union : Fix after MaxCoveringSet resolver
-  /*
   @Test
   public void testFallbackPartCol() throws Exception {
     Configuration conf = getConfWithStorages("C1");
@@ -836,7 +827,7 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String hql, expected;
     // Prefer fact that has a storage with part col on queried time dim
     hql = rewrite("select msr12 from basecube where " + TWO_DAYS_RANGE, conf);
-    expected = getExpectedQuery(BASE_CUBE_NAME, "select sum(basecube.msr12) FROM ", null, null,
+    expected = getExpectedQuery(BASE_CUBE_NAME, "select sum(basecube.msr12) as `msr12` FROM ", null, null,
       getWhereForDailyAndHourly2days(BASE_CUBE_NAME, "c1_testfact2_base"));
     compareQueries(hql, expected);
 
@@ -848,10 +839,10 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) exc;
     PruneCauses.BriefAndDetailedError pruneCause = ne.getJsonMessage();
     assertTrue(pruneCause.getBrief().contains("Missing partitions"));
-    assertEquals(pruneCause.getDetails().get("testfact2_base").iterator().next().getCause(), MISSING_PARTITIONS);
-    assertEquals(pruneCause.getDetails().get("testfact2_base").iterator().next().getMissingPartitions().size(), 1);
+    assertEquals(pruneCause.getDetails().get("c4_testfact2_base").iterator().next().getCause(), MISSING_PARTITIONS);
+    assertEquals(pruneCause.getDetails().get("c4_testfact2_base").iterator().next().getMissingPartitions().size(), 1);
     assertEquals(
-      pruneCause.getDetails().get("testfact2_base").iterator().next().getMissingPartitions().iterator().next(),
+      pruneCause.getDetails().get("c4_testfact2_base").iterator().next().getMissingPartitions().iterator().next(),
       "ttd:["
         + UpdatePeriod.SECONDLY.format(DateUtils.addDays(DateUtils.truncate(TWODAYS_BACK, Calendar.HOUR), -10))
         + ", " + UpdatePeriod.SECONDLY.format(DateUtils.addDays(DateUtils.truncate(NOW, Calendar.HOUR), 10))
@@ -871,7 +862,7 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
           DateUtils.addDays(TWODAYS_BACK, -5))))) + "' and "
         + "basecube.processing_time < '" + HIVE_QUERY_DATE_PARSER.get().format(ABSDATE_PARSER.get().parse(
           getAbsDateFormatString(getDateUptoHours(DateUtils.addDays(NOW, 5)))));
-    expected = getExpectedQuery(BASE_CUBE_NAME, "select sum(basecube.msr12) FROM ", null,
+    expected = getExpectedQuery(BASE_CUBE_NAME, "select sum(basecube.msr12) as `msr12` FROM ", null,
         " and " + dTimeWhereClause + " and " + pTimeWhereClause,
       getWhereForDailyAndHourly2daysWithTimeDim(BASE_CUBE_NAME, "ttd",
         DateUtils.addDays(TWODAYS_BACK, -10), DateUtils.addDays(NOW, 10), "c4_testfact2_base"));
@@ -880,33 +871,26 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     // Multiple timedims in single query. test that
     CubeQueryContext ctx =
       rewriteCtx("select msr12 from basecube where " + TWO_DAYS_RANGE + " and " + TWO_DAYS_RANGE_TTD, conf);
-    assertEquals(ctx.getCandidateFactSets().size(), 1);
-    assertEquals(ctx.getCandidateFactSets().iterator().next().size(), 1);
-    CandidateFact cfact = ctx.getCandidateFactSets().iterator().next().iterator().next();
-
-    assertEquals(cfact.getRangeToStoragePartMap().size(), 2);
-    Set<String> storages = Sets.newHashSet();
-    for(Map<String, String> entry: cfact.getRangeToStorageWhereMap().values()) {
-      storages.addAll(entry.keySet());
-    }
-    assertEquals(storages.size(), 1);
-    String storage = storages.iterator().next();
-    for(Map.Entry<TimeRange, Map<String, String>> entry: cfact.getRangeToStorageWhereMap().entrySet()) {
+    assertEquals(ctx.getCandidates().size(), 1);
+    assertEquals(CandidateUtil.getStorageCandidates(ctx.getCandidates().iterator().next()).size(), 1);
+    StorageCandidate sc = CandidateUtil.getStorageCandidates(ctx.getCandidates().iterator().next()).iterator().next();
+    assertEquals(sc.getRangeToWhere().size(), 2);
+    for(Map.Entry<TimeRange, String> entry: sc.getRangeToWhere().entrySet()) {
       if (entry.getKey().getPartitionColumn().equals("dt")) {
-        ASTNode parsed = HQLParser.parseExpr(entry.getValue().get(storage));
+        ASTNode parsed = HQLParser.parseExpr(entry.getValue());
         assertEquals(parsed.getToken().getType(), KW_AND);
-        assertTrue(entry.getValue().get(storage).substring(((CommonToken) parsed.getToken()).getStopIndex() + 1)
+        assertTrue(entry.getValue().substring(((CommonToken) parsed.getToken()).getStopIndex() + 1)
           .toLowerCase().contains(dTimeWhereClause));
-        assertFalse(entry.getValue().get(storage).substring(0, ((CommonToken) parsed.getToken()).getStartIndex())
+        assertFalse(entry.getValue().substring(0, ((CommonToken) parsed.getToken()).getStartIndex())
           .toLowerCase().contains("and"));
       } else if (entry.getKey().getPartitionColumn().equals("ttd")) {
-        assertFalse(entry.getValue().get(storage).toLowerCase().contains("and"));
+        assertFalse(entry.getValue().toLowerCase().contains("and"));
       } else {
         throw new LensException("Unexpected");
       }
     }
   }
-  */
+
   @Test
   public void testMultiFactQueryWithHaving() throws Exception {
 
@@ -918,8 +902,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     hqlQuery = rewrite("select dim1, dim11, msr12 from basecube where " + TWO_DAYS_RANGE
       + "having roundedmsr2 > 0", conf);
     expected1 = getExpectedQuery(cubeName,
-      "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum((basecube.msr12)) " +
-          "as `alias2`, sum(0.0) as `alias3` FROM ", null, " group by basecube.dim1, basecube.dim11",
+      "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum((basecube.msr12)) "
+          + "as `alias2`, sum(0.0) as `alias3` FROM ", null, " group by basecube.dim1, basecube.dim11",
       getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
     expected2 = getExpectedQuery(cubeName,
       "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum(0.0) as `alias2`, "
@@ -941,8 +925,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
           + "sum(0.0) as `alias3` FROM ", null, " group by basecube.dim1, basecube.dim11",
       getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
     expected2 = getExpectedQuery(cubeName,
-      "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum(0.0) as `alias2`, " +
-          "sum((basecube.msr2)) as `alias3` FROM ", null, " group by basecube.dim1, basecube.dim11",
+      "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum(0.0) as `alias2`, "
+          + "sum((basecube.msr2)) as `alias3` FROM ", null, " group by basecube.dim1, basecube.dim11",
       getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
 
     compareContains(expected1, hqlQuery);
@@ -953,59 +937,12 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     assertTrue(hqlQuery.endsWith("HAVING ((sum((basecube.alias2)) > 2) "
         + "and (round((sum((basecube.alias3)) / 1000)) > 0))"));
 
-    // Two having clauses and one complex expression in having which needs to be split over the two facts
-    // And added as where clause outside
-    //TODO union : floor is not a valid function.
-    /*
-    (((tok_function(sum((basecube.msr12))) + round((sum((basecube.alias3)) / 1000))) <= 1000)
-     and (sum((basecube.alias2)) > 2) and (round((sum((basecube.alias3)) / 1000)) > 0))
-           <= [LESSTHANOREQUALTO] (l3c1p145) {
-        + [PLUS] (l4c1p132) {
-          TOK_FUNCTION [TOK_FUNCTION] (l5c1p0) {
-            TOK_FUNCTION [TOK_FUNCTION] (l6c1p0) {
-              SUM [Identifier] (l7c1p0)$
-              . [DOT] (l7c2p0) {
-                TOK_TABLE_OR_COL [TOK_TABLE_OR_COL] (l8c1p0) {
-                  basecube [Identifier] (l9c1p0)$
-                }
-                alias2 [Identifier] (l8c2p0)$
-              }
-            }
-            TOK_FUNCTION [TOK_FUNCTION] (l6c2p0) {
-              SUM [Identifier] (l7c1p0)$
-              . [DOT] (l7c2p0) {
-                TOK_TABLE_OR_COL [TOK_TABLE_OR_COL] (l8c1p0) {
-                  basecube [Identifier] (l9c1p0)$
-                }
-                msr12 [Identifier] (l8c2p0)$
-              }
-            }
-          }
-          TOK_FUNCTION [TOK_FUNCTION] (l5c2p0) {
-            round [Identifier] (l6c1p0)$
-            / [DIVIDE] (l6c2p10) {
-              TOK_FUNCTION [TOK_FUNCTION] (l7c1p0) {
-                SUM [Identifier] (l8c1p0)$
-                . [DOT] (l8c2p0) {
-                  TOK_TABLE_OR_COL [TOK_TABLE_OR_COL] (l9c1p0) {
-                    basecube [Identifier] (l10c1p0)$
-                  }
-                  alias3 [Identifier] (l9c2p0)$
-                }
-              }
-              1000 [Number] (l7c2p11)$
-            }
-          }
-        }
-        1000 [Number] (l4c2p148)$
-      }
-     */
     hqlQuery = rewrite("select dim1, dim11, msr12, roundedmsr2 from basecube where " + TWO_DAYS_RANGE
-      + "having msr12+roundedmsr2 <= 1000 and msr12 > 2 and roundedmsr2 > 0", conf);
-        expected1 = getExpectedQuery(cubeName,
-      "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum((basecube.msr12)) as `alias2`, "
-          + "sum(0.0) as `alias3` FROM ", null, " group by basecube.dim1, basecube.dim11",
-      getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+        + "having msr12+roundedmsr2 <= 1000 and msr12 > 2 and roundedmsr2 > 0", conf);
+    expected1 = getExpectedQuery(cubeName,
+        "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum((basecube.msr12)) as `alias2`, "
+            + "sum(0.0) as `alias3` FROM ", null, " group by basecube.dim1, basecube.dim11",
+        getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
 
     expected2 = getExpectedQuery(cubeName,
         "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum(0.0) as `alias2`, "
@@ -1019,17 +956,16 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     assertTrue(hqlQuery.endsWith("(((sum((basecube.alias2)) + round((sum((basecube.alias3)) / 1000))) <= 1000) "
         + "and (sum((basecube.alias2)) > 2) and (round((sum((basecube.alias3)) / 1000)) > 0))"), hqlQuery);
 
-    // TODO union : why?, columns are projected can't be part of having!
     // No push-down-able having clauses.
     hqlQuery = rewrite("select dim1, dim11, msr12, roundedmsr2 from basecube where " + TWO_DAYS_RANGE
       + "having msr12+roundedmsr2 <= 1000", conf);
     expected1 = getExpectedQuery(cubeName,
-      "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum((basecube.msr12)) as `alias2`, " +
-          "sum(0.0) as `alias3` FROM", null, " group by basecube.dim1, basecube.dim11",
+      "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum((basecube.msr12)) as `alias2`, "
+          + "sum(0.0) as `alias3` FROM", null, " group by basecube.dim1, basecube.dim11",
       getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
     expected2 = getExpectedQuery(cubeName,
-      "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum(0.0) as `alias2`, sum((basecube.msr2)) " +
-          "as `alias3` FROM", null, " group by basecube.dim1, basecube.dim11",
+      "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum(0.0) as `alias2`, sum((basecube.msr2)) "
+          + "as `alias3` FROM", null, " group by basecube.dim1, basecube.dim11",
       getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
 
     compareContains(expected1, hqlQuery);
@@ -1037,28 +973,29 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     assertTrue(hqlQuery.toLowerCase().startsWith("select (basecube.alias0) as `dim1`, (basecube.alias1) as `dim11`, "
         + "sum((basecube.alias2)) as `msr12`, round((sum((basecube.alias3)) / 1000)) as `roundedmsr2` from"), hqlQuery);
     assertTrue(hqlQuery.contains("UNION ALL")
-      && hqlQuery.endsWith("HAVING ((sum((basecube.alias2)) + " +
-        "round((sum((basecube.alias3)) / 1000))) <= 1000)"), hqlQuery);
+      && hqlQuery.endsWith("HAVING ((sum((basecube.alias2)) + "
+        + "round((sum((basecube.alias3)) / 1000))) <= 1000)"), hqlQuery);
 
     // function over expression of two functions over measures
     hqlQuery = rewrite("select dim1, dim11, msr12, roundedmsr2 from basecube where " + TWO_DAYS_RANGE
       + "having round(msr12+roundedmsr2) <= 1000", conf);
     expected1 = getExpectedQuery(cubeName,
-      "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum((basecube.msr12)) as `alias2`, " +
-          "sum(0.0) as `alias3` FROM ", null, " group by basecube.dim1, basecube.dim11",
+      "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum((basecube.msr12)) as `alias2`, "
+          + "sum(0.0) as `alias3` FROM ", null, " group by basecube.dim1, basecube.dim11",
       getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
     expected2 = getExpectedQuery(cubeName,
-      " SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum(0.0) as `alias2`, " +
-          "sum((basecube.msr2)) as `alias3` FROM ", null, " group by basecube.dim1, basecube.dim11",
+      " SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum(0.0) as `alias2`, "
+          + "sum((basecube.msr2)) as `alias3` FROM ", null, " group by basecube.dim1, basecube.dim11",
       getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
 
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith("select (basecube.alias0) as `dim1`, (basecube.alias1) as `dim11`, "
-        + "sum((basecube.alias2)) as `msr12`, round((sum((basecube.alias3)) / 1000)) as `roundedmsr2` from"), hqlQuery);
+        + "sum((basecube.alias2)) as `msr12`, round((sum((basecube.alias3)) / 1000)) "
+        + "as `roundedmsr2` from"), hqlQuery);
     assertTrue(hqlQuery.contains("UNION ALL")
-      && hqlQuery.endsWith(" HAVING (round((sum((basecube.alias2)) + " +
-        "round((sum((basecube.alias3)) / 1000)))) <= 1000)"), hqlQuery);
+      && hqlQuery.endsWith(" HAVING (round((sum((basecube.alias2)) + "
+        + "round((sum((basecube.alias3)) / 1000)))) <= 1000)"), hqlQuery);
 
 
     // Following test cases only select dimensions, and all the measures are in having.
@@ -1072,8 +1009,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
           + "sum(0.0) as `alias3` FROM ", null, " group by basecube.dim1, basecube.dim11",
       getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
     expected2 = getExpectedQuery(cubeName,
-      "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum(0.0) as `alias2`, " +
-          "sum((basecube.msr2)) as `alias3` FROM ", null, " group by basecube.dim1, basecube.dim11",
+      "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum(0.0) as `alias2`, "
+          + "sum((basecube.msr2)) as `alias3` FROM ", null, " group by basecube.dim1, basecube.dim11",
       getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
     String begin = "select (basecube.alias0) as `dim1`, (basecube.alias1) as `dim11` from";
     compareContains(expected1, hqlQuery);
@@ -1086,21 +1023,21 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     hqlQuery = rewrite("select dim1, dim11 from basecube where " + TWO_DAYS_RANGE
       + "having msr12 > 2 and roundedmsr2 > 0 and msr2 > 100", conf);
     expected1 = getExpectedQuery(cubeName,
-        "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum((basecube.msr12)) as `alias2`, " +
-            "sum(0.0) as `alias3`, sum(0.0) as `alias4` FROM ", null, " group by basecube.dim1, basecube.dim11",
+        "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum((basecube.msr12)) as `alias2`, "
+            + "sum(0.0) as `alias3`, sum(0.0) as `alias4` FROM ", null, " group by basecube.dim1, basecube.dim11",
         getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
     expected2 = getExpectedQuery(cubeName,
-      "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum(0.0) as `alias2`, " +
-          "sum((basecube.msr2)) as `alias3`, sum((basecube.msr2)) as `alias4` FROM ", null,
+      "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum(0.0) as `alias2`, "
+          + "sum((basecube.msr2)) as `alias3`, sum((basecube.msr2)) as `alias4` FROM ", null,
       " group by basecube.dim1, basecube.dim11",
       getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
 
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
-    assertTrue(hqlQuery.toLowerCase().startsWith("select (basecube.alias0) as `dim1`, " +
-        "(basecube.alias1) as `dim11` from"), hqlQuery);
-    assertTrue(hqlQuery.contains("UNION ALL") && hqlQuery.endsWith("HAVING ((sum((basecube.alias2)) > 2) and " +
-        "(round((sum((basecube.alias4)) / 1000)) > 0) and (sum((basecube.alias4)) > 100))"), hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith("select (basecube.alias0) as `dim1`, "
+        + "(basecube.alias1) as `dim11` from"), hqlQuery);
+    assertTrue(hqlQuery.contains("UNION ALL") && hqlQuery.endsWith("HAVING ((sum((basecube.alias2)) > 2) and "
+        + "(round((sum((basecube.alias4)) / 1000)) > 0) and (sum((basecube.alias4)) > 100))"), hqlQuery);
 
     hqlQuery = rewrite("select dim1, dim11 from basecube where " + TWO_DAYS_RANGE
       + "having msr12+roundedmsr2 <= 1000", conf);
@@ -1110,15 +1047,15 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
       null, " group by basecube.dim1, basecube.dim11",
       getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
     expected2 = getExpectedQuery(cubeName,
-      "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum(0.0) as `alias2`, " +
-          "sum((basecube.msr2)) as `alias3` FROM ",
+      "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum(0.0) as `alias2`, "
+          + "sum((basecube.msr2)) as `alias3` FROM ",
       null, " group by basecube.dim1, basecube.dim11",
       getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
 
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
-    assertTrue(hqlQuery.toLowerCase().startsWith("select (basecube.alias0) as `dim1`, (basecube.alias1) " +
-        "as `dim11` from"), hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith("select (basecube.alias0) as `dim1`, (basecube.alias1) "
+        + "as `dim11` from"), hqlQuery);
     assertTrue(hqlQuery.contains("UNION ALL")
       && hqlQuery.endsWith("HAVING ((sum((basecube.alias2)) + round((sum((basecube.alias3)) / 1000))) <= 1000)"),
         hqlQuery);
@@ -1148,8 +1085,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     hqlQuery = rewrite("select dim1, dim11 from basecube where " + TWO_DAYS_RANGE
       + "having msr12 > 2 or roundedmsr2 > 0 or msr12+roundedmsr2 <= 1000", conf);
     expected1 = getExpectedQuery(cubeName,
-      "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum((basecube.msr12)) as `alias2`, " +
-          "sum(0.0) as `alias3`, sum((basecube.msr12)) as `alias4`, sum(0.0) as `alias5` FROM ",
+      "SELECT (basecube.dim1) as `alias0`, (basecube.dim11) as `alias1`, sum((basecube.msr12)) as `alias2`, "
+          + "sum(0.0) as `alias3`, sum((basecube.msr12)) as `alias4`, sum(0.0) as `alias5` FROM ",
       null, " group by basecube.dim1, basecube.dim11",
       getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
     expected2 = getExpectedQuery(cubeName,

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBridgeTableQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBridgeTableQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBridgeTableQueries.java
index 76618a7..2bf1ef8 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBridgeTableQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBridgeTableQueries.java
@@ -295,18 +295,22 @@ public class TestBridgeTableQueries extends TestQueryRewrite {
             + "as `xsports`, (yusersports.balias0) as `ysports`, sum((basecube.msr2)) as `sum(msr2)` FROM ",
         " join " + getDbName() + "c1_usertable userdim_1 on basecube.userid = userdim_1.id "
             + " join  (select user_interests_1.user_id as user_id, collect_set(usersports.name) as balias0 from "
-            + getDbName() + "c1_user_interests_tbl user_interests_1 join " + getDbName() + "c1_sports_tbl usersports on "
+            + getDbName() + "c1_user_interests_tbl user_interests_1 join " + getDbName()
+            + "c1_sports_tbl usersports on "
             + "user_interests_1.sport_id = usersports.id group by user_interests_1.user_id) "
             + "usersports on userdim_1.id = usersports.user_id"
             + " join " + getDbName() + "c1_usertable userdim_0 on basecube.yuserid = userdim_0.id "
             + " join  (select user_interests_0.user_id as user_id,collect_set(yusersports.name) as balias0 from "
-            + getDbName() + "c1_user_interests_tbl user_interests_0 join " + getDbName() + "c1_sports_tbl yusersports on "
-            + " user_interests_0.sport_id = yusersports.id group by user_interests_0.user_id) yusersports on userdim_0.id ="
+            + getDbName() + "c1_user_interests_tbl user_interests_0 join " + getDbName()
+            + "c1_sports_tbl yusersports on  user_interests_0.sport_id = yusersports.id group by "
+            + "user_interests_0.user_id) yusersports on userdim_0.id ="
             + " yusersports.user_id join " + getDbName() + "c1_usertable userdim on basecube.xuserid = userdim.id"
             + " join  (select user_interests.user_id as user_id,collect_set(xusersports.name) as balias0 from "
-            + getDbName() + "c1_user_interests_tbl user_interests join " + getDbName() + "c1_sports_tbl xusersports"
-            + " on user_interests.sport_id = xusersports.id group by user_interests.user_id) xusersports on userdim.id = "
-            + " xusersports.user_id", null, "group by usersports.balias0, xusersports.balias0, yusersports.balias0", null,
+            + getDbName() + "c1_user_interests_tbl user_interests join " + getDbName()
+            + "c1_sports_tbl xusersports on user_interests.sport_id = xusersports.id "
+            + "group by user_interests.user_id) xusersports on userdim.id = "
+            + " xusersports.user_id",
+        null, "group by usersports.balias0, xusersports.balias0, yusersports.balias0", null,
         getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
     hqlQuery = rewrite(query, hConf);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
@@ -343,20 +347,26 @@ public class TestBridgeTableQueries extends TestQueryRewrite {
     expected = getExpectedQuery("basecube", "SELECT (usersports.balias0) as `sports`, (xusersports.balias0) "
             + "as `xsports`, (yusersports.balias0) as `ysports`, sum((basecube.msr2)) as `sum(msr2)` FROM ",
         " left outer join " + getDbName() + "c1_usertable userdim_1 on basecube.userid = userdim_1.id "
-            + " left outer join  (select user_interests_1.user_id as user_id, collect_set(usersports.name) as balias0 from "
-            + getDbName() + "c1_user_interests_tbl user_interests_1 join " + getDbName() + "c1_sports_tbl usersports on "
+            + " left outer join  (select user_interests_1.user_id as user_id, "
+            + "collect_set(usersports.name) as balias0 from "
+            + getDbName() + "c1_user_interests_tbl user_interests_1 join "
+            + getDbName() + "c1_sports_tbl usersports on "
             + "user_interests_1.sport_id = usersports.id group by user_interests_1.user_id) "
             + "usersports on userdim_1.id = usersports.user_id"
             + " left outer join " + getDbName() + "c1_usertable userdim_0 on basecube.yuserid = userdim_0.id "
-            + " left outer join  (select user_interests_0.user_id as user_id,collect_set(yusersports.name) as balias0 from "
-            + getDbName() + "c1_user_interests_tbl user_interests_0 join " + getDbName() + "c1_sports_tbl yusersports on "
-            + " user_interests_0.sport_id = yusersports.id group by user_interests_0.user_id) yusersports on userdim_0.id ="
-            + " yusersports.user_id left outer join " + getDbName()
+            + " left outer join  (select user_interests_0.user_id as user_id,"
+            + "collect_set(yusersports.name) as balias0 from "
+            + getDbName() + "c1_user_interests_tbl user_interests_0 join " + getDbName()
+            + "c1_sports_tbl yusersports on "
+            + " user_interests_0.sport_id = yusersports.id group by user_interests_0.user_id) "
+            + "yusersports on userdim_0.id = yusersports.user_id left outer join " + getDbName()
             + "c1_usertable userdim on basecube.xuserid = userdim.id"
-            + " left outer join  (select user_interests.user_id as user_id,collect_set(xusersports.name) as balias0 from "
-            + getDbName() + "c1_user_interests_tbl user_interests join " + getDbName() + "c1_sports_tbl xusersports"
-            + " on user_interests.sport_id = xusersports.id group by user_interests.user_id) xusersports on userdim.id = "
-            + " xusersports.user_id", null, "group by usersports.balias0, xusersports.balias0, yusersports.balias0", null,
+            + " left outer join  (select user_interests.user_id as user_id,"
+            + "collect_set(xusersports.name) as balias0 from " + getDbName()
+            + "c1_user_interests_tbl user_interests join " + getDbName() + "c1_sports_tbl xusersports"
+            + " on user_interests.sport_id = xusersports.id group by user_interests.user_id) "
+            + "xusersports on userdim.id =  xusersports.user_id", null,
+        "group by usersports.balias0, xusersports.balias0, yusersports.balias0", null,
         getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
     hqlQuery = rewrite(query, conf);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
@@ -368,8 +378,8 @@ public class TestBridgeTableQueries extends TestQueryRewrite {
     conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
     String query = "select usersports.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
     String hqlQuery = rewrite(query, conf);
-    String expected = getExpectedQuery("basecube", "SELECT (usersports.balias0) as `name`, sum((basecube.msr2)) " +
-        "as `sum(msr2)` FROM ", " join " + getDbName()
+    String expected = getExpectedQuery("basecube", "SELECT (usersports.balias0) as `name`, sum((basecube.msr2)) "
+        + "as `sum(msr2)` FROM ", " join " + getDbName()
         + "c2_usertable userdim ON basecube.userid = userdim.id and userdim.dt='latest' "
         + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as balias0"
         + " from " + getDbName() + "c2_user_interests_tbl user_interests"
@@ -382,8 +392,8 @@ public class TestBridgeTableQueries extends TestQueryRewrite {
     TestCubeRewriter.compareQueries(hqlQuery, expected);
     // run with chain ref column
     query = "select sports, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
-    expected = getExpectedQuery("basecube", "SELECT (usersports.balias0) as `sports`, sum((basecube.msr2)) " +
-            "as `sum(msr2)` FROM ", " join " + getDbName()
+    expected = getExpectedQuery("basecube", "SELECT (usersports.balias0) as `sports`, sum((basecube.msr2)) "
+            + "as `sum(msr2)` FROM ", " join " + getDbName()
             + "c2_usertable userdim ON basecube.userid = userdim.id and userdim.dt='latest' "
             + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as balias0"
             + " from " + getDbName() + "c2_user_interests_tbl user_interests"
@@ -415,7 +425,8 @@ public class TestBridgeTableQueries extends TestQueryRewrite {
         + "c1_statetable statedim_0 on citydim.stateid=statedim_0.id and statedim_0.dt='latest'"
         + " join " + getDbName()
         + "c1_countrytable cubecitystatecountry on statedim_0.countryid=cubecitystatecountry.id"
-        + " join " + getDbName() + "c1_statetable statedim on basecube.stateid=statedim.id and (statedim.dt = 'latest')"
+        + " join " + getDbName() + "c1_statetable statedim on basecube.stateid=statedim.id "
+          + "and (statedim.dt = 'latest')"
         + " join " + getDbName() + "c1_countrytable cubestatecountry on statedim.countryid=cubestatecountry.id ",
       null, "group by usersports.balias0, cubestatecountry.name, cubecitystatecountry.name", null,
       getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
@@ -431,12 +442,13 @@ public class TestBridgeTableQueries extends TestQueryRewrite {
             + " join " + getDbName() + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
             + " group by user_interests.user_id) usersports"
             + " on userdim.id = usersports.user_id "
-            + " join " + getDbName() + "c1_citytable citydim on basecube.cityid = citydim.id and (citydim.dt = 'latest')"
-            + " join " + getDbName()
+            + " join " + getDbName() + "c1_citytable citydim on basecube.cityid = citydim.id "
+            + "and (citydim.dt = 'latest') join " + getDbName()
             + "c1_statetable statedim_0 on citydim.stateid=statedim_0.id and statedim_0.dt='latest'"
             + " join " + getDbName()
             + "c1_countrytable cubecitystatecountry on statedim_0.countryid=cubecitystatecountry.id"
-            + " join " + getDbName() + "c1_statetable statedim on basecube.stateid=statedim.id and (statedim.dt = 'latest')"
+            + " join " + getDbName() + "c1_statetable statedim on basecube.stateid=statedim.id "
+            + "and (statedim.dt = 'latest')"
             + " join " + getDbName() + "c1_countrytable cubestatecountry on statedim.countryid=cubestatecountry.id ",
         null, "group by usersports.balias0, cubestatecountry.name, cubecitystatecountry.name", null,
         getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
@@ -503,25 +515,26 @@ public class TestBridgeTableQueries extends TestQueryRewrite {
   @Test
   public void testBridgeTablesWithFilterAndOrderby() throws Exception {
     String query = "select usersports.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE
-      + " and usersports.name = 'CRICKET' order by usersports.name";
+        + " and usersports.name = 'CRICKET' order by usersports.name";
     String hqlQuery = rewrite(query, hConf);
-    String expected = getExpectedQuery("basecube", "SELECT (usersports.balias0) as `name`, sum((basecube.msr2)) " +
-        "as `sum(msr2)` FROM ", " join "
-        + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
-        + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as balias0"
-        + " from " + getDbName() + "c1_user_interests_tbl user_interests"
-        + " join " + getDbName() + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id "
-        + " group by user_interests.user_id) usersports"
-        + " on userdim.id = usersports.user_id ",
-      null,
-      " and array_contains(usersports.balias0, 'CRICKET') group by usersports.balias0 order by usersports.balias0 asc",
-      null, getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
+    String expected = getExpectedQuery("basecube", "SELECT (usersports.balias0) as `name`, sum((basecube.msr2)) "
+            + "as `sum(msr2)` FROM ", " join "
+            + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
+            + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as balias0"
+            + " from " + getDbName() + "c1_user_interests_tbl user_interests"
+            + " join " + getDbName() + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id "
+            + " group by user_interests.user_id) usersports"
+            + " on userdim.id = usersports.user_id ",
+        null,
+        " and array_contains(usersports.balias0, 'CRICKET') group by usersports.balias0 "
+            + "order by name asc",
+        null, getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
     // run with chain ref column
     query = "select sports, sum(msr2) from basecube where " + TWO_DAYS_RANGE + " and sports = 'CRICKET' order by "
-      + "sports";
-    expected = getExpectedQuery("basecube", "SELECT (usersports.balias0) as `sports`, sum((basecube.msr2)) " +
-            "as `sum(msr2)` FROM ", " join "
+        + "sports";
+    expected = getExpectedQuery("basecube", "SELECT (usersports.balias0) as `sports`, sum((basecube.msr2)) "
+            + "as `sum(msr2)` FROM ", " join "
             + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
             + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as balias0"
             + " from " + getDbName() + "c1_user_interests_tbl user_interests"
@@ -529,7 +542,8 @@ public class TestBridgeTableQueries extends TestQueryRewrite {
             + " group by user_interests.user_id) usersports"
             + " on userdim.id = usersports.user_id ",
         null,
-        " and array_contains(usersports.balias0, 'CRICKET') group by usersports.balias0 order by usersports.balias0 asc",
+        " and array_contains(usersports.balias0, 'CRICKET') group by usersports.balias0 "
+            + "order by sports asc",
         null, getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
     hqlQuery = rewrite(query, hConf);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
@@ -664,8 +678,8 @@ public class TestBridgeTableQueries extends TestQueryRewrite {
     TestCubeRewriter.compareContains(expected1, hqlQuery);
     TestCubeRewriter.compareContains(expected2, hqlQuery);
     lower = hqlQuery.toLowerCase();
-    assertTrue(lower.startsWith("select (basecube.alias0) as `sports`, sum((basecube.alias1)) as `msr2`, " +
-        "sum((basecube.alias2)) as `msr12` from"), hqlQuery);
+    assertTrue(lower.startsWith("select (basecube.alias0) as `sports`, sum((basecube.alias1)) as `msr2`, "
+        + "sum((basecube.alias2)) as `msr12` from"), hqlQuery);
 
     assertTrue(hqlQuery.contains("UNION ALL") && hqlQuery.endsWith("GROUP BY (basecube.alias0)"),
       hqlQuery);
@@ -760,7 +774,8 @@ public class TestBridgeTableQueries extends TestQueryRewrite {
             + "sum((basecube.msr2)) as `sum(msr2)` FROM ",
         " join " + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
             + " join (select user_interests.user_id as user_id,collect_set(substr(usersports.name, 3)) as balias0"
-            + " collect_set(( usersports . name )) as balias1 from " + getDbName() + "c1_user_interests_tbl user_interests"
+            + " collect_set(( usersports . name )) as balias1 from " + getDbName()
+            + "c1_user_interests_tbl user_interests"
             + " join " + getDbName() + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id "
             + " group by user_interests.user_id) usersports"
             + " on userdim.id = usersports.user_id ",
@@ -789,7 +804,8 @@ public class TestBridgeTableQueries extends TestQueryRewrite {
       getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
     TestCubeRewriter.compareQueries(hqlQuery, expected);
     // run with chain ref column
-    query = "select sports_abbr, sum(msr2) from basecube where " + TWO_DAYS_RANGE + " and sports = 'CRICKET,FOOTBALL'";
+    query = "select sports_abbr, sum(msr2) from basecube where " + TWO_DAYS_RANGE
+        + " and sports = 'CRICKET,FOOTBALL'";
     expected = getExpectedQuery("basecube", "SELECT substr((usersports.name), 3) as "
             + "`sports_abbr`, sum((basecube.msr2)) as `sum(msr2)` FROM ", " join "
             + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
@@ -963,8 +979,8 @@ public class TestBridgeTableQueries extends TestQueryRewrite {
     TestCubeRewriter.compareContains(expected1, hqlQuery);
     TestCubeRewriter.compareContains(expected2, hqlQuery);
     String lower = hqlQuery.toLowerCase();
-    assertTrue(lower.startsWith("select (basecube.alias0) as `substr((usersports.name), 3)`, " +
-        "sum((basecube.alias1)) as `msr2`, sum((basecube.alias2)) as `msr12` from"),
+    assertTrue(lower.startsWith("select (basecube.alias0) as `substr((usersports.name), 3)`, "
+        + "sum((basecube.alias1)) as `msr2`, sum((basecube.alias2)) as `msr12` from"),
       hqlQuery);
 
     assertTrue(hqlQuery.contains("UNION ALL") && hqlQuery.endsWith("GROUP BY (basecube.alias0)"),
@@ -997,8 +1013,8 @@ public class TestBridgeTableQueries extends TestQueryRewrite {
     TestCubeRewriter.compareContains(expected2, hqlQuery);
     lower = hqlQuery.toLowerCase();
     assertTrue(lower.startsWith(
-      "select (basecube.alias0) as `sports_abbr`, sum((basecube.alias1)) as `msr2`, " +
-          "sum((basecube.alias2)) as `msr12` from"), hqlQuery);
+      "select (basecube.alias0) as `sports_abbr`, sum((basecube.alias1)) as `msr2`, "
+          + "sum((basecube.alias2)) as `msr12` from"), hqlQuery);
     assertTrue(hqlQuery.contains("UNION ALL")
         && hqlQuery.endsWith("GROUP BY (basecube.alias0)"),
       hqlQuery);
@@ -1046,7 +1062,8 @@ public class TestBridgeTableQueries extends TestQueryRewrite {
       " join " + getDbName() + "c1_usertable userchain ON basecube.userid = userchain.id "
         + " join ( select userinterestids.user_id as user_id, collect_set(userinterestids.sport_id) as balias0,"
         + " collect_set(userinterestids.user_id) as balias1 from  " + getDbName() + "c1_user_interests_tbl "
-        + " userinterestids group by userinterestids.user_id) userinterestids on userchain.id = userinterestids.user_id"
+        + " userinterestids group by userinterestids.user_id) userinterestids "
+        + "on userchain.id = userinterestids.user_id"
         + " join  (select userinterestids.user_id as user_id, collect_set(usersports . id) as balias0 from"
         + getDbName() + " c1_user_interests_tbl userinterestids join " + getDbName() + "c1_sports_tbl"
         + " usersports on userinterestids.sport_id = usersports.id group by userinterestids.user_id) usersports"


[4/4] lens git commit: Deleted deprecated classes, Fixed Checkstyles, Fixed test cases, Fixed duplicate projections

Posted by pu...@apache.org.
Deleted deprecated classes, Fixed Checkstyles, Fixed test cases, Fixed duplicate projections


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/975fa2c2
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/975fa2c2
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/975fa2c2

Branch: refs/heads/lens-1381
Commit: 975fa2c2b110ebf8652bd0ce67cd86bb9ac35c03
Parents: 4af769e
Author: Sushil Mohanty,Puneet Gupta and Lavkesh Lahngir <su...@apache.org>
Authored: Fri Feb 17 16:28:05 2017 +0530
Committer: Puneet <pu...@inmobi.com>
Committed: Fri Feb 17 16:28:05 2017 +0530

----------------------------------------------------------------------
 .../NoCandidateFactAvailableException.java      |   7 +-
 .../lens/cube/metadata/CubeMetastoreClient.java |   4 +-
 .../lens/cube/metadata/FactPartition.java       |   3 -
 .../lens/cube/parse/AggregateResolver.java      |   2 -
 .../org/apache/lens/cube/parse/Candidate.java   |  23 +-
 .../parse/CandidateCoveringSetsResolver.java    |  74 +++-
 .../apache/lens/cube/parse/CandidateFact.java   | 381 ----------------
 .../cube/parse/CandidateTablePruneCause.java    |  84 ++--
 .../lens/cube/parse/CandidateTableResolver.java |  22 +-
 .../apache/lens/cube/parse/CandidateUtil.java   |  78 ++--
 .../lens/cube/parse/CubeQueryContext.java       | 100 +----
 .../lens/cube/parse/CubeQueryRewriter.java      |   4 +-
 .../lens/cube/parse/DefaultAliasDecider.java    |   4 +-
 .../cube/parse/DenormalizationResolver.java     |  28 +-
 .../lens/cube/parse/ExpressionResolver.java     |   9 +-
 .../apache/lens/cube/parse/GroupbyResolver.java |   8 +-
 .../apache/lens/cube/parse/JoinCandidate.java   |  33 +-
 .../lens/cube/parse/LeastPartitionResolver.java |   2 +-
 .../cube/parse/MaxCoveringFactResolver.java     |   3 +-
 .../org/apache/lens/cube/parse/PruneCauses.java |   5 +-
 .../lens/cube/parse/QueriedPhraseContext.java   |  33 +-
 .../lens/cube/parse/StorageCandidate.java       | 192 +++++---
 .../lens/cube/parse/StorageTableResolver.java   |  43 +-
 .../lens/cube/parse/TimeRangeChecker.java       |   1 -
 .../apache/lens/cube/parse/UnionCandidate.java  |  24 +-
 .../lens/cube/parse/UnionQueryWriter.java       | 275 +++++++++---
 .../lens/cube/parse/join/AutoJoinContext.java   |   2 -
 .../apache/lens/driver/cube/RewriterPlan.java   |   1 -
 .../apache/lens/cube/parse/CubeTestSetup.java   |   7 +-
 .../lens/cube/parse/TestAggregateResolver.java  | 188 ++++----
 .../lens/cube/parse/TestBaseCubeQueries.java    | 433 ++++++++-----------
 .../lens/cube/parse/TestBridgeTableQueries.java | 115 ++---
 .../lens/cube/parse/TestCubeRewriter.java       | 204 +++++----
 .../cube/parse/TestDenormalizationResolver.java |  50 ++-
 .../lens/cube/parse/TestExpressionResolver.java |  12 +-
 .../lens/cube/parse/TestJoinResolver.java       |  22 +-
 .../lens/cube/parse/TestQueryMetrics.java       |  26 +-
 .../lens/cube/parse/TestRewriterPlan.java       |   2 -
 .../lens/cube/parse/TestTimeRangeResolver.java  |  58 ++-
 .../parse/TestTimeRangeWriterWithQuery.java     |  15 +-
 .../cube/parse/TestUnionAndJoinCandidates.java  |  23 +-
 .../lens/cube/parse/TestUnionQueries.java       |   1 -
 42 files changed, 1224 insertions(+), 1377 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java b/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
index 7d12762..301458f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
@@ -18,7 +18,6 @@
  */
 package org.apache.lens.cube.error;
 
-import org.apache.lens.cube.metadata.CubeFactTable;
 import org.apache.lens.cube.parse.PruneCauses;
 import org.apache.lens.cube.parse.StorageCandidate;
 import org.apache.lens.server.api.error.LensException;
@@ -29,7 +28,11 @@ public class NoCandidateFactAvailableException extends LensException {
   private final PruneCauses<StorageCandidate> briefAndDetailedError;
 
   public NoCandidateFactAvailableException(PruneCauses<StorageCandidate> briefAndDetailedError) {
-    super(LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo(), briefAndDetailedError.getBriefCause());
+    this(briefAndDetailedError.getBriefCause(), briefAndDetailedError);
+  }
+
+  public NoCandidateFactAvailableException(String errMsg, PruneCauses<StorageCandidate> briefAndDetailedError) {
+    super(LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo(), errMsg);
     this.briefAndDetailedError = briefAndDetailedError;
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index 6c9cde2..aa2e9d1 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@ -938,14 +938,14 @@ public class CubeMetastoreClient {
     }
   }
 
-  private Date getStorageTableStartDate(String storageTable, String factTableName)
+  public Date getStorageTableStartDate(String storageTable, String factTableName)
     throws LensException {
     List<Date> startDates = getStorageTimes(storageTable, MetastoreUtil.getStoragetableStartTimesKey());
     startDates.add(getFactTable(factTableName).getStartTime());
     return Collections.max(startDates);
   }
 
-  private Date getStorageTableEndDate(String storageTable, String factTableName)
+  public Date getStorageTableEndDate(String storageTable, String factTableName)
     throws LensException {
     List<Date> endDates = getStorageTimes(storageTable, MetastoreUtil.getStoragetableEndTimesKey());
     endDates.add(getFactTable(factTableName).getEndTime());

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
index 86d6056..1694b80 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
@@ -30,7 +30,6 @@ import lombok.Getter;
 import lombok.Setter;
 
 @EqualsAndHashCode
-// TODO union : Change the class name To StoragePartition
 public class FactPartition implements Comparable<FactPartition> {
   @Getter
   private final String partCol;
@@ -40,8 +39,6 @@ public class FactPartition implements Comparable<FactPartition> {
   private final Set<String> storageTables = new LinkedHashSet<String>();
   @Getter
   private final UpdatePeriod period;
-
-  //TODO union : this is never set . Do we need this ?s
   @Getter
   @Setter
   private FactPartition containingPart;

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
index 79f38da..30b1a90 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
@@ -27,7 +27,6 @@ import java.util.Iterator;
 import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.CubeMeasure;
 import org.apache.lens.cube.metadata.ExprColumn;
-import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.cube.parse.ExpressionResolver.ExprSpecContext;
 import org.apache.lens.server.api.error.LensException;
 
@@ -71,7 +70,6 @@ class AggregateResolver implements ContextRewriter {
       || hasMeasuresNotInDefaultAggregates(cubeql, cubeql.getHavingAST(), null, aggregateResolverDisabled)
       || hasMeasures(cubeql, cubeql.getWhereAST()) || hasMeasures(cubeql, cubeql.getGroupByAST())
       || hasMeasures(cubeql, cubeql.getOrderByAST())) {
-      //TODO union : Note : Pending : cube segmentation design may change the above assumption and Set<Candidate> can contain and mix of StorageCandidate and UnionSegmentCandidate. This step can then ignore UnionSegmentCandidate
       Iterator<Candidate> candItr = cubeql.getCandidates().iterator();
       while (candItr.hasNext()) {
         Candidate candidate = candItr.next();

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/Candidate.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/Candidate.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/Candidate.java
index 1987939..095a297 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/Candidate.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/Candidate.java
@@ -1,6 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
 package org.apache.lens.cube.parse;
 
-import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Date;
 import java.util.Set;
@@ -93,7 +110,7 @@ public interface Candidate {
    * 1. For a JoinCandidate, atleast one of the child candidates should be able to answer the expression
    * 2. For a UnionCandidate, all child candidates should answer the expression
    *
-   * @param expr
+   * @param expr : Expression to be evaluated for this Candidate
    * @return
    */
   boolean isExpressionEvaluable(ExpressionResolver.ExpressionContext expr);
@@ -104,4 +121,4 @@ public interface Candidate {
    */
   Set<Integer> getAnswerableMeasurePhraseIndices();
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java
index 6d85edf..a3a42ab 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java
@@ -1,8 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
 package org.apache.lens.cube.parse;
 
 import java.util.*;
 
-import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.TimeRange;
 import org.apache.lens.server.api.error.LensException;
 
@@ -31,9 +48,22 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
       finalCandidates.addAll(cubeql.getCandidates());
     }
     List<Candidate> timeRangeCoveringSet = resolveTimeRangeCoveringFactSet(cubeql, queriedMsrs, qpcList);
+//    if (timeRangeCoveringSet.isEmpty()) {
+//      throw new NoCandidateFactAvailableException(cubeql.getCube().getName()
+//        + " does not have any facts that can cover the requested time range " + cubeql.getTimeRanges().toString()
+//        + " and queried measure set " + getColumns(queriedMsrs).toString(),
+//        cubeql.getStoragePruningMsgs());
+//    }
+    log.info("Time covering candidates :{}", timeRangeCoveringSet);
     List<List<Candidate>> measureCoveringSets = resolveJoinCandidates(timeRangeCoveringSet, queriedMsrs, cubeql);
+//    if (measureCoveringSets.isEmpty()) {
+//      throw new NoCandidateFactAvailableException(cubeql.getCube().getName()
+//        + " does not have any facts that can cover the queried measure set "
+//        + getColumns(queriedMsrs).toString(),
+//        cubeql.getStoragePruningMsgs());
+//    }
     updateFinalCandidates(measureCoveringSets, cubeql);
-    log.info("Covering candidate sets :{}", finalCandidates);
+    log.info("Final Time and Measure covering candidates :{}", finalCandidates);
     cubeql.getCandidates().clear();
     cubeql.getCandidates().addAll(finalCandidates);
   }
@@ -50,7 +80,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
   }
 
   private void updateFinalCandidates(List<List<Candidate>> joinCandidates, CubeQueryContext cubeql) {
-    for (Iterator<List<Candidate>> itr = joinCandidates.iterator(); itr.hasNext(); ) {
+    for (Iterator<List<Candidate>> itr = joinCandidates.iterator(); itr.hasNext();) {
       List<Candidate> joinCandidate = itr.next();
       if (joinCandidate.size() == 1) {
         finalCandidates.add(joinCandidate.iterator().next());
@@ -61,7 +91,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
   }
 
   private boolean isCandidateCoveringTimeRanges(UnionCandidate uc, List<TimeRange> ranges) {
-    for (Iterator<TimeRange> itr = ranges.iterator(); itr.hasNext(); ) {
+    for (Iterator<TimeRange> itr = ranges.iterator(); itr.hasNext();) {
       TimeRange range = itr.next();
       if (!CandidateUtil.isTimeRangeCovered(uc.getChildren(), range.getFromDate(), range.getToDate())) {
         return false;
@@ -70,11 +100,12 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
     return true;
   }
 
-  private void pruneUnionCandidatesNotCoveringAllRanges(List<UnionCandidate> ucs, List<TimeRange> ranges) {
-    for (Iterator<UnionCandidate> itr = ucs.iterator(); itr.hasNext(); ) {
+  private void pruneUnionCandidatesNotCoveringAllRanges(List<UnionCandidate> ucs, CubeQueryContext cubeql) {
+    for (Iterator<UnionCandidate> itr = ucs.iterator(); itr.hasNext();) {
       UnionCandidate uc = itr.next();
-      if (!isCandidateCoveringTimeRanges(uc, ranges)) {
+      if (!isCandidateCoveringTimeRanges(uc, cubeql.getTimeRanges())) {
         itr.remove();
+        cubeql.addCandidatePruningMsg(uc, CandidateTablePruneCause.storageNotAvailableInRange(cubeql.getTimeRanges()));
       }
     }
   }
@@ -96,7 +127,8 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
         } else if (CandidateUtil.isPartiallyValidForTimeRanges(sc, cubeql.getTimeRanges())) {
           allCandidatesPartiallyValid.add(CandidateUtil.cloneStorageCandidate(sc));
         } else {
-          //TODO union : Add cause
+          cubeql.addCandidatePruningMsg(sc, CandidateTablePruneCause.storageNotAvailableInRange(
+            cubeql.getTimeRanges()));
         }
       } else {
         throw new LensException("Not a StorageCandidate!!");
@@ -108,7 +140,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
     // Sort the Collection based on no of elements
     Collections.sort(unionCoveringSet, new CandidateUtil.ChildrenSizeBasedCandidateComparator<UnionCandidate>());
     // prune non covering sets
-    pruneUnionCandidatesNotCoveringAllRanges(unionCoveringSet, cubeql.getTimeRanges());
+    pruneUnionCandidatesNotCoveringAllRanges(unionCoveringSet, cubeql);
     // prune candidate sets which don't contain any common measure
     pruneUnionCoveringSetWithoutAnyCommonMeasure(unionCoveringSet, queriedMsrs, cubeql);
     // prune redundant covering sets
@@ -116,7 +148,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
     // pruning done in the previous steps, now create union candidates
     candidateSet.addAll(unionCoveringSet);
     updateQueriableMeasures(candidateSet, qpcList, cubeql);
-    return candidateSet ;
+    return candidateSet;
   }
 
   private boolean isMeasureAnswerablebyUnionCandidate(QueriedPhraseContext msr, Candidate uc,
@@ -137,7 +169,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
   private void pruneUnionCoveringSetWithoutAnyCommonMeasure(List<UnionCandidate> ucs,
       Set<QueriedPhraseContext> queriedMsrs,
       CubeQueryContext cubeql) throws LensException {
-    for (ListIterator<UnionCandidate> itr = ucs.listIterator(); itr.hasNext(); ) {
+    for (ListIterator<UnionCandidate> itr = ucs.listIterator(); itr.hasNext();) {
       boolean toRemove = true;
       UnionCandidate uc = itr.next();
       for (QueriedPhraseContext msr : queriedMsrs) {
@@ -156,7 +188,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
     for (int i = 0; i < candidates.size(); i++) {
       UnionCandidate current = candidates.get(i);
       int j = i + 1;
-      for (ListIterator<UnionCandidate> itr = candidates.listIterator(j); itr.hasNext(); ) {
+      for (ListIterator<UnionCandidate> itr = candidates.listIterator(j); itr.hasNext();) {
         UnionCandidate next = itr.next();
         if (next.getChildren().containsAll(current.getChildren())) {
           itr.remove();
@@ -182,7 +214,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
         clonedI = clonedI >>> 1;
         --count;
       }
-      combinations.add(new UnionCandidate(individualCombinationList, cubeql ));
+      combinations.add(new UnionCandidate(individualCombinationList, cubeql));
     }
     return combinations;
   }
@@ -192,7 +224,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
     List<List<Candidate>> msrCoveringSets = new ArrayList<>();
     List<Candidate> ucSet = new ArrayList<>(unionCandidates);
     // Check if a single set can answer all the measures and exprsWithMeasures
-    for (Iterator<Candidate> i = ucSet.iterator(); i.hasNext(); ) {
+    for (Iterator<Candidate> i = ucSet.iterator(); i.hasNext();) {
       boolean evaluable = false;
       Candidate uc = i.next();
       for (QueriedPhraseContext msr : msrs) {
@@ -211,7 +243,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
     }
     // Sets that contain all measures or no measures are removed from iteration.
     // find other facts
-    for (Iterator<Candidate> i = ucSet.iterator(); i.hasNext(); ) {
+    for (Iterator<Candidate> i = ucSet.iterator(); i.hasNext();) {
       Candidate uc = i.next();
       i.remove();
       // find the remaining measures in other facts
@@ -238,7 +270,7 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
 
   private void updateQueriableMeasures(List<Candidate> cands,
       List<QueriedPhraseContext> qpcList, CubeQueryContext cubeql) throws LensException {
-    for (Candidate cand : cands ) {
+    for (Candidate cand : cands) {
       updateStorageCandidateQueriableMeasures(cand, qpcList, cubeql);
     }
   }
@@ -276,4 +308,12 @@ public class CandidateCoveringSetsResolver implements ContextRewriter {
       }
     }
   }
-}
\ No newline at end of file
+
+  private static Set<String> getColumns(Collection<QueriedPhraseContext> queriedPhraseContexts) {
+    Set<String> cols = new HashSet<>();
+    for (QueriedPhraseContext qur : queriedPhraseContexts) {
+      cols.addAll(qur.getColumns());
+    }
+    return cols;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
deleted file mode 100644
index ef7b9bc..0000000
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
+++ /dev/null
@@ -1,381 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
-
-import java.util.*;
-
-import org.apache.lens.cube.metadata.*;
-import org.apache.lens.server.api.error.LensException;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.lib.Node;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.session.SessionState;
-
-import org.antlr.runtime.CommonToken;
-
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-import lombok.Getter;
-import lombok.Setter;
-
-//TODO union : delete this class and use Candidate and StorageCandidtae
-/**
- * Holds context of a candidate fact table.
- */
-@Deprecated
-public class CandidateFact implements CandidateTable, QueryAST {
-  final CubeFactTable fact;
-  @Getter
-  private Set<String> storageTables;
-  @Getter
-  private int numQueriedParts = 0;
-  @Getter
-  private final Set<FactPartition> partsQueried = Sets.newHashSet();
-
-  private CubeInterface baseTable;
-  @Getter
-  @Setter
-  private ASTNode selectAST;
-  @Getter
-  @Setter
-  private ASTNode whereAST;
-  @Getter
-  @Setter
-  private ASTNode groupByAST;
-  @Getter
-  @Setter
-  private ASTNode havingAST;
-  @Getter
-  @Setter
-  private ASTNode joinAST;
-  @Getter
-  @Setter
-  private ASTNode orderByAST;
-  @Getter
-  @Setter
-  private Integer limitValue;
-  @Getter
-  private String fromString;
-  private final List<Integer> selectIndices = Lists.newArrayList();
-  private final List<Integer> dimFieldIndices = Lists.newArrayList();
-  private Collection<String> columns;
-  @Getter
-  private final Map<String, ASTNode> storgeWhereClauseMap = new HashMap<>();
-  @Getter
-  private final Map<String, String> storgeWhereStringMap = new HashMap<>();
-  @Getter
-  private final Map<TimeRange, Map<String, LinkedHashSet<FactPartition>>> rangeToStoragePartMap = new HashMap<>();
-  @Getter
-  private final Map<TimeRange, Map<String, String>> rangeToStorageWhereMap = new HashMap<>();
-  @Getter
-  @Setter
-  private Map<String, Map<String, Float>> dataCompletenessMap;
-
-  CandidateFact(CubeFactTable fact, CubeInterface cube) {
-    this.fact = fact;
-    this.baseTable = cube;
-  }
-
-  @Override
-  public String toString() {
-    return fact.toString();
-  }
-
-  public Collection<String> getColumns() {
-    if (columns == null) {
-      columns = fact.getValidColumns();
-      if (columns == null) {
-        columns = fact.getAllFieldNames();
-      }
-    }
-    return columns;
-  }
-
-  @Override
-  public Set<?> getParticipatingPartitions() {
-    return null;
-  }
-
-  public boolean isValidForTimeRange(TimeRange timeRange) {
-    return (!timeRange.getFromDate().before(fact.getStartTime())) && (!timeRange.getToDate().after(fact.getEndTime()));
-  }
-
-  public void addToHaving(ASTNode ast) {
-    if (getHavingAST() == null) {
-      setHavingAST(new ASTNode(new CommonToken(TOK_HAVING, "TOK_HAVING")));
-      getHavingAST().addChild(ast);
-      return;
-    }
-    ASTNode existingHavingAST = (ASTNode) getHavingAST().getChild(0);
-    ASTNode newHavingAST = new ASTNode(new CommonToken(KW_AND, "AND"));
-    newHavingAST.addChild(existingHavingAST);
-    newHavingAST.addChild(ast);
-    getHavingAST().setChild(0, newHavingAST);
-  }
-
-  public String addAndGetAliasFromSelect(ASTNode ast, AliasDecider aliasDecider) {
-    for (Node n : getSelectAST().getChildren()) {
-      ASTNode astNode = (ASTNode) n;
-      if (HQLParser.equalsAST(ast, (ASTNode) astNode.getChild(0))) {
-        if (astNode.getChildCount() > 1) {
-          return astNode.getChild(1).getText();
-        }
-        String alias = aliasDecider.decideAlias(astNode);
-        astNode.addChild(new ASTNode(new CommonToken(Identifier, alias)));
-        return alias;
-      }
-    }
-    // Not found, have to add to select
-    String alias = aliasDecider.decideAlias(ast);
-    ASTNode selectExprNode = new ASTNode(new CommonToken(TOK_SELEXPR));
-    selectExprNode.addChild(ast);
-    selectExprNode.addChild(new ASTNode(new CommonToken(Identifier, alias)));
-    getSelectAST().addChild(selectExprNode);
-    return alias;
-  }
-
-  void incrementPartsQueried(int incr) {
-    numQueriedParts += incr;
-  }
-
-  // copy ASTs from CubeQueryContext
-  public void copyASTs(CubeQueryContext cubeql) throws LensException {
-    setSelectAST(MetastoreUtil.copyAST(cubeql.getSelectAST()));
-    setWhereAST(MetastoreUtil.copyAST(cubeql.getWhereAST()));
-    if (cubeql.getJoinAST() != null) {
-      setJoinAST(MetastoreUtil.copyAST(cubeql.getJoinAST()));
-    }
-    if (cubeql.getGroupByAST() != null) {
-      setGroupByAST(MetastoreUtil.copyAST(cubeql.getGroupByAST()));
-    }
-  }
-
-
-  public ASTNode getStorageWhereClause(String storageTable) {
-    return storgeWhereClauseMap.get(storageTable);
-  }
-  public String getStorageWhereString(String storageTable) {
-    return storgeWhereStringMap.get(storageTable);
-  }
-
-  public boolean isExpressionAnswerable(ASTNode node, CubeQueryContext context) throws LensException {
-    return getColumns().containsAll(HQLParser.getColsInExpr(context.getAliasForTableName(context.getCube()), node));
-  }
-
-  /**
-   * Update the ASTs to include only the fields queried from this fact, in all the expressions
-   *
-   * @param cubeql
-   * @throws LensException
-   */
-  public void updateASTs(CubeQueryContext cubeql) throws LensException {
-    // update select AST with selected fields
-    int currentChild = 0;
-    for (int i = 0; i < cubeql.getSelectAST().getChildCount(); i++) {
-      ASTNode selectExpr = (ASTNode) this.selectAST.getChild(currentChild);
-      Set<String> exprCols = HQLParser.getColsInExpr(cubeql.getAliasForTableName(cubeql.getCube()), selectExpr);
-      if (getColumns().containsAll(exprCols)) {
-        selectIndices.add(i);
-        if (exprCols.isEmpty() // no direct fact columns
-          // does not have measure names
-          || (!containsAny(cubeql.getCube().getMeasureNames(), exprCols))) {
-          dimFieldIndices.add(i);
-        }
-        ASTNode aliasNode = HQLParser.findNodeByPath(selectExpr, Identifier);
-        String alias = cubeql.getSelectPhrases().get(i).getSelectAlias();
-        if (aliasNode != null) {
-          String queryAlias = aliasNode.getText();
-          if (!queryAlias.equals(alias)) {
-            // replace the alias node
-            ASTNode newAliasNode = new ASTNode(new CommonToken(HiveParser.Identifier, alias));
-            this.selectAST.getChild(currentChild).replaceChildren(selectExpr.getChildCount() - 1,
-              selectExpr.getChildCount() - 1, newAliasNode);
-          }
-        } else {
-          // add column alias
-          ASTNode newAliasNode = new ASTNode(new CommonToken(HiveParser.Identifier, alias));
-          this.selectAST.getChild(currentChild).addChild(newAliasNode);
-        }
-      } else {
-        this.selectAST.deleteChild(currentChild);
-        currentChild--;
-      }
-      currentChild++;
-    }
-
-    // don't need to update where ast, since where is only on dim attributes and dim attributes
-    // are assumed to be common in multi fact queries.
-
-    // push down of having clauses happens just after this call in cubequerycontext
-  }
-
-  // The source set contains atleast one column in the colSet
-  static boolean containsAny(Collection<String> srcSet, Collection<String> colSet) {
-    if (colSet == null || colSet.isEmpty()) {
-      return true;
-    }
-    for (String column : colSet) {
-      if (srcSet.contains(column)) {
-        return true;
-      }
-    }
-    return false;
-  }
-
-  @Override
-  public String getStorageString(String alias) {
-    return StringUtils.join(storageTables, ",") + " " + alias;
-  }
-
-  @Override
-  public String getStorageName() {
-    return null;
-  }
-
-  public void setStorageTables(Set<String> storageTables) {
-    String database = SessionState.get().getCurrentDatabase();
-    // Add database name prefix for non default database
-    if (StringUtils.isNotBlank(database) && !"default".equalsIgnoreCase(database)) {
-      Set<String> storageTbls = new TreeSet<>();
-      Iterator<String> names = storageTables.iterator();
-      while (names.hasNext()) {
-        storageTbls.add(database + "." + names.next());
-      }
-      this.storageTables = storageTbls;
-    } else {
-      this.storageTables = storageTables;
-    }
-  }
-
-  @Override
-  public AbstractCubeTable getBaseTable() {
-    return (AbstractCubeTable) baseTable;
-  }
-
-  @Override
-  public CubeFactTable getTable() {
-    return fact;
-  }
-
-  @Override
-  public String getName() {
-    return fact.getName();
-  }
-
-  @Override
-  public boolean equals(Object obj) {
-    if (!super.equals(obj)) {
-      return false;
-    }
-    CandidateFact other = (CandidateFact) obj;
-
-    if (this.getTable() == null) {
-      if (other.getTable() != null) {
-        return false;
-      }
-    }
-    return true;
-  }
-
-  @Override
-  public int hashCode() {
-    final int prime = 31;
-    int result = super.hashCode();
-    result = prime * result + ((getTable() == null) ? 0 : getTable().getName().toLowerCase().hashCode());
-    return result;
-  }
-
-  public String getSelectString() {
-    return HQLParser.getString(selectAST);
-  }
-
-  public String getWhereString() {
-    if (whereAST != null) {
-      return HQLParser.getString(whereAST);
-    }
-    return null;
-  }
-
-  public String getHavingString() {
-    if (havingAST != null) {
-      return HQLParser.getString(havingAST);
-    }
-    return null;
-  }
-
-  @Override
-  public String getOrderByString() {
-    if (orderByAST != null) {
-      return HQLParser.getString(orderByAST);
-    }
-    return null;
-  }
-
-  /**
-   * @return the selectIndices
-   */
-  public List<Integer> getSelectIndices() {
-    return selectIndices;
-  }
-
-  /**
-   * @return the groupbyIndices
-   */
-  public List<Integer> getDimFieldIndices() {
-    return dimFieldIndices;
-  }
-
-  public String getGroupByString() {
-    if (groupByAST != null) {
-      return HQLParser.getString(groupByAST);
-    }
-    return null;
-  }
-
-  public Set<String> getTimePartCols(CubeQueryContext query) throws LensException {
-    Set<String> cubeTimeDimensions = baseTable.getTimedDimensions();
-    Set<String> timePartDimensions = new HashSet<String>();
-    String singleStorageTable = storageTables.iterator().next();
-    List<FieldSchema> partitionKeys = null;
-    partitionKeys = query.getMetastoreClient().getTable(singleStorageTable).getPartitionKeys();
-    for (FieldSchema fs : partitionKeys) {
-      if (cubeTimeDimensions.contains(CubeQueryContext.getTimeDimOfPartitionColumn(baseTable, fs.getName()))) {
-        timePartDimensions.add(fs.getName());
-      }
-    }
-    return timePartDimensions;
-  }
-
-  /*
-  public void updateFromString(CubeQueryContext query, Set<Dimension> queryDims,
-    Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
-    fromString = "%s"; // to update the storage alias later
-    if (query.isAutoJoinResolved()) {
-      fromString =
-        query.getAutoJoinCtx().getFromString(fromString, this, queryDims, dimsToQuery,
-          query, this);
-    }
-  }
-  */
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
index cef8f37..c7f2047 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
@@ -38,25 +38,34 @@ import lombok.NoArgsConstructor;
 @JsonWriteNullProperties(false)
 @Data
 @NoArgsConstructor
-//TODO union: Since we are working on StoargeCandidates now, we might need some chnages here
 public class CandidateTablePruneCause {
 
   public enum CandidateTablePruneCode {
     // other fact set element is removed
     ELEMENT_IN_SET_PRUNED("Other candidate from measure covering set is pruned"),
-    FACT_NOT_AVAILABLE_IN_RANGE("No facts available for all of these time ranges: %s") {
-      @Override
+
+    COLUMN_NOT_FOUND("%s are not %s") {
       Object[] getFormatPlaceholders(Set<CandidateTablePruneCause> causes) {
-        Set<TimeRange> allRanges = Sets.newHashSet();
-        for (CandidateTablePruneCause cause : causes) {
-          allRanges.addAll(cause.getInvalidRanges());
+        if (causes.size() == 1) {
+          return new String[]{
+            "Columns " + causes.iterator().next().getMissingColumns(),
+            "present in any table",
+          };
+        } else {
+          List<List<String>> columnSets = new ArrayList<List<String>>();
+          for (CandidateTablePruneCause cause : causes) {
+            columnSets.add(cause.getMissingColumns());
+          }
+          return new String[]{
+            "Column Sets: " + columnSets,
+            "queriable together",
+          };
         }
-        return new Object[]{
-          allRanges.toString(),
-        };
       }
     },
-
+    // candidate table tries to get denormalized field from dimension and the
+    // referred dimension is invalid.
+    INVALID_DENORM_TABLE("Referred dimension is invalid in one of the candidate tables"),
 
     // Moved from Stoarge causes .
     //The storage is removed as its not set in property "lens.cube.query.valid.fact.<fact_name>.storagetables"
@@ -65,9 +74,7 @@ public class CandidateTablePruneCause {
     // STOARGE_TABLE_DOES_NOT_EXIST("Storage table does not exist"),
     // storage has no update periods queried. Commented as its not being used anywhere in master.
     // MISSING_UPDATE_PERIODS("Storage has no update periods"),
-    // no candidate update periods, update period cause will have why each
-    // update period is not a candidate
-    NO_CANDIDATE_UPDATE_PERIODS("Storage update periods are not candidate"),
+
     // storage table has no partitions queried
     NO_PARTITIONS("Storage table has no partitions"),
     // partition column does not exist
@@ -76,7 +83,20 @@ public class CandidateTablePruneCause {
     TIME_RANGE_NOT_ANSWERABLE("Range not answerable"),
     // storage is not supported by execution engine/driver
     UNSUPPORTED_STORAGE("Unsupported Storage"),
-    
+
+    STORAGE_NOT_AVAILABLE_IN_RANGE("No storages available for all of these time ranges: %s") {
+      @Override
+      Object[] getFormatPlaceholders(Set<CandidateTablePruneCause> causes) {
+        Set<TimeRange> allRanges = Sets.newHashSet();
+        for (CandidateTablePruneCause cause : causes) {
+          allRanges.addAll(cause.getInvalidRanges());
+        }
+        return new Object[]{
+          allRanges.toString(),
+        };
+      }
+    },
+
     // least weight not satisfied
     MORE_WEIGHT("Picked table had more weight than minimum."),
     // partial data is enabled, another fact has more data.
@@ -95,13 +115,10 @@ public class CandidateTablePruneCause {
         return new String[]{columns.toString()};
       }
     },
-    // candidate table tries to get denormalized field from dimension and the
-    // referred dimension is invalid.
-    INVALID_DENORM_TABLE("Referred dimension is invalid in one of the candidate tables"),
     // column not valid in cube table. Commented the below line as it's not being used in master.
     //COLUMN_NOT_VALID("Column not valid in cube table"),
     // column not found in cube table
-    COLUMN_NOT_FOUND("%s are not %s") {
+    DENORM_COLUMN_NOT_FOUND("%s are not %s") {
       Object[] getFormatPlaceholders(Set<CandidateTablePruneCause> causes) {
         if (causes.size() == 1) {
           return new String[]{
@@ -138,7 +155,13 @@ public class CandidateTablePruneCause {
         };
       }
     },
-    NO_FACT_UPDATE_PERIODS_FOR_GIVEN_RANGE("No fact update periods for given range"),
+    //Commented as it's not used anymore.
+    //NO_FACT_UPDATE_PERIODS_FOR_GIVEN_RANGE("No fact update periods for given range"),
+
+    // no candidate update periods, update period cause will have why each
+    // update period is not a candidate
+    NO_CANDIDATE_UPDATE_PERIODS("Storage update periods are not valid for given time range"),
+
     NO_COLUMN_PART_OF_A_JOIN_PATH("No column part of a join path. Join columns: [%s]") {
       Object[] getFormatPlaceholders(Set<CandidateTablePruneCause> causes) {
         List<String> columns = new ArrayList<String>();
@@ -232,8 +255,6 @@ public class CandidateTablePruneCause {
   // the fact is not partitioned by part col of the time dim and time dim is not a dim attribute
   private Set<String> unsupportedTimeDims;
   // time covered
-  // TODO union : Fix this after MaxCoveringFactResolver chnaged wrt. Candidate
-  //private MaxCoveringFactResolver.TimeCovered maxTimeCovered;
   // ranges in which fact is invalid
   private List<TimeRange> invalidRanges;
 
@@ -247,8 +268,8 @@ public class CandidateTablePruneCause {
   }
 
   // Different static constructors for different causes.
-  public static CandidateTablePruneCause factNotAvailableInRange(List<TimeRange> ranges) {
-    CandidateTablePruneCause cause = new CandidateTablePruneCause(FACT_NOT_AVAILABLE_IN_RANGE);
+  public static CandidateTablePruneCause storageNotAvailableInRange(List<TimeRange> ranges) {
+    CandidateTablePruneCause cause = new CandidateTablePruneCause(STORAGE_NOT_AVAILABLE_IN_RANGE);
     cause.invalidRanges = ranges;
     return cause;
   }
@@ -258,22 +279,23 @@ public class CandidateTablePruneCause {
     return cause;
   }
 
-  public static CandidateTablePruneCause columnNotFound(Collection<String>... missingColumns) {
+  public static CandidateTablePruneCause columnNotFound(CandidateTablePruneCode pruneCode,
+      Collection<String>... missingColumns) {
     List<String> colList = new ArrayList<String>();
     for (Collection<String> missing : missingColumns) {
       colList.addAll(missing);
     }
-    CandidateTablePruneCause cause = new CandidateTablePruneCause(COLUMN_NOT_FOUND);
+    CandidateTablePruneCause cause = new CandidateTablePruneCause(pruneCode);
     cause.setMissingColumns(colList);
     return cause;
   }
 
-  public static CandidateTablePruneCause columnNotFound(String... columns) {
+  public static CandidateTablePruneCause columnNotFound(CandidateTablePruneCode pruneCode, String... columns) {
     List<String> colList = new ArrayList<String>();
     for (String column : columns) {
       colList.add(column);
     }
-    return columnNotFound(colList);
+    return columnNotFound(pruneCode, colList);
   }
 
   public static CandidateTablePruneCause expressionNotEvaluable(String... exprs) {
@@ -300,14 +322,6 @@ public class CandidateTablePruneCause {
     return cause;
   }
 
-  // TODO union : uncomment the below method after MaxCoveringFactResolver is fixed wrt. Candidate
-  /*
-  public static CandidateTablePruneCause lessData(MaxCoveringFactResolver.TimeCovered timeCovered) {
-    CandidateTablePruneCause cause = new CandidateTablePruneCause(LESS_DATA);
-    cause.setMaxTimeCovered(timeCovered);
-    return cause;
-  }
-*/
   public static CandidateTablePruneCause noColumnPartOfAJoinPath(final Collection<String> colSet) {
     CandidateTablePruneCause cause =
       new CandidateTablePruneCause(NO_COLUMN_PART_OF_A_JOIN_PATH);

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
index 7a885a2..2ab7f4b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
@@ -58,7 +58,6 @@ class CandidateTableResolver implements ContextRewriter {
   public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     if (checkForQueriedColumns) {
       log.debug("Dump queried columns:{}", cubeql.getTblAliasToColumns());
-      //TODO union : create StoargeCandidate s now in populateCandidateTables
       populateCandidateTables(cubeql);
       resolveCandidateFactTables(cubeql);
       resolveCandidateDimTables(cubeql);
@@ -74,7 +73,6 @@ class CandidateTableResolver implements ContextRewriter {
       if (cubeql.getAutoJoinCtx() != null) {
         // Before checking for candidate table columns, prune join paths containing non existing columns
         // in populated candidate tables
-        //TODO rewrite : commented below line to compile
         cubeql.getAutoJoinCtx().pruneAllPaths(cubeql.getCube(),
             CandidateUtil.getStorageCandidates(cubeql.getCandidates()), null);
         cubeql.getAutoJoinCtx().pruneAllPathsForCandidateDims(cubeql.getCandidateDimTables());
@@ -84,8 +82,6 @@ class CandidateTableResolver implements ContextRewriter {
       // check for joined columns and denorm columns on refered tables
       resolveCandidateFactTablesForJoins(cubeql);
       resolveCandidateDimTablesForJoinsAndDenorms(cubeql);
-      // TODO union : below method can be deleted from CubeQueryContext
-      //cubeql.pruneCandidateFactSet(CandidateTablePruneCode.INVALID_DENORM_TABLE);
       checkForQueriedColumns = true;
     }
   }
@@ -260,7 +256,7 @@ class CandidateTableResolver implements ContextRewriter {
         }
       }
       // Remove storage candidates based on whether they are valid or not.
-      for (Iterator<Candidate> i = cubeql.getCandidates().iterator(); i.hasNext(); ) {
+      for (Iterator<Candidate> i = cubeql.getCandidates().iterator(); i.hasNext();) {
         Candidate cand = i.next();
         if (cand instanceof StorageCandidate) {
           StorageCandidate sc = (StorageCandidate) cand;
@@ -287,7 +283,8 @@ class CandidateTableResolver implements ContextRewriter {
           for (QueriedPhraseContext qur : dimExprs) {
             if (!qur.isEvaluable(cubeql, sc)) {
               log.info("Not considering storage candidate:{} as columns {} are not available", sc, qur.getColumns());
-              cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.columnNotFound(qur.getColumns()));
+              cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.columnNotFound(
+                  CandidateTablePruneCode.COLUMN_NOT_FOUND, qur.getColumns()));
               toRemove = true;
               break;
             }
@@ -299,7 +296,8 @@ class CandidateTableResolver implements ContextRewriter {
           if (!checkForFactColumnExistsAndValidForRange(sc, queriedMsrs, cubeql)) {
             Set<String> columns = getColumns(queriedMsrs);
             log.info("Not considering storage candidate:{} as columns {} is not available", sc, columns);
-            cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.columnNotFound(columns));
+            cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.columnNotFound(
+                CandidateTablePruneCode.COLUMN_NOT_FOUND, columns));
             toRemove = true;
           }
 
@@ -312,7 +310,8 @@ class CandidateTableResolver implements ContextRewriter {
               if (optdim == null) {
                 log.info("Not considering storage candidate:{} as columns {} are not available", sc,
                     chain.getSourceColumns());
-                cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.columnNotFound(chain.getSourceColumns()));
+                cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.columnNotFound(
+                    CandidateTablePruneCode.COLUMN_NOT_FOUND, chain.getSourceColumns()));
                 toRemove = true;
                 break;
               }
@@ -540,7 +539,7 @@ class CandidateTableResolver implements ContextRewriter {
                   log.info("Not considering Storage:{} as its required optional dims are not reachable", candidate);
                   cubeql.getCandidates().remove(candidate);
                   cubeql.addStoragePruningMsg((StorageCandidate) candidate,
-                      CandidateTablePruneCause.columnNotFound(col));
+                      CandidateTablePruneCause.columnNotFound(CandidateTablePruneCode.COLUMN_NOT_FOUND, col));
                   Collection<Candidate> prunedCandidates = CandidateUtil.
                       filterCandidates(cubeql.getCandidates(), (StorageCandidate) candidate);
                   cubeql.addCandidatePruningMsg(prunedCandidates,
@@ -551,7 +550,7 @@ class CandidateTableResolver implements ContextRewriter {
                 cubeql.getCandidateDimTables().get(((CandidateDim) candidate).getBaseTable()).remove(candidate);
                 cubeql.addDimPruningMsgs((Dimension) candidate.getBaseTable(),
                   (CubeDimensionTable) candidate.getTable(),
-                  CandidateTablePruneCause.columnNotFound(col));
+                  CandidateTablePruneCause.columnNotFound(CandidateTablePruneCode.COLUMN_NOT_FOUND, col));
               }
             }
           }
@@ -650,7 +649,8 @@ class CandidateTableResolver implements ContextRewriter {
                   // check if it available as reference, if not remove the
                   // candidate
                   log.info("Not considering dimtable: {} as column {} is not available", cdim, col);
-                  cubeql.addDimPruningMsgs(dim, cdim.getTable(), CandidateTablePruneCause.columnNotFound(col));
+                  cubeql.addDimPruningMsgs(dim, cdim.getTable(), CandidateTablePruneCause.columnNotFound(
+                      CandidateTablePruneCode.COLUMN_NOT_FOUND, col));
                   i.remove();
                   break;
                 }

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java
index 6cb7e3f..025a6ba 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java
@@ -1,26 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
 package org.apache.lens.cube.parse;
 
+import static org.apache.hadoop.hive.ql.parse.HiveParser.Identifier;
+
 import java.util.*;
 
-import org.antlr.runtime.CommonToken;
-import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.lens.cube.metadata.CubeMetastoreClient;
-import org.apache.lens.cube.metadata.FactPartition;
-import org.apache.lens.cube.metadata.MetastoreUtil;
-import org.apache.lens.cube.metadata.TimeRange;
+import org.apache.lens.cube.metadata.*;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+
+import org.antlr.runtime.CommonToken;
 
 import com.google.common.collect.BoundType;
 import com.google.common.collect.Range;
 import com.google.common.collect.RangeSet;
 import com.google.common.collect.TreeRangeSet;
 
-import static org.apache.hadoop.hive.ql.parse.HiveParser.Identifier;
-
 /**
  * Placeholder for Util methods that will be required for {@link Candidate}
  */
@@ -113,21 +129,12 @@ public class CandidateUtil {
   }
 
   public static Set<StorageCandidate> getStorageCandidates(final Candidate candidate) {
-    return getStorageCandidates(new HashSet<Candidate>(1) {{
-      add(candidate);
-    }});
+    return getStorageCandidates(new HashSet<Candidate>(1) {{ add(candidate); }});
   }
 
-  /**
-   *
-   * @param candSet
-   * @param msrs
-   * @param cubeql
-   * @return
-   * @throws LensException
-   */
+
   public static Set<QueriedPhraseContext> coveredMeasures(Candidate candSet, Collection<QueriedPhraseContext> msrs,
-    CubeQueryContext cubeql) throws LensException {
+      CubeQueryContext cubeql) throws LensException {
     Set<QueriedPhraseContext> coveringSet = new HashSet<>();
     for (QueriedPhraseContext msr : msrs) {
       if (candSet.getChildren() == null) {
@@ -136,12 +143,12 @@ public class CandidateUtil {
         }
       } else {
         // TODO union : all candidates should answer
-          for (Candidate cand : candSet.getChildren()) {
-            if (msr.isEvaluable(cubeql, (StorageCandidate) cand)) {
-              coveringSet.add(msr);
-            }
+        for (Candidate cand : candSet.getChildren()) {
+          if (msr.isEvaluable(cubeql, (StorageCandidate) cand)) {
+            coveringSet.add(msr);
           }
         }
+      }
     }
     return coveringSet;
   }
@@ -190,6 +197,7 @@ public class CandidateUtil {
 
   /**
    * Gets all the Storage Candidates that participate in the collection of passed candidates
+   *
    * @param candidates
    * @return
    */
@@ -211,7 +219,7 @@ public class CandidateUtil {
     }
   }
 
-  public static StorageCandidate cloneStorageCandidate(StorageCandidate sc) {
+  public static StorageCandidate cloneStorageCandidate(StorageCandidate sc) throws LensException {
     return new StorageCandidate(sc);
   }
 
@@ -222,11 +230,10 @@ public class CandidateUtil {
     }
   }
 
-  private static final String baseQueryFormat = "SELECT %s FROM %s";
-
-  public static String buildHQLString(String select, String from, String where, String groupby, String orderby, String having,
-                                      Integer limit) {
+  private static final String BASE_QUERY_FORMAT = "SELECT %s FROM %s";
 
+  public static String buildHQLString(String select, String from, String where,
+      String groupby, String orderby, String having, Integer limit) {
     List<String> qstrs = new ArrayList<String>();
     qstrs.add(select);
     qstrs.add(from);
@@ -247,7 +254,7 @@ public class CandidateUtil {
     }
 
     StringBuilder queryFormat = new StringBuilder();
-    queryFormat.append(baseQueryFormat);
+    queryFormat.append(BASE_QUERY_FORMAT);
     if (!StringUtils.isBlank(where)) {
       queryFormat.append(" WHERE %s");
     }
@@ -307,15 +314,4 @@ public class CandidateUtil {
     }
     return false;
   }
-
-
-  public static Set<String> getMissingPartitions(StorageCandidate sc) {
-    Set<String> missingParts = new HashSet<>();
-    for (FactPartition part : sc.getParticipatingPartitions()) {
-      if (!part.isFound()) {
-        missingParts.add(part.toString()); //TODOD union . add approprite partition String
-      }
-    }
-    return missingParts;
-  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index 470d6e7..f602c5f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -36,7 +36,6 @@ import org.apache.lens.cube.error.NoCandidateDimAvailableException;
 import org.apache.lens.cube.error.NoCandidateFactAvailableException;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.join.TableRelationship;
-import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.cube.parse.join.AutoJoinContext;
 import org.apache.lens.cube.parse.join.JoinClause;
 import org.apache.lens.cube.parse.join.JoinTree;
@@ -180,10 +179,6 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
   @Getter
   @Setter
   private DenormalizationResolver.DenormalizationContext deNormCtx;
-  //TODO union : deprecate factPruningMsgs
-  @Getter
-  @Deprecated
-  private PruneCauses<CubeFactTable> factPruningMsgs = new PruneCauses<>();
   @Getter
   private PruneCauses<StorageCandidate>  storagePruningMsgs = new PruneCauses<>();
   @Getter
@@ -346,7 +341,6 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
         return false;
       }
     } catch (LensException e) {
-      //TODO: check if catch can be removed
       return false;
     }
     return true;
@@ -486,26 +480,6 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
     return candidateDims;
   }
 
-  /**
-   * TODO union : deprecate this method and use
-   * {@link # addFactPruningMsg(CubeInterface, CubeFactTable, CandidateTablePruneCause)}
-   * or
-   * {@link #addStoragePruningMsg(StorageCandidate, CandidateTablePruneCause)}
-   * */
-  @Deprecated
-  public void addFactPruningMsgs(CubeFactTable fact, CandidateTablePruneCause factPruningMsg) {
-    throw new IllegalStateException("This method is deprecate");
-  }
-
-  //TODO union : not required as all the pruning happening at StorageCandidate
-  /*
-  public void addFactPruningMsg(CubeInterface cube, CubeFactTable fact, CandidateTablePruneCause factPruningMsg) {
-    log.info("Pruning fact {} with cause: {}", fact, factPruningMsg);
-    for (String storageName : fact.getStorages()) {
-      addStoragePruningMsg(new StorageCandidate(cube, fact, storageName), factPruningMsg);
-    }
-  }
-*/
   public void addCandidatePruningMsg(Collection<Candidate> candidateCollection, CandidateTablePruneCause pruneCause) {
     for (Candidate c : candidateCollection){
       addCandidatePruningMsg(c, pruneCause);
@@ -735,7 +709,8 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
     qb.getParseInfo().setDestLimit(getClause(), 0, value);
   }
 
-  private String getStorageStringWithAlias(StorageCandidate candidate, Map<Dimension, CandidateDim> dimsToQuery, String alias) {
+  private String getStorageStringWithAlias(StorageCandidate candidate, Map<Dimension,
+      CandidateDim> dimsToQuery, String alias) {
     if (cubeTbls.get(alias) instanceof CubeInterface) {
       return candidate.getAliasForTable(alias);
     } else {
@@ -815,7 +790,6 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
     }
   }
 
-  // TODO union : Reevaluate this method.
   void setNonexistingParts(Map<String, Set<String>> nonExistingParts) throws LensException {
     if (!nonExistingParts.isEmpty()) {
       ByteArrayOutputStream out = null;
@@ -912,7 +886,8 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
             }
           }
         }
-        log.error("Query rewrite failed due to NO_CANDIDATE_FACT_AVAILABLE, Cause {}", storagePruningMsgs.toJsonObject());
+        log.error("Query rewrite failed due to NO_CANDIDATE_FACT_AVAILABLE, Cause {}",
+            storagePruningMsgs.toJsonObject());
         throw new NoCandidateFactAvailableException(storagePruningMsgs);
       }
     }
@@ -922,7 +897,6 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
   private HQLContextInterface hqlContext;
 
   @Getter
-  //TODO union : This will be the final Candidate . private Candidate pickedCandidate
   private Candidate pickedCandidate;
   @Getter
   private Collection<CandidateDim> pickedDimTables;
@@ -956,10 +930,9 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
 
     Map<StorageCandidate, Set<Dimension>> factDimMap = new HashMap<>();
     if (cand != null) {
-      // copy ASTs for each storage candidate
+      // Set the default queryAST for StorageCandidate and copy child ASTs from cubeql.
+      // Later in the rewrite flow each Storage candidate will modify them accordingly.
       for (StorageCandidate sc : scSet) {
-        // Set the default queryAST for StorageCandidate and copy child ASTs from cubeql.
-        // Later in the rewrite flow each Storage candidate will modify them accordingly.
         sc.setQueryAst(DefaultQueryAST.fromStorageCandidate(sc, this));
         CandidateUtil.copyASTs(this, sc.getQueryAst());
         factDimMap.put(sc, new HashSet<>(dimsToQuery.keySet()));
@@ -1046,10 +1019,10 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
     } else if (cand instanceof StorageCandidate) {
       StorageCandidate sc = (StorageCandidate) cand;
       sc.updateAnswerableSelectColumns(this);
-      return getInsertClause() + sc.toHQL();
+      return getInsertClause() + sc.toHQL(factDimMap.get(sc));
     } else {
       UnionQueryWriter uqc = new UnionQueryWriter(cand, this);
-      return getInsertClause() + uqc.toHQL();
+      return getInsertClause() + uqc.toHQL(factDimMap);
     }
   }
 
@@ -1232,63 +1205,6 @@ public class CubeQueryContext extends TracksQueriedColumns implements QueryAST {
     queriedExprsWithMeasures.addAll(exprs);
   }
 
-  /**
-   * Prune candidate fact sets with respect to available candidate facts.
-   * <p></p>
-   * Prune a candidate set, if any of the fact is missing.
-   *
-   */
-  //TODO union : deprecated
-  @Deprecated
-  /*
-  public void pruneCandidateFactSet(CandidateTablePruneCode pruneCause) {
-    // remove candidate fact sets that have missing facts
-    for (Iterator<Set<CandidateFact>> i = candidateFactSets.iterator(); i.hasNext();) {
-      Set<CandidateFact> cfacts = i.next();
-      if (!candidateFacts.containsAll(cfacts)) {
-        log.info("Not considering fact table set:{} as they have non candidate tables and facts missing because of {}",
-          cfacts, pruneCause);
-        i.remove();
-      }
-    }
-    // prune candidate facts
-    pruneCandidateFactWithCandidateSet(CandidateTablePruneCode.ELEMENT_IN_SET_PRUNED);
-  }
-*/
-  /**
-   * Prune candidate fact with respect to available candidate fact sets.
-   * <p></p>
-   * If candidate fact is not present in any of the candidate fact sets, remove it.
-   *
-   * @param pruneCause
-   */
-/*
-  public void pruneCandidateFactWithCandidateSet(CandidateTablePruneCode pruneCause) {
-    // remove candidate facts that are not part of any covering set
-    pruneCandidateFactWithCandidateSet(new CandidateTablePruneCause(pruneCause));
-  }
-*/
-  //TODO union : deprecated
-  /*
-  @Deprecated
-
-  public void pruneCandidateFactWithCandidateSet(CandidateTablePruneCause pruneCause) {
-    // remove candidate facts that are not part of any covering set
-    Set<CandidateFact> allCoveringFacts = new HashSet<CandidateFact>();
-    for (Set<CandidateFact> set : candidateFactSets) {
-      allCoveringFacts.addAll(set);
-    }
-    for (Iterator<CandidateFact> i = candidateFacts.iterator(); i.hasNext();) {
-      CandidateFact cfact = i.next();
-      if (!allCoveringFacts.contains(cfact)) {
-        log.info("Not considering fact table:{} as {}", cfact, pruneCause);
-        addFactPruningMsgs(cfact.fact, pruneCause);
-        i.remove();
-      }
-    }
-  }
-*/
-
   public void addQueriedTimeDimensionCols(final String timeDimColName) {
 
     checkArgument(StringUtils.isNotBlank(timeDimColName));

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
index abd909f..4dd3d00 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
@@ -142,6 +142,7 @@ public class CubeQueryRewriter {
     DenormalizationResolver denormResolver = new DenormalizationResolver(conf);
     CandidateTableResolver candidateTblResolver = new CandidateTableResolver(conf);
     StorageTableResolver storageTableResolver = new StorageTableResolver(conf);
+    // Resolve expressions
     rewriters.add(exprResolver);
     // De-normalized columns resolved
     rewriters.add(denormResolver);
@@ -154,7 +155,8 @@ public class CubeQueryRewriter {
     rewriters.add(new GroupbyResolver(conf));
     rewriters.add(new FieldValidator());
     rewriters.add(storageTableResolver);
-    //TODO union: Add CoveringSetResolver which creates UnionCandidates and JoinCandidates. Some code form candidateTblResolver(phase 2) to be moved to CoveringSetResolver
+    //TODO union: Add CoveringSetResolver which creates UnionCandidates and JoinCandidates.
+    //TODO union: Some code from candidateTblResolver(phase 2) to be moved to CoveringSetResolver
     //TODO union: AggregateResolver,GroupbyResolver,FieldValidator before CoveringSetResolver
     // Resolve joins and generate base join tree
     rewriters.add(new JoinResolver(conf));

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
index cd44235..c8bf787 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
@@ -18,10 +18,10 @@
  */
 package org.apache.lens.cube.parse;
 
-import lombok.Getter;
-import lombok.Setter;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 
+import lombok.Getter;
+import lombok.Setter;
 
 public class DefaultAliasDecider implements AliasDecider {
   @Getter

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
index 646dbd6..bb29034 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
@@ -240,13 +240,13 @@ public class DenormalizationResolver implements ContextRewriter {
     private void replaceReferencedColumns(StorageCandidate sc, boolean replaceFact) throws LensException {
       QueryAST ast = cubeql;
       boolean factRefExists = sc != null && tableToRefCols.get(sc.getName()) != null && !tableToRefCols.get(sc
-        .getName()).isEmpty();
+          .getName()).isEmpty();
       if (replaceFact && factRefExists) {
         ast = sc.getQueryAst();
       }
       resolveClause(cubeql, ast.getSelectAST());
       if (factRefExists) {
-          resolveClause(cubeql, sc.getQueryAst().getWhereAST());
+        resolveClause(cubeql, sc.getQueryAst().getWhereAST());
       } else {
         resolveClause(cubeql, ast.getWhereAST());
       }
@@ -347,18 +347,17 @@ public class DenormalizationResolver implements ContextRewriter {
         for (Iterator<StorageCandidate> i =
              CandidateUtil.getStorageCandidates(cubeql.getCandidates()).iterator(); i.hasNext();) {
           StorageCandidate sc = i.next();
-          //TODO union : is this happening in pahse 1 or 2 ?
-          //TODO union : If phase 2, the below code will not work. Move to phase1 in that case
-            if (denormCtx.tableToRefCols.containsKey(sc.getFact().getName())) {
-              for (ReferencedQueriedColumn refcol : denormCtx.tableToRefCols.get(sc.getFact().getName())) {
-                if (denormCtx.getReferencedCols().get(refcol.col.getName()).isEmpty()) {
-                  log.info("Not considering storage candidate :{} as column {} is not available", sc, refcol.col);
-                  cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.columnNotFound(refcol.col.getName()));
-                  Collection<Candidate> prunedCandidates = CandidateUtil.filterCandidates(cubeql.getCandidates(), sc);
-                  cubeql.addCandidatePruningMsg(prunedCandidates,
-                      new CandidateTablePruneCause(CandidateTablePruneCode.ELEMENT_IN_SET_PRUNED));
-                }
+          if (denormCtx.tableToRefCols.containsKey(sc.getFact().getName())) {
+            for (ReferencedQueriedColumn refcol : denormCtx.tableToRefCols.get(sc.getFact().getName())) {
+              if (denormCtx.getReferencedCols().get(refcol.col.getName()).isEmpty()) {
+                log.info("Not considering storage candidate :{} as column {} is not available", sc, refcol.col);
+                cubeql.addStoragePruningMsg(sc, CandidateTablePruneCause.columnNotFound(
+                    CandidateTablePruneCode.DENORM_COLUMN_NOT_FOUND, refcol.col.getName()));
+                Collection<Candidate> prunedCandidates = CandidateUtil.filterCandidates(cubeql.getCandidates(), sc);
+                cubeql.addCandidatePruningMsg(prunedCandidates,
+                    new CandidateTablePruneCause(CandidateTablePruneCode.ELEMENT_IN_SET_PRUNED));
               }
+            }
           }
         }
         if (cubeql.getCandidates().size() == 0) {
@@ -376,7 +375,8 @@ public class DenormalizationResolver implements ContextRewriter {
                 if (denormCtx.getReferencedCols().get(refcol.col.getName()).isEmpty()) {
                   log.info("Not considering dim table:{} as column {} is not available", cdim, refcol.col);
                   cubeql.addDimPruningMsgs(dim, cdim.dimtable,
-                    CandidateTablePruneCause.columnNotFound(refcol.col.getName()));
+                    CandidateTablePruneCause.columnNotFound(CandidateTablePruneCode.DENORM_COLUMN_NOT_FOUND,
+                        refcol.col.getName()));
                   i.remove();
                 }
               }

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
index 0cf4b1c..82113af 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
@@ -25,7 +25,6 @@ import java.util.*;
 
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.ExprColumn.ExprSpec;
-import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.cube.parse.HQLParser.ASTNodeVisitor;
 import org.apache.lens.cube.parse.HQLParser.TreeNode;
 import org.apache.lens.server.api.error.LensException;
@@ -450,7 +449,7 @@ class ExpressionResolver implements ContextRewriter {
       throws LensException {
       replaceAST(cubeql, queryAST.getSelectAST());
       if (sc != null) {
-          replaceAST(cubeql, sc.getQueryAst().getWhereAST());
+        replaceAST(cubeql, sc.getQueryAst().getWhereAST());
       } else {
         replaceAST(cubeql, queryAST.getWhereAST());
       }
@@ -652,11 +651,13 @@ class ExpressionResolver implements ContextRewriter {
           for (ExpressionContext ec : ecSet) {
             if (ec.getSrcTable().getName().equals(cubeql.getCube().getName())) {
               if (cubeql.getQueriedExprsWithMeasures().contains(expr)) {
-                for (Iterator<Candidate> sItr = cubeql.getCandidates().iterator(); sItr.hasNext(); ) {
+                for (Iterator<Candidate> sItr = cubeql.getCandidates().iterator(); sItr.hasNext();) {
                   Candidate cand = sItr.next();
                   if (!cand.isExpressionEvaluable(ec)) {
                     log.info("Not considering Candidate :{} as {} is not evaluable", cand, ec.exprCol.getName());
                     sItr.remove();
+                    cubeql.addCandidatePruningMsg(cand,
+                        CandidateTablePruneCause.expressionNotEvaluable(ec.exprCol.getName()));
                   }
                 }
               } else {
@@ -672,11 +673,11 @@ class ExpressionResolver implements ContextRewriter {
                         CandidateTablePruneCause.expressionNotEvaluable(ec.exprCol.getName()));
                   }
                 }
+              }
             }
           }
         }
       }
-    }
       // prune candidate dims without any valid expressions
       if (cubeql.getDimensions() != null && !cubeql.getDimensions().isEmpty()) {
         for (Dimension dim : cubeql.getDimensions()) {

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
index 6ccf3d8..c9dc7b2 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
@@ -18,13 +18,11 @@
  */
 package org.apache.lens.cube.parse;
 
-import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 import static org.apache.lens.cube.parse.ColumnResolver.addColumnsForSelectExpr;
 
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
+
+import java.util.*;
 
 import org.apache.lens.cube.metadata.AbstractBaseTable;
 import org.apache.lens.server.api.error.LensException;

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
index d89e7b4..fa3ba8f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
@@ -1,3 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
 package org.apache.lens.cube.parse;
 
 import java.util.*;
@@ -55,16 +73,17 @@ public class JoinCandidate implements Candidate {
   public boolean contains(Candidate candidate) {
     if (this.equals(candidate)) {
       return true;
-    } else
+    } else {
       return childCandidate1.contains(candidate) || childCandidate2.contains(candidate);
+    }
   }
 
   @Override
   public Collection<Candidate> getChildren() {
-    return new ArrayList() {{
-      add(childCandidate1);
-      add(childCandidate2);
-    }};
+    ArrayList<Candidate> joinCandidates = new ArrayList<>();
+    joinCandidates.add(childCandidate1);
+    joinCandidates.add(childCandidate2);
+    return joinCandidates;
   }
 
   /**
@@ -73,7 +92,7 @@ public class JoinCandidate implements Candidate {
    */
   @Override
   public boolean evaluateCompleteness(TimeRange timeRange, TimeRange parentTimeRange, boolean failOnPartialData)
-      throws LensException {
+    throws LensException {
     return this.childCandidate1.evaluateCompleteness(timeRange, parentTimeRange, failOnPartialData)
         && this.childCandidate2.evaluateCompleteness(timeRange, parentTimeRange, failOnPartialData);
   }
@@ -114,4 +133,4 @@ public class JoinCandidate implements Candidate {
   private String getToString() {
     return this.toStr = "JOIN[" + childCandidate1.toString() + ", " + childCandidate2.toString() + "]";
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
index cb1cd65..153df24 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
@@ -52,7 +52,7 @@ class LeastPartitionResolver implements ContextRewriter {
 
       double minPartitions = Collections.min(factPartCount.values());
 
-      for (Iterator<Candidate> i = cubeql.getCandidates().iterator(); i.hasNext(); ) {
+      for (Iterator<Candidate> i = cubeql.getCandidates().iterator(); i.hasNext();) {
         Candidate candidate = i.next();
         if (factPartCount.get(candidate) > minPartitions) {
           log.info("Not considering Candidate:{} as it requires more partitions to be" + " queried:{} minimum:{}",

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
index 2522d92..4664cde 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
@@ -95,8 +95,7 @@ class MaxCoveringFactResolver implements ContextRewriter {
         }
       }
     }
-    //  cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCause.lessData(null));
-
+    //cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCause.lessData(null));
   }
 
   private void resolveByDataCompleteness(CubeQueryContext cubeql) {

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
index c17e5bf..0c6465a 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
@@ -23,7 +23,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.lens.cube.metadata.AbstractCubeTable;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 
 import org.apache.commons.lang.StringUtils;
@@ -101,9 +100,9 @@ public class PruneCauses<T> extends HashMap<T, List<CandidateTablePruneCause>> {
       }
     }
     Map<CandidateTablePruneCause, String> maxCauseMap = Maps.newHashMap();
-    for (Map.Entry<CandidateTablePruneCause, List<T>> entry: getReversed().entrySet()) {
+    for (Map.Entry<CandidateTablePruneCause, List<T>> entry : getReversed().entrySet()) {
       if (entry.getKey().getCause().equals(maxCause)) {
-          maxCauseMap.put(entry.getKey(), StringUtils.join(entry.getValue(), ","));
+        maxCauseMap.put(entry.getKey(), StringUtils.join(entry.getValue(), ","));
       }
     }
     return maxCause.getBriefError(maxCauseMap.keySet());

http://git-wip-us.apache.org/repos/asf/lens/blob/975fa2c2/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
index b011e47..832b7a4 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
@@ -101,7 +101,6 @@ class QueriedPhraseContext extends TracksQueriedColumns implements TrackQueriedC
   }
 
   /**
-   * TODO union: change CandidateFact to StorageCandidate. Let the callers typecast and send for now.
    * @param cubeQl
    * @param sc
    * @return
@@ -139,7 +138,7 @@ class QueriedPhraseContext extends TracksQueriedColumns implements TrackQueriedC
 
   private static boolean isColumnAvailableInRange(final TimeRange range, Date startTime, Date endTime) {
     return (isColumnAvailableFrom(range.getFromDate(), startTime)
-      && isColumnAvailableTill(range.getToDate(), endTime));
+        && isColumnAvailableTill(range.getToDate(), endTime));
   }
 
   private static boolean isColumnAvailableFrom(@NonNull final Date date, Date startTime) {
@@ -151,7 +150,7 @@ class QueriedPhraseContext extends TracksQueriedColumns implements TrackQueriedC
   }
 
   public static boolean isFactColumnValidForRange(CubeQueryContext cubeql, StorageCandidate sc, String col) {
-    for(TimeRange range : cubeql.getTimeRanges()) {
+    for (TimeRange range : cubeql.getTimeRanges()) {
       if (!isColumnAvailableInRange(range, getFactColumnStartTime(sc, col), getFactColumnEndTime(sc, col))) {
         return false;
       }
@@ -161,32 +160,32 @@ class QueriedPhraseContext extends TracksQueriedColumns implements TrackQueriedC
 
   public static Date getFactColumnStartTime(StorageCandidate sc, String factCol) {
     Date startTime = null;
-      for (String key : sc.getTable().getProperties().keySet()) {
-        if (key.contains(MetastoreConstants.FACT_COL_START_TIME_PFX)) {
-          String propCol = StringUtils.substringAfter(key, MetastoreConstants.FACT_COL_START_TIME_PFX);
-          if (factCol.equals(propCol)) {
-            startTime = sc.getTable().getDateFromProperty(key, false, true);
-          }
+    for (String key : sc.getTable().getProperties().keySet()) {
+      if (key.contains(MetastoreConstants.FACT_COL_START_TIME_PFX)) {
+        String propCol = StringUtils.substringAfter(key, MetastoreConstants.FACT_COL_START_TIME_PFX);
+        if (factCol.equals(propCol)) {
+          startTime = sc.getTable().getDateFromProperty(key, false, true);
         }
       }
+    }
     return startTime;
   }
 
   public static Date getFactColumnEndTime(StorageCandidate sc, String factCol) {
     Date endTime = null;
-      for (String key : sc.getTable().getProperties().keySet()) {
-        if (key.contains(MetastoreConstants.FACT_COL_END_TIME_PFX)) {
-          String propCol = StringUtils.substringAfter(key, MetastoreConstants.FACT_COL_END_TIME_PFX);
-          if (factCol.equals(propCol)) {
-            endTime = sc.getTable().getDateFromProperty(key, false, true);
-          }
+    for (String key : sc.getTable().getProperties().keySet()) {
+      if (key.contains(MetastoreConstants.FACT_COL_END_TIME_PFX)) {
+        String propCol = StringUtils.substringAfter(key, MetastoreConstants.FACT_COL_END_TIME_PFX);
+        if (factCol.equals(propCol)) {
+          endTime = sc.getTable().getDateFromProperty(key, false, true);
         }
       }
-  return endTime;
+    }
+    return endTime;
   }
 
   static boolean checkForColumnExistsAndValidForRange(StorageCandidate sc, String column, CubeQueryContext cubeql) {
-    return (sc.getColumns().contains(column) &&  isFactColumnValidForRange(cubeql, sc, column));
+    return (sc.getColumns().contains(column) && isFactColumnValidForRange(cubeql, sc, column));
   }
 
 }