Posted to commits@lens.apache.org by sh...@apache.org on 2015/12/30 08:10:20 UTC

[01/50] [abbrv] lens git commit: LENS-858 : No cubes are shown in the front end.

Repository: lens
Updated Branches:
  refs/heads/LENS-581 5052e2a24 -> 25a17dfc9


LENS-858 : No cubes are shown in the front end.


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/ffc9987e
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/ffc9987e
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/ffc9987e

Branch: refs/heads/LENS-581
Commit: ffc9987ec7ad24383348750a63dc79ffe4e4c7ff
Parents: 6409042
Author: Ankeet Maini <an...@gmail.com>
Authored: Mon Nov 9 16:27:43 2015 +0530
Committer: Deepak Kumar Barr <de...@gmail.com>
Committed: Mon Nov 9 16:27:43 2015 +0530

----------------------------------------------------------------------
 lens-ui/app/components/CubeTreeComponent.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/ffc9987e/lens-ui/app/components/CubeTreeComponent.js
----------------------------------------------------------------------
diff --git a/lens-ui/app/components/CubeTreeComponent.js b/lens-ui/app/components/CubeTreeComponent.js
index e348898..e288476 100644
--- a/lens-ui/app/components/CubeTreeComponent.js
+++ b/lens-ui/app/components/CubeTreeComponent.js
@@ -122,7 +122,7 @@ class CubeTree extends React.Component {
 
     if (this.state.loading) {
       cubeTree = <Loader size='4px' margin='2px'/>;
-    } else if (!this.state.cubes.length) {
+    } else if (!Object.keys(this.state.cubes).length) {
       cubeTree = (<div className='alert-danger' style={{padding: '8px 5px'}}>
           <strong>Sorry, we couldn&#39;t find any cubes.</strong>
         </div>);
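
The root cause: this.state.cubes holds the cubes as a keyed object rather than an array, so cubes.length is undefined, !undefined is true, and the "no cubes" alert rendered even after cubes had loaded. Counting keys via Object.keys() restores the intended emptiness check. The same guard over a keyed collection, as a minimal Java sketch (names are illustrative, not from the Lens codebase):

import java.util.Collections;
import java.util.Map;

final class CubeTreeGuard {
  // Analogue of !Object.keys(this.state.cubes).length: an explicit emptiness
  // check on the key set, instead of reading a nonexistent length field.
  static String render(Map<String, Object> cubes) {
    if (cubes == null || cubes.isEmpty()) {
      return "Sorry, we couldn't find any cubes.";
    }
    return cubes.size() + " cubes loaded";
  }

  public static void main(String[] args) {
    System.out.println(render(Collections.emptyMap()));             // alert branch
    System.out.println(render(Map.of("sales_cube", new Object()))); // tree branch
  }
}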


[14/50] [abbrv] lens git commit: First commit: added SushilMohanty to the developers list

Posted by sh...@apache.org.
First commit: added SushilMohanty to the developers list


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/f41c176b
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/f41c176b
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/f41c176b

Branch: refs/heads/LENS-581
Commit: f41c176b473ac014a0c3b24f16e82444a445f28b
Parents: e5691d8
Author: Sushil Mohanty <su...@apache.org>
Authored: Tue Nov 24 12:42:25 2015 +0530
Committer: Sushil Mohanty <su...@apache.org>
Committed: Tue Nov 24 12:42:25 2015 +0530

----------------------------------------------------------------------
 pom.xml | 9 +++++++++
 1 file changed, 9 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/f41c176b/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 9b417d9..b19857f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -342,6 +342,15 @@
         <role>Committer</role>
       </roles>
     </developer>
+    <developer>
+      <id>sushilmohanty</id>
+      <email>sushilmohanty@apache.org</email>
+      <name>SushilMohanty</name>
+      <timezone>+5.5</timezone>
+      <roles>
+        <role>Committer</role>
+      </roles>
+    </developer>
   </developers>
   <scm>
     <connection>scm:git:https://git-wip-us.apache.org/repos/asf/lens.git</connection>


[15/50] [abbrv] lens git commit: LENS-865 : Fix test failures

Posted by sh...@apache.org.
LENS-865 : Fix test failures


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/b66592ce
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/b66592ce
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/b66592ce

Branch: refs/heads/LENS-581
Commit: b66592ce9b685c5a9f8c7b0b855032fc53d2bab4
Parents: f41c176
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Tue Nov 24 16:57:52 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Nov 24 16:57:52 2015 +0530

----------------------------------------------------------------------
 .../lens/cube/metadata/TestCubeMetastoreClient.java       | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/b66592ce/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
index 6b6f645..d938ff7 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
@@ -2047,9 +2047,9 @@ public class TestCubeMetastoreClient {
     assertEquals(parts.get(0).getParameters().get(MetastoreUtil.getLatestPartTimestampKey("dt")),
       HOURLY.format().format(nowPlus1));
 
-    client.dropPartition(cubeDim.getName(), c1, timeParts, null, HOURLY);
-    Assert.assertTrue(client.dimPartitionExists(cubeDim.getName(), c1, timeParts2));
-    Assert.assertFalse(client.dimPartitionExists(cubeDim.getName(), c1, timeParts));
+    client.dropPartition(cubeDim.getName(), c1, timeParts2, null, HOURLY);
+    Assert.assertTrue(client.dimPartitionExists(cubeDim.getName(), c1, timeParts));
+    Assert.assertFalse(client.dimPartitionExists(cubeDim.getName(), c1, timeParts2));
     Assert
       .assertTrue(client.latestPartitionExists(cubeDim.getName(), c1, TestCubeMetastoreClient.getDatePartitionKey()));
     Assert.assertTrue(client.dimTableLatestPartitionExists(storageTableName));
@@ -2057,10 +2057,10 @@ public class TestCubeMetastoreClient {
     assertEquals(1, parts.size());
     assertEquals(TextInputFormat.class.getCanonicalName(), parts.get(0).getInputFormatClass().getCanonicalName());
     assertEquals(parts.get(0).getParameters().get(MetastoreUtil.getLatestPartTimestampKey("dt")),
-      HOURLY.format().format(nowPlus1));
+      HOURLY.format().format(now));
     assertEquals(client.getAllParts(storageTableName).size(), 2);
 
-    client.dropPartition(cubeDim.getName(), c1, timeParts2, null, HOURLY);
+    client.dropPartition(cubeDim.getName(), c1, timeParts, null, HOURLY);
     Assert.assertFalse(client.dimPartitionExists(cubeDim.getName(), c1, timeParts2));
     Assert.assertFalse(client.dimPartitionExists(cubeDim.getName(), c1, timeParts));
     Assert.assertFalse(client.latestPartitionExists(cubeDim.getName(), c1,
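
The earlier assertions had timeParts and timeParts2 transposed relative to the partition being dropped: timeParts corresponds to now and timeParts2 to nowPlus1, and the test now drops the newer partition first, so the surviving partition is timeParts and the latest-partition timestamp must fall back from nowPlus1 to now before the last partition is dropped. The invariant in miniature, as a self-contained Java sketch (run with -ea; the Lens client API is not used here):

import java.util.TreeMap;

final class LatestPartitionFallback {
  public static void main(String[] args) {
    // Two hourly partitions keyed by timestamp (epoch hours for simplicity).
    TreeMap<Long, String> parts = new TreeMap<>();
    parts.put(100L, "now");
    parts.put(101L, "nowPlus1");
    assert parts.lastKey() == 101L;        // latest is nowPlus1 while both exist

    parts.remove(101L);                    // drop the newer partition first
    assert parts.lastKey() == 100L;        // latest falls back to now
    assert "now".equals(parts.get(100L));

    parts.remove(100L);                    // drop the remaining partition
    assert parts.isEmpty();                // no latest partition any more
    System.out.println("latest-partition fallback invariant holds");
  }
}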


[38/50] [abbrv] lens git commit: LENS-760 : Session close should not result in running query failures.

Posted by sh...@apache.org.
LENS-760 : Session close should not result in running query failures.


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/ff891e2c
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/ff891e2c
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/ff891e2c

Branch: refs/heads/LENS-581
Commit: ff891e2cf2a77fd28a7476ad6a6af814bb013661
Parents: 7c7c86d
Author: Deepak Barr <de...@apache.org>
Authored: Sat Dec 12 00:17:47 2015 +0530
Committer: Deepak Kumar Barr <de...@apache.org>
Committed: Sat Dec 12 00:17:47 2015 +0530

----------------------------------------------------------------------
 .../org/apache/lens/driver/hive/HiveDriver.java | 95 +++++++++++++++-----
 .../lens/driver/hive/TestRemoteHiveDriver.java  |  4 +-
 .../lens/server/query/TestQueryService.java     | 20 +++++
 3 files changed, 98 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/ff891e2c/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
index a84c679..253cfc4 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
@@ -112,6 +112,12 @@ public class HiveDriver extends AbstractLensDriver {
   /** The hive handles. */
   private Map<QueryHandle, OperationHandle> hiveHandles = new ConcurrentHashMap<QueryHandle, OperationHandle>();
 
+  /** The orphaned hive sessions. */
+  private ConcurrentLinkedQueue<SessionHandle> orphanedHiveSessions;
+
+  /** The opHandle to hive session map. */
+  private Map<OperationHandle, SessionHandle> opHandleToSession;
+
   /** The session lock. */
   private final Lock sessionLock;
 
@@ -314,6 +320,8 @@ public class HiveDriver extends AbstractLensDriver {
   public HiveDriver() throws LensException {
     this.sessionLock = new ReentrantLock();
     lensToHiveSession = new HashMap<String, SessionHandle>();
+    opHandleToSession = new ConcurrentHashMap<OperationHandle, SessionHandle>();
+    orphanedHiveSessions = new ConcurrentLinkedQueue<SessionHandle>();
     resourcesAddedForSession = new HashMap<SessionHandle, Boolean>();
     connectionExpiryThread.setDaemon(true);
     connectionExpiryThread.setName("HiveDriver-ConnectionExpiryThread");
@@ -491,15 +499,18 @@ public class HiveDriver extends AbstractLensDriver {
    */
   // assuming this is only called for executing explain/insert/set/delete/etc... queries which don't ask to fetch data.
   public LensResultSet execute(QueryContext ctx) throws LensException {
+    OperationHandle op = null;
     try {
       addPersistentPath(ctx);
       Configuration qdconf = ctx.getDriverConf(this);
       qdconf.set("mapred.job.name", ctx.getQueryHandle().toString());
-      OperationHandle op = getClient().executeStatement(getSession(ctx), ctx.getSelectedDriverQuery(),
+      SessionHandle sessionHandle = getSession(ctx);
+      op = getClient().executeStatement(sessionHandle, ctx.getSelectedDriverQuery(),
         qdconf.getValByRegex(".*"));
       log.info("The hive operation handle: {}", op);
       ctx.setDriverOpHandle(op.toString());
       hiveHandles.put(ctx.getQueryHandle(), op);
+      opHandleToSession.put(op, sessionHandle);
       updateStatus(ctx);
       OperationStatus status = getClient().getOperationStatus(op);
 
@@ -519,6 +530,10 @@ public class HiveDriver extends AbstractLensDriver {
     } catch (HiveSQLException hiveErr) {
       handleHiveServerError(ctx, hiveErr);
       throw new LensException("Error executing query", hiveErr);
+    } finally {
+      if (null != op) {
+        opHandleToSession.remove(op);
+      }
     }
   }
 
@@ -550,11 +565,13 @@ public class HiveDriver extends AbstractLensDriver {
         }
       }
       queryHook.preLaunch(ctx);
-      OperationHandle op = getClient().executeStatementAsync(getSession(ctx), ctx.getSelectedDriverQuery(),
+      SessionHandle sessionHandle = getSession(ctx);
+      OperationHandle op = getClient().executeStatementAsync(sessionHandle, ctx.getSelectedDriverQuery(),
         qdconf.getValByRegex(".*"));
       ctx.setDriverOpHandle(op.toString());
       log.info("QueryHandle: {} HiveHandle:{}", ctx.getQueryHandle(), op);
       hiveHandles.put(ctx.getQueryHandle(), op);
+      opHandleToSession.put(op, sessionHandle);
     } catch (IOException e) {
       throw new LensException("Error adding persistent path", e);
     } catch (HiveSQLException e) {
@@ -726,6 +743,18 @@ public class HiveDriver extends AbstractLensDriver {
       } catch (HiveSQLException e) {
         checkInvalidOperation(handle, e);
         throw new LensException("Unable to close query", e);
+      } finally {
+        SessionHandle hiveSession = opHandleToSession.remove(opHandle);
+        if (null != hiveSession && !opHandleToSession.containsValue(hiveSession)
+          && orphanedHiveSessions.contains(hiveSession)) {
+          orphanedHiveSessions.remove(hiveSession);
+          try {
+            getClient().closeSession(hiveSession);
+            log.info("Closed orphaned hive session : {}", hiveSession.getHandleIdentifier());
+          } catch (HiveSQLException e) {
+            log.warn("Error closing orphan hive session : {} ", hiveSession.getHandleIdentifier(), e);
+          }
+        }
       }
     }
   }
@@ -739,6 +768,7 @@ public class HiveDriver extends AbstractLensDriver {
   public boolean cancelQuery(QueryHandle handle) throws LensException {
     log.info("CancelQuery: {}", handle);
     OperationHandle hiveHandle = getHiveHandle(handle);
+    opHandleToSession.remove(hiveHandle);
     try {
       log.info("CancelQuery hiveHandle: {}", hiveHandle);
       getClient().cancelOperation(hiveHandle);
@@ -757,22 +787,11 @@ public class HiveDriver extends AbstractLensDriver {
   @Override
   public void close() {
     log.info("CloseDriver {}", getFullyQualifiedName());
-    // Close this driver and release all resources
+    // Close this driver
     sessionLock.lock();
-    try {
-      for (String lensSessionDbKey : lensToHiveSession.keySet()) {
-        try {
-          getClient().closeSession(lensToHiveSession.get(lensSessionDbKey));
-        } catch (Exception e) {
-          checkInvalidSession(e);
-          log.warn("Error closing session for lens session: {}, hive session: ", lensSessionDbKey,
-            lensToHiveSession.get(lensSessionDbKey), e);
-        }
-      }
-      lensToHiveSession.clear();
-    } finally {
-      sessionLock.unlock();
-    }
+    lensToHiveSession.clear();
+    orphanedHiveSessions.clear();
+    sessionLock.unlock();
   }
 
   /**
@@ -1087,6 +1106,21 @@ public class HiveDriver extends AbstractLensDriver {
       }
       log.info("Hive driver {} recovered {} sessions", getFullyQualifiedName(), lensToHiveSession.size());
     }
+    int numOpHandles = in.readInt();
+    for (int i = 0; i < numOpHandles; i++) {
+      OperationHandle opHandle = new OperationHandle((TOperationHandle) in.readObject());
+      SessionHandle sHandle = new SessionHandle((TSessionHandle) in.readObject(),
+        TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6);
+      opHandleToSession.put(opHandle, sHandle);
+    }
+    log.info("Hive driver {} recovered {} operation handles", getFullyQualifiedName(), opHandleToSession.size());
+    int numOrphanedSessions = in.readInt();
+    for (int i = 0; i < numOrphanedSessions; i++) {
+      SessionHandle sHandle = new SessionHandle((TSessionHandle) in.readObject(),
+        TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6);
+      orphanedHiveSessions.add(sHandle);
+    }
+    log.info("Hive driver {} recovered {} orphaned sessions", getFullyQualifiedName(), orphanedHiveSessions.size());
   }
 
   /*
@@ -1111,6 +1145,17 @@ public class HiveDriver extends AbstractLensDriver {
         out.writeObject(entry.getValue().toTSessionHandle());
       }
       log.info("Hive driver {} persisted {} sessions", getFullyQualifiedName(), lensToHiveSession.size());
+      out.writeInt(opHandleToSession.size());
+      for (Map.Entry<OperationHandle, SessionHandle> entry : opHandleToSession.entrySet()) {
+        out.writeObject(entry.getKey().toTOperationHandle());
+        out.writeObject(entry.getValue().toTSessionHandle());
+      }
+      log.info("Hive driver {} persisted {} operation handles", getFullyQualifiedName(), opHandleToSession.size());
+      out.writeInt(orphanedHiveSessions.size());
+      for (SessionHandle sessionHandle : orphanedHiveSessions) {
+        out.writeObject(sessionHandle.toTSessionHandle());
+      }
+      log.info("Hive driver {} persisted {} orphaned sessions", getFullyQualifiedName(), orphanedHiveSessions.size());
     }
   }
 
@@ -1243,9 +1288,15 @@ public class HiveDriver extends AbstractLensDriver {
           SessionHandle hiveSession = lensToHiveSession.remove(sessionDbKey);
           if (hiveSession != null) {
             try {
-              getClient().closeSession(hiveSession);
-              log.info("Closed Hive session {} for lens session {}", hiveSession.getHandleIdentifier(),
-                sessionDbKey);
+              if (isSessionClosable(hiveSession)) {
+                getClient().closeSession(hiveSession);
+                log.info("Closed Hive session {} for lens session {}", hiveSession.getHandleIdentifier(),
+                  sessionDbKey);
+              } else {
+                log.info("Skipped closing hive session {} for lens session {} due to active operations",
+                  hiveSession.getHandleIdentifier(), sessionDbKey);
+                orphanedHiveSessions.add(hiveSession);
+              }
             } catch (Exception e) {
               log.error("Error closing hive session {} for lens session {}", hiveSession.getHandleIdentifier(),
                 sessionDbKey, e);
@@ -1259,6 +1310,10 @@ public class HiveDriver extends AbstractLensDriver {
     }
   }
 
+  private boolean isSessionClosable(SessionHandle hiveSession) {
+    return !opHandleToSession.containsValue(hiveSession);
+  }
+
   /**
    * Close all connections.
    */
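
The change in outline: every launched operation handle is mapped to its hive session; when a lens session is closed while operations are still in flight, the hive session is parked in orphanedHiveSessions instead of being closed, and whichever operation finishes last closes it from closeQuery's finally block. Both structures are also persisted and recovered so the bookkeeping survives a server restart. A minimal standalone sketch of that lifecycle (not the actual HiveDriver code; the type parameters stand in for OperationHandle and SessionHandle):

import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;

final class OrphanedSessionTracker<Op, Session> {
  private final Map<Op, Session> opToSession = new ConcurrentHashMap<>();
  private final Queue<Session> orphaned = new ConcurrentLinkedQueue<>();

  // Record an operation launched on a session (the executeStatement path).
  void register(Op op, Session session) {
    opToSession.put(op, session);
  }

  // Session-close path: close now only if no operation still references the
  // session; otherwise park it for the last finishing operation to close.
  boolean mayCloseNow(Session session) {
    if (opToSession.containsValue(session)) {
      orphaned.add(session);
      return false;
    }
    return true;
  }

  // Operation-completion path: returns the parked session to close if this
  // was its last live operation, else null.
  Session onOperationClosed(Op op) {
    Session session = opToSession.remove(op);
    if (session != null && !opToSession.containsValue(session) && orphaned.remove(session)) {
      return session;
    }
    return null;
  }
}

As in the patch, containsValue is a linear scan over in-flight operations, which stays cheap for the handful of concurrent queries a driver tracks.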

http://git-wip-us.apache.org/repos/asf/lens/blob/ff891e2c/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
index ab5ada9..4f18c24 100644
--- a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
+++ b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
@@ -274,9 +274,11 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
 
     // Write driver to stream
     ByteArrayOutputStream driverBytes = new ByteArrayOutputStream();
+    ObjectOutputStream out = new ObjectOutputStream(driverBytes);
     try {
-      oldDriver.writeExternal(new ObjectOutputStream(driverBytes));
+      oldDriver.writeExternal(out);
     } finally {
+      out.close();
       driverBytes.close();
     }
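
The original code wrapped driverBytes in an anonymous ObjectOutputStream that was never flushed or closed, so the byte array could be read back before the serialized payload was complete. Keeping a reference and closing it in finally fixes that; with try-with-resources the same guarantee reads as follows (a sketch, not the test's actual style):

import java.io.ByteArrayOutputStream;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectOutputStream;

final class SerializeSketch {
  static byte[] toBytes(Externalizable source) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
      source.writeExternal(out);  // out is flushed and closed before bytes is read
    }
    return bytes.toByteArray();   // complete stream header plus payload
  }
}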
 

http://git-wip-us.apache.org/repos/asf/lens/blob/ff891e2c/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index f6693aa..efef358 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -1472,6 +1472,26 @@ public class TestQueryService extends LensJerseyTest {
     getLensQueryResult(target(), lensSessionId, ctx1.getQueryHandle());
   }
 
+  /**
+   * Test session close when a query is active on the session
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testSessionClose() throws Exception {
+    // Query with group by; runs long enough for the session to be closed before it finishes
+    String query = "select ID, IDSTR, count(*) from " + TEST_TABLE + " group by ID, IDSTR";
+    SessionService sessionService = LensServices.get().getService(HiveSessionService.NAME);
+    Map<String, String> sessionconf = new HashMap<String, String>();
+    LensSessionHandle sessionHandle = sessionService.openSession("foo", "bar", "default", sessionconf);
+    LensConf conf = getLensConf(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "true");
+    QueryHandle qHandle =
+      executeAndGetHandle(target(), Optional.of(sessionHandle), Optional.of(query), Optional.of(conf));
+    sessionService.closeSession(sessionHandle);
+    sessionHandle = sessionService.openSession("foo", "bar", "default", sessionconf);
+    waitForQueryToFinish(target(), sessionHandle, qHandle, Status.SUCCESSFUL);
+  }
+
   @AfterMethod
   private void waitForPurge() throws InterruptedException {
     waitForPurge(0, queryService.finishedQueries);
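
The test exercises the driver change end to end: the session is closed while the group-by query is still running, and a fresh session is opened purely to poll the handle, so the query can only reach SUCCESSFUL if the driver parked the underlying hive session rather than tearing it down with the lens session.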


[35/50] [abbrv] lens git commit: LENS-885: Cleanup of Cube test cases

Posted by sh...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index fea70b7..3be9406 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -19,12 +19,11 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.metadata.UpdatePeriod.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.*;
-import static org.apache.lens.cube.parse.CubeQueryConfUtil.getValidStorageTablesKey;
-import static org.apache.lens.cube.parse.CubeQueryConfUtil.getValidUpdatePeriodsKey;
+import static org.apache.lens.cube.parse.CubeQueryConfUtil.*;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
-import static org.apache.lens.cube.parse.CubeTestSetup.getWhereForMonthlyDailyAndHourly2monthsUnionQuery;
 
 import static org.testng.Assert.*;
 
@@ -36,6 +35,7 @@ import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCause;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode;
+import org.apache.lens.server.api.LensServerAPITestUtil;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.time.DateUtils;
@@ -61,18 +61,16 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 public class TestCubeRewriter extends TestQueryRewrite {
 
-  private final String cubeName = CubeTestSetup.TEST_CUBE_NAME;
-
   private Configuration conf;
 
   @BeforeTest
   public void setupDriver() throws Exception {
-    conf = new Configuration();
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C0,C1,C2");
-    conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, true);
-    conf.setBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, true);
-    conf.setBoolean(CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT, true);
-    conf.setBoolean(CubeQueryConfUtil.DISABLE_AGGREGATE_RESOLVER, false);
+    conf = LensServerAPITestUtil.getConfiguration(
+      DRIVER_SUPPORTED_STORAGES, "C0,C1,C2",
+      DISABLE_AUTO_JOINS, true,
+      ENABLE_SELECT_TO_GROUPBY, true,
+      ENABLE_GROUP_BY_TO_SELECT, true,
+      DISABLE_AGGREGATE_RESOLVER, false);
   }
 
   @Override
@@ -83,7 +81,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
   @Test
   public void testQueryWithNow() throws Exception {
     LensException e = getLensExceptionInRewrite(
-      "select SUM(msr2) from testCube where" + " time_range_in(d_time, 'NOW - 2DAYS', 'NOW')", getConf());
+      "select SUM(msr2) from testCube where " + getTimeRangeString("NOW - 2DAYS", "NOW"), getConf());
     assertEquals(e.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
   }
 
@@ -93,19 +91,12 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.setClass(CubeQueryConfUtil.TIME_RANGE_WRITER_CLASS, BetweenTimeRangeWriter.class, TimeRangeWriter.class);
 
     DateFormat qFmt = new SimpleDateFormat("yyyy-MM-dd-HH:mm:ss");
-    Calendar qCal = Calendar.getInstance();
-    Date toDate = qCal.getTime();
-    String qTo = qFmt.format(toDate);
-    qCal.setTime(TWODAYS_BACK);
-    Date from2DaysBackDate = qCal.getTime();
-    String qFrom = qFmt.format(from2DaysBackDate);
-
-    CubeQueryContext rewrittenQuery = rewriteCtx("select SUM(msr15) from testCube where"
-      + " time_range_in(d_time, '" + qFrom + "', '" + qTo + "')", conf);
+    String timeRangeString;
+    timeRangeString = getTimeRangeString(DAILY, -2, 0, qFmt);
+    CubeQueryContext rewrittenQuery = rewriteCtx("select SUM(msr15) from testCube where " + timeRangeString, conf);
 
-    DateFormat fmt = UpdatePeriod.CONTINUOUS.format();
-    String to = fmt.format(toDate);
-    String from = fmt.format(from2DaysBackDate);
+    String to = getDateStringWithOffset(DAILY, 0, CONTINUOUS);
+    String from = getDateStringWithOffset(DAILY, -2, CONTINUOUS);
 
     String expected = "select SUM((testCube.msr15)) from TestQueryRewrite.c0_testFact_CONTINUOUS testcube"
       + " WHERE ((( testcube . dt ) between  '" + from + "'  and  '" + to + "' ))";
@@ -114,20 +105,17 @@ public class TestCubeRewriter extends TestQueryRewrite {
     compareQueries(rewrittenQuery.toHQL(), expected);
 
     //test with msr2 on different fact
-    rewrittenQuery = rewriteCtx("select SUM(msr2) from testCube where" + " time_range_in(d_time, '"
-      + qFrom + "', '" + qTo + "')", conf);
+    rewrittenQuery = rewriteCtx("select SUM(msr2) from testCube where " + timeRangeString, conf);
     expected = "select SUM((testCube.msr2)) from TestQueryRewrite.c0_testFact testcube"
       + " WHERE ((( testcube . dt ) between  '" + from + "'  and  '" + to + "' ))";
     System.out.println("rewrittenQuery.toHQL() " + rewrittenQuery.toHQL());
     System.out.println("expected " + expected);
     compareQueries(rewrittenQuery.toHQL(), expected);
 
-    //from date 4 days back
-    qCal.setTime(BEFORE_4_DAYS_START);
-    Date from4DaysBackDate = qCal.getTime();
-    String qFrom4DaysBackDate = qFmt.format(from4DaysBackDate);
-    LensException th = getLensExceptionInRewrite("select SUM(msr15) from testCube where"
-      + " time_range_in(d_time, '" + qFrom4DaysBackDate + "', '" + qTo + "')", getConf());
+    //from date 6 days back
+    timeRangeString = getTimeRangeString(DAILY, -6, 0, qFmt);
+    LensException th = getLensExceptionInRewrite("select SUM(msr15) from testCube where "
+      + timeRangeString, getConf());
     assertEquals(th.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
   }
 
@@ -148,8 +136,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     CubeQueryContext rewrittenQuery =
       rewriteCtx("cube select" + " SUM(msr2) from testCube where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
     String expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(rewrittenQuery.toHQL(), expected);
     System.out.println("Non existing parts:" + rewrittenQuery.getNonExistingParts());
     assertNotNull(rewrittenQuery.getNonExistingParts());
@@ -160,7 +148,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     Configuration conf = getConf();
     conf.setClass(CubeQueryConfUtil.TIME_RANGE_WRITER_CLASS, AbridgedTimeRangeWriter.class, TimeRangeWriter.class);
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, false);
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1,C2,C4");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C1,C2,C4");
     CubeQueryContext cubeQueryContext =
       rewriteCtx("cube select" + " SUM(msr2) from testCube where " + THIS_YEAR_RANGE, conf);
     PruneCauses<CubeFactTable> pruneCause = cubeQueryContext.getFactPruningMsgs();
@@ -181,15 +169,15 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String hqlQuery = rewrite("cube select" + " SUM(msr2) from testCube where " + TWO_DAYS_RANGE, getConfWithStorages(
       "C2"));
     String expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     Configuration conf = getConfWithStorages("C1");
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
         getWhereForHourly2days("c1_testfact2"));
     compareQueries(hqlQuery, expected);
 
@@ -214,8 +202,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     CubeQueryContext rewrittenQuery =
       rewriteCtx("cube select" + " SUM(msr2) from derivedCube where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
     String expected =
-      getExpectedQuery(CubeTestSetup.DERIVED_CUBE_NAME, "select sum(derivedCube.msr2) FROM ", null, null,
-        getWhereForDailyAndHourly2days(CubeTestSetup.DERIVED_CUBE_NAME, "C2_testfact"));
+      getExpectedQuery(DERIVED_CUBE_NAME, "select sum(derivedCube.msr2) FROM ", null, null,
+        getWhereForDailyAndHourly2days(DERIVED_CUBE_NAME, "C2_testfact"));
     compareQueries(rewrittenQuery.toHQL(), expected);
     System.out.println("Non existing parts:" + rewrittenQuery.getNonExistingParts());
     assertNotNull(rewrittenQuery.getNonExistingParts());
@@ -226,41 +214,41 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
     // test join
     Configuration conf = getConf();
-    conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
+    conf.setBoolean(DISABLE_AUTO_JOINS, false);
     String hqlQuery;
 
     hqlQuery = rewrite("cube select" + " testdim2.name, SUM(msr2) from derivedCube where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(CubeTestSetup.DERIVED_CUBE_NAME, "select testdim2.name, sum(derivedCube.msr2) FROM ", " JOIN "
+      getExpectedQuery(DERIVED_CUBE_NAME, "select testdim2.name, sum(derivedCube.msr2) FROM ", " JOIN "
           + getDbName() + "c1_testdim2tbl testdim2 ON derivedCube.dim2 = "
           + " testdim2.id and (testdim2.dt = 'latest') ", null, "group by (testdim2.name)", null,
-        getWhereForDailyAndHourly2days(CubeTestSetup.DERIVED_CUBE_NAME, "c1_summary2"));
+        getWhereForDailyAndHourly2days(DERIVED_CUBE_NAME, "c1_summary2"));
     compareQueries(hqlQuery, expected);
 
     // Test that explicit join query passes with join resolver disabled
-    conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, true);
+    conf.setBoolean(DISABLE_AUTO_JOINS, true);
     List<String> joinWhereConds = new ArrayList<String>();
     joinWhereConds.add(StorageUtil.getWherePartClause("dt", "testdim2", StorageConstants.getPartitionsForLatest()));
     hqlQuery =
       rewrite("cube select" + " testdim2.name, SUM(msr2) from derivedCube "
         + " inner join testdim2 on derivedCube.dim2 = testdim2.id " + "where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(CubeTestSetup.DERIVED_CUBE_NAME, "select testdim2.name, sum(derivedCube.msr2) FROM ",
+      getExpectedQuery(DERIVED_CUBE_NAME, "select testdim2.name, sum(derivedCube.msr2) FROM ",
         " inner JOIN " + getDbName() + "c1_testdim2tbl testdim2 ON derivedCube.dim2 = " + " testdim2.id ", null,
         "group by (testdim2.name)", joinWhereConds,
-        getWhereForDailyAndHourly2days(CubeTestSetup.DERIVED_CUBE_NAME, "c1_summary2"));
+        getWhereForDailyAndHourly2days(DERIVED_CUBE_NAME, "c1_summary2"));
     compareQueries(hqlQuery, expected);
   }
 
   @Test
   public void testCubeInsert() throws Exception {
     Configuration conf = getConf();
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
     String hqlQuery = rewrite("insert overwrite directory" + " 'target/test' select SUM(msr2) from testCube where "
       + TWO_DAYS_RANGE, conf);
-    Map<String, String> wh = getWhereForDailyAndHourly2days(cubeName, "C2_testfact");
+    Map<String, String> wh = getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact");
     String expected = "insert overwrite directory 'target/test' "
-      + getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, wh);
+      + getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null, wh);
     compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("insert overwrite directory" + " 'target/test' cube select SUM(msr2) from testCube where "
@@ -269,9 +257,9 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
     hqlQuery = rewrite("insert overwrite local directory" + " 'target/test' select SUM(msr2) from testCube where "
       + TWO_DAYS_RANGE, conf);
-    wh = getWhereForDailyAndHourly2days(cubeName, "C2_testfact");
+    wh = getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact");
     expected = "insert overwrite local directory 'target/test' "
-      + getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, wh);
+      + getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null, wh);
     compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("insert overwrite local directory" + " 'target/test' cube select SUM(msr2) from testCube where "
@@ -280,9 +268,9 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
     hqlQuery = rewrite("insert overwrite table temp" + " select SUM(msr2) from testCube where " + TWO_DAYS_RANGE,
       conf);
-    wh = getWhereForDailyAndHourly2days(cubeName, "C2_testfact");
+    wh = getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact");
     expected = "insert overwrite table temp "
-      + getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, wh);
+      + getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null, wh);
     compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("insert overwrite table temp" + " cube select SUM(msr2) from testCube where " + TWO_DAYS_RANGE,
@@ -323,8 +311,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String hqlQuery, expected;
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     // Test with partition existence
@@ -332,68 +320,68 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
         getWhereForHourly2days("c1_testfact2"));
     compareQueries(hqlQuery, expected);
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, false);
 
     // Tests for valid tables
-    conf.set(CubeQueryConfUtil.getValidFactTablesKey(cubeName), "testFact");
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
+    conf.set(CubeQueryConfUtil.getValidFactTablesKey(TEST_CUBE_NAME), "testFact");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C1");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-        getWhereForDailyAndHourly2days(cubeName, "C1_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C1_testfact"));
     compareQueries(hqlQuery, expected);
 
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
-    conf.set(CubeQueryConfUtil.getValidFactTablesKey(cubeName), "testFact");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
+    conf.set(CubeQueryConfUtil.getValidFactTablesKey(TEST_CUBE_NAME), "testFact");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
-    conf.set(CubeQueryConfUtil.getValidFactTablesKey(cubeName), "testFact2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C1");
+    conf.set(CubeQueryConfUtil.getValidFactTablesKey(TEST_CUBE_NAME), "testFact2");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
         getWhereForHourly2days("c1_testfact2"));
     compareQueries(hqlQuery, expected);
 
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
-    conf.set(CubeQueryConfUtil.getValidFactTablesKey(cubeName), "testFact2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C1");
+    conf.set(CubeQueryConfUtil.getValidFactTablesKey(TEST_CUBE_NAME), "testFact2");
     conf.set(getValidStorageTablesKey("testFact2"), "C1_testFact2");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
         getWhereForHourly2days("c1_testfact2"));
     compareQueries(hqlQuery, expected);
 
-    conf.set(CubeQueryConfUtil.getValidFactTablesKey(cubeName), "testFact");
+    conf.set(CubeQueryConfUtil.getValidFactTablesKey(TEST_CUBE_NAME), "testFact");
     conf.set(getValidStorageTablesKey("testfact"), "C1_testFact");
     conf.set(getValidUpdatePeriodsKey("testfact", "C1"), "HOURLY");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
-    expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, getWhereForHourly2days("c1_testfact"));
+    expected = getExpectedQuery(TEST_CUBE_NAME,
+      "select sum(testcube.msr2) FROM ", null, null, getWhereForHourly2days("c1_testfact"));
     compareQueries(hqlQuery, expected);
 
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
     conf.set(getValidStorageTablesKey("testfact"), "C2_testFact");
     conf.set(getValidUpdatePeriodsKey("testfact", "C2"), "HOURLY");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
-    expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, getWhereForHourly2days("c2_testfact"));
+    expected = getExpectedQuery(TEST_CUBE_NAME,
+      "select sum(testcube.msr2) FROM ", null, null, getWhereForHourly2days("c2_testfact"));
     compareQueries(hqlQuery, expected);
 
     // max interval test
     conf = new Configuration();
     conf.set(CubeQueryConfUtil.QUERY_MAX_INTERVAL, "HOURLY");
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1,C2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C1,C2");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
-    expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, getWhereForHourly2days("c1_testfact2"));
+    expected = getExpectedQuery(TEST_CUBE_NAME,
+      "select sum(testcube.msr2) FROM ", null, null, getWhereForHourly2days("c1_testfact2"));
     compareQueries(hqlQuery, expected);
   }
 
@@ -406,8 +394,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.set(getValidUpdatePeriodsKey("testfact", "C2"), "MONTHLY,DAILY");
     ArrayList<String> storages = Lists.newArrayList("c1_testfact", "c2_testfact");
     try {
-      CubeTestSetup.getStorageToUpdatePeriodMap().put("c1_testfact", Lists.newArrayList(HOURLY, DAILY));
-      CubeTestSetup.getStorageToUpdatePeriodMap().put("c2_testfact", Lists.newArrayList(MONTHLY));
+      getStorageToUpdatePeriodMap().put("c1_testfact", Lists.newArrayList(HOURLY, DAILY));
+      getStorageToUpdatePeriodMap().put("c2_testfact", Lists.newArrayList(MONTHLY));
 
       // Union query
       String hqlQuery;
@@ -430,7 +418,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
       hqlQuery = rewrite("select cityid as `City ID`, msr8, msr7 as `Third measure` "
         + "from testCube where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
 
-      expected = getExpectedUnionQuery(cubeName, storages, provider,
+      expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
         "SELECT testcube.alias0 as `City ID`, sum(testcube.alias1) + max(testcube.alias2), "
           + "case when sum(testcube.alias1) = 0 then 0 else sum(testcube.alias3)/sum(testcube.alias1) end "
           + "as `Third Measure`",
@@ -445,7 +433,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
       hqlQuery = rewrite("select cityid as `City ID`, msr3 as `Third measure` from testCube where "
         + TWO_MONTHS_RANGE_UPTO_HOURS + " having msr7 > 10", conf);
 
-      expected = getExpectedUnionQuery(cubeName, storages, provider,
+      expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
         "SELECT testcube.alias0 as `City ID`, max(testcube.alias1) as `Third measure`",
         null, "group by testcube.alias0 having "
           + "(case when sum(testcube.alias2)=0 then 0 else sum(testcube.alias3)/sum(testcube.alias2) end > 10 )",
@@ -458,7 +446,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
       hqlQuery = rewrite("select cityid as `City ID`, msr3 as `Third measure` from testCube where "
         + TWO_MONTHS_RANGE_UPTO_HOURS + " having msr8 > 10", conf);
 
-      expected = getExpectedUnionQuery(cubeName, storages, provider,
+      expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
         "SELECT testcube.alias0 as `City ID`, max(testcube.alias1) as `Third measure`",
         null, "GROUP BY testcube.alias0 "
           + "HAVING (sum(testcube.alias2) + max(testcube.alias1)) > 10 ",
@@ -469,7 +457,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
       hqlQuery = rewrite("select msr3 as `Measure 3` from testCube where "
         + TWO_MONTHS_RANGE_UPTO_HOURS + " having msr2 > 10 and msr2 < 100", conf);
 
-      expected = getExpectedUnionQuery(cubeName, storages, provider,
+      expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
         "SELECT max(testcube.alias0) as `Measure 3` ",
         null, " HAVING sum(testcube.alias1) > 10 and sum(testcube.alias1) < 100",
         "SELECT max(testcube.msr3) as `alias0`, sum(testcube.msr2) as `alias1`", null, null);
@@ -479,7 +467,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
         + "SUM(msr2) as `Measure 2` from testCube where "
         + TWO_MONTHS_RANGE_UPTO_HOURS + " having msr4 > 10 order by cityid desc limit 5", conf);
 
-      expected = getExpectedUnionQuery(cubeName, storages, provider,
+      expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
         "SELECT testcube.alias0, testcube.alias1 as `City ID`, max(testcube.alias2) as `Measure 3`, "
           + "count(testcube.alias3), sum(testcube.alias4) as `Measure 2`",
         null, "group by testcube.alias0, testcube.alias1 "
@@ -490,12 +478,12 @@ public class TestCubeRewriter extends TestQueryRewrite {
       compareQueries(hqlQuery, expected);
 
       conf.setBoolean(CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT, false);
-      conf.setBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, false);
+      conf.setBoolean(ENABLE_SELECT_TO_GROUPBY, false);
       hqlQuery = rewrite("select cityid as `City ID`, msr3 as `Measure 3`, "
         + "SUM(msr2) as `Measure 2` from testCube" + " where "
         + TWO_MONTHS_RANGE_UPTO_HOURS + " group by zipcode having msr4 > 10 order by cityid desc limit 5", conf);
 
-      expected = getExpectedUnionQuery(cubeName, storages, provider,
+      expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
         "SELECT testcube.alias0 as `City ID`,max(testcube.alias1) as `Measure 3`,sum(testcube.alias2) as `Measure 2` ",
         null, "group by testcube.alias3 having count(testcube.alias4) > 10 order by testcube.alias0 desc limit 5",
         "SELECT testcube.cityid as `alias0`, max(testcube.msr3) as `alias1`, "
@@ -503,7 +491,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
         null, "GROUP BY testcube.zipcode");
       compareQueries(hqlQuery, expected);
     } finally {
-      CubeTestSetup.getStorageToUpdatePeriodMap().clear();
+      getStorageToUpdatePeriodMap().clear();
     }
 
   }
@@ -517,12 +505,12 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.set(getValidUpdatePeriodsKey("testfact2", "C1"), "YEARLY");
     conf.set(getValidUpdatePeriodsKey("testfact", "C2"), "HOURLY");
 
-    CubeTestSetup.getStorageToUpdatePeriodMap().put("c1_testfact", Lists.newArrayList(DAILY));
-    CubeTestSetup.getStorageToUpdatePeriodMap().put("c2_testfact", Lists.newArrayList(HOURLY));
+    getStorageToUpdatePeriodMap().put("c1_testfact", Lists.newArrayList(DAILY));
+    getStorageToUpdatePeriodMap().put("c2_testfact", Lists.newArrayList(HOURLY));
     StoragePartitionProvider provider = new StoragePartitionProvider() {
       @Override
       public Map<String, String> providePartitionsForStorage(String storage) {
-        return getWhereForDailyAndHourly2days(cubeName, storage);
+        return getWhereForDailyAndHourly2days(TEST_CUBE_NAME, storage);
       }
     };
     try {
@@ -530,20 +518,21 @@ public class TestCubeRewriter extends TestQueryRewrite {
       String hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
       System.out.println("HQL:" + hqlQuery);
 
-      String expected = getExpectedUnionQuery(cubeName, Lists.newArrayList("c1_testfact", "c2_testfact"), provider,
+      String expected = getExpectedUnionQuery(TEST_CUBE_NAME,
+        Lists.newArrayList("c1_testfact", "c2_testfact"), provider,
         "select sum(testcube.alias0) ", null, null,
         "select sum(testcube.msr2) as `alias0` from ", null, null
       );
       compareQueries(hqlQuery, expected);
     } finally {
-      CubeTestSetup.getStorageToUpdatePeriodMap().clear();
+      getStorageToUpdatePeriodMap().clear();
     }
   }
 
   @Test
   public void testCubeWhereQueryWithMultipleTablesForMonth() throws Exception {
     Configuration conf = getConf();
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1,C2,C3");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C1,C2,C3");
     conf.setBoolean(CubeQueryConfUtil.ENABLE_STORAGES_UNION, true);
     conf.set(getValidStorageTablesKey("testfact"), "");
     conf.set(getValidUpdatePeriodsKey("testfact", "C1"), "HOURLY");
@@ -552,9 +541,9 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.set(getValidUpdatePeriodsKey("testfact", "C2"), "DAILY");
     conf.set(getValidUpdatePeriodsKey("testfact", "C3"), "MONTHLY");
 
-    CubeTestSetup.getStorageToUpdatePeriodMap().put("c1_testfact", Lists.newArrayList(HOURLY));
-    CubeTestSetup.getStorageToUpdatePeriodMap().put("c2_testfact", Lists.newArrayList(DAILY));
-    CubeTestSetup.getStorageToUpdatePeriodMap().put("c3_testfact", Lists.newArrayList(MONTHLY));
+    getStorageToUpdatePeriodMap().put("c1_testfact", Lists.newArrayList(HOURLY));
+    getStorageToUpdatePeriodMap().put("c2_testfact", Lists.newArrayList(DAILY));
+    getStorageToUpdatePeriodMap().put("c3_testfact", Lists.newArrayList(MONTHLY));
     StoragePartitionProvider provider = new StoragePartitionProvider() {
       @Override
       public Map<String, String> providePartitionsForStorage(String storage) {
@@ -566,13 +555,13 @@ public class TestCubeRewriter extends TestQueryRewrite {
       String hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
       System.out.println("HQL:" + hqlQuery);
       ArrayList<String> storages = Lists.newArrayList("c1_testfact", "c2_testfact", "c3_testfact");
-      String expected = getExpectedUnionQuery(cubeName, storages, provider,
+      String expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
         "select sum(testcube.alias0)", null, null,
         "select sum(testcube.msr2) as `alias0` from ", null, null
       );
       compareQueries(hqlQuery, expected);
     } finally {
-      CubeTestSetup.getStorageToUpdatePeriodMap().clear();
+      getStorageToUpdatePeriodMap().clear();
     }
   }
 
@@ -580,29 +569,29 @@ public class TestCubeRewriter extends TestQueryRewrite {
   public void testPartColAsQueryColumn() throws Exception {
     Configuration conf = getConf();
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, false);
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C3");
-    conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C3");
+    conf.setBoolean(DISABLE_AUTO_JOINS, false);
     String hql, expected;
     hql = rewrite(
       "select countrydim.name, msr2 from" + " testCube" + " where countrydim.region = 'asia' and "
         + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select countrydim.name, sum(testcube.msr2)" + " FROM ", " JOIN " + getDbName()
+      getExpectedQuery(TEST_CUBE_NAME, "select countrydim.name, sum(testcube.msr2)" + " FROM ", " JOIN " + getDbName()
           + "c3_countrytable_partitioned countrydim on testcube.countryid=countrydim.id and countrydim.dt='latest'",
         "countrydim.region='asia'",
         " group by countrydim.name ", null,
-        getWhereForHourly2days(cubeName, "C3_testfact2_raw"));
+        getWhereForHourly2days(TEST_CUBE_NAME, "C3_testfact2_raw"));
     compareQueries(hql, expected);
     hql = rewrite(
       "select statedim.name, statedim.countryid, msr2 from" + " testCube" + " where statedim.countryid = 5 and "
         + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select statedim.name, statedim.countryid, sum(testcube.msr2)" + " FROM ",
+      getExpectedQuery(TEST_CUBE_NAME, "select statedim.name, statedim.countryid, sum(testcube.msr2)" + " FROM ",
         " JOIN " + getDbName()
           + "c3_statetable_partitioned statedim ON" + " testCube.stateid = statedim.id and statedim.dt = 'latest'",
         "statedim.countryid=5",
         " group by statedim.name, statedim.countryid", null,
-        getWhereForHourly2days(cubeName, "C3_testfact2_raw"));
+        getWhereForHourly2days(TEST_CUBE_NAME, "C3_testfact2_raw"));
     compareQueries(hql, expected);
   }
 
@@ -610,7 +599,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
   public void testCubeJoinQuery() throws Exception {
     // q1
     Configuration conf = getConf();
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
     String hqlQuery =
       rewrite("select SUM(msr2) from testCube" + " join citydim on testCube.cityid = citydim.id" + " where "
         + TWO_DAYS_RANGE, conf);
@@ -618,9 +607,9 @@ public class TestCubeRewriter extends TestQueryRewrite {
     //    joinWhereConds.add(StorageUtil.getWherePartClause("dt", "citydim", StorageConstants.getPartitionsForLatest
     // ()));
     String expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2)" + " FROM ", " INNER JOIN " + getDbName()
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2)" + " FROM ", " INNER JOIN " + getDbName()
           + "c2_citytable citydim ON" + " testCube.cityid = citydim.id", null, null, joinWhereConds,
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
@@ -638,16 +627,17 @@ public class TestCubeRewriter extends TestQueryRewrite {
       rewrite("select statedim.name, SUM(msr2) from" + " testCube" + " join citydim on testCube.cityid = citydim.id"
         + " left outer join statedim on statedim.id = citydim.stateid"
         + " right outer join zipdim on citydim.zipcode = zipdim.code" + " where " + TWO_DAYS_RANGE, getConf());
-    joinWhereConds = new ArrayList<String>();
+    joinWhereConds = new ArrayList<>();
     joinWhereConds.add(StorageUtil.getWherePartClause("dt", "citydim", StorageConstants.getPartitionsForLatest()));
     joinWhereConds.add(StorageUtil.getWherePartClause("dt", "zipdim", StorageConstants.getPartitionsForLatest()));
     expected =
-      getExpectedQuery(cubeName, "select statedim.name," + " sum(testcube.msr2) FROM ", "INNER JOIN " + getDbName()
+      getExpectedQuery(TEST_CUBE_NAME,
+        "select statedim.name," + " sum(testcube.msr2) FROM ", "INNER JOIN " + getDbName()
           + "c1_citytable citydim ON" + " testCube.cityid = citydim.id LEFT OUTER JOIN " + getDbName()
           + "c1_statetable statedim" + " ON statedim.id = citydim.stateid AND "
           + "(statedim.dt = 'latest') RIGHT OUTER JOIN " + getDbName() + "c1_ziptable"
           + " zipdim ON citydim.zipcode = zipdim.code", null, " group by" + " statedim.name ", joinWhereConds,
-        getWhereForHourly2days(cubeName, "C1_testfact2"));
+        getWhereForHourly2days(TEST_CUBE_NAME, "C1_testfact2"));
     compareQueries(hqlQuery, expected);
 
     // q3
@@ -672,18 +662,18 @@ public class TestCubeRewriter extends TestQueryRewrite {
         + " left outer join citydim on testCube.cityid = citydim.id"
         + " left outer join zipdim on citydim.zipcode = zipdim.code" + " where " + TWO_DAYS_RANGE, getConf());
     expected =
-      getExpectedQuery(cubeName, "select citydim.name," + " sum(testcube.msr2) FROM ", " LEFT OUTER JOIN "
+      getExpectedQuery(TEST_CUBE_NAME, "select citydim.name," + " sum(testcube.msr2) FROM ", " LEFT OUTER JOIN "
           + getDbName() + "c1_citytable citydim ON" + " testCube.cityid = citydim.id and (citydim.dt = 'latest') "
           + " LEFT OUTER JOIN " + getDbName() + "c1_ziptable" + " zipdim ON citydim.zipcode = zipdim.code AND "
           + "(zipdim.dt = 'latest')", null, " group by" + " citydim.name ", null,
-        getWhereForHourly2days(cubeName, "C1_testfact2"));
+        getWhereForHourly2days(TEST_CUBE_NAME, "C1_testfact2"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite("select SUM(msr2) from testCube" + " join countrydim on testCube.countryid = countrydim.id" + " where "
         + TWO_MONTHS_RANGE_UPTO_MONTH, getConf());
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", " INNER JOIN " + getDbName()
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", " INNER JOIN " + getDbName()
           + "c1_countrytable countrydim ON testCube.countryid = " + " countrydim.id", null, null, null,
         getWhereForMonthly2months("c2_testfactmonthly"));
     compareQueries(hqlQuery, expected);
@@ -698,23 +688,23 @@ public class TestCubeRewriter extends TestQueryRewrite {
   public void testCubeGroupbyWithConstantProjected() throws Exception {
     // check constants
     Configuration conf = getConf();
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
     String hqlQuery1 = rewrite("select cityid, 99, \"placeHolder\", -1001, SUM(msr2) from testCube" + " where "
       + TWO_DAYS_RANGE, conf);
-    String expected1 = getExpectedQuery(cubeName, "select testcube.cityid, 99, \"placeHolder\", -1001,"
+    String expected1 = getExpectedQuery(TEST_CUBE_NAME, "select testcube.cityid, 99, \"placeHolder\", -1001,"
         + " sum(testcube.msr2) FROM ", null, " group by testcube.cityid ",
-      getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery1, expected1);
 
     // check constants with expression
     String hqlQuery2 = rewrite(
       "select cityid, case when stateid = 'za' then \"Not Available\" end, 99, \"placeHolder\", -1001, "
         + "SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
-    String expected2 = getExpectedQuery(cubeName,
+    String expected2 = getExpectedQuery(TEST_CUBE_NAME,
       "select testcube.cityid, case when testcube.stateid = 'za' then \"Not Available\" end, 99, \"placeHolder\","
         + " -1001, sum(testcube.msr2) FROM ", null,
       " group by testcube.cityid, case when testcube.stateid = 'za' then \"Not Available\" end ",
-      getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery2, expected2);
 
     // check expression with boolean and numeric constants
@@ -723,7 +713,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
         + "case when stateid='za' then 99 else -1001 end,  "
         + "SUM(msr2), SUM(msr2 + 39), SUM(msr2) + 567 from testCube" + " where " + TWO_DAYS_RANGE, conf);
     String expected3 = getExpectedQuery(
-      cubeName,
+      TEST_CUBE_NAME,
       "select testcube.cityid, testcube.stateid + 99, 44 + testcube.stateid, testcube.stateid - 33,"
         + " 999 - testcube.stateid, TRUE, FALSE, round(123.4567,2), "
         + "case when testcube.stateid='za' then 99 else -1001 end,"
@@ -732,14 +722,14 @@ public class TestCubeRewriter extends TestQueryRewrite {
       " group by testcube.cityid,testcube.stateid + 99, 44 + testcube.stateid, testcube.stateid - 33, "
         + "999 - testcube.stateid, "
         + " case when testcube.stateid='za' then 99 else -1001 end ",
-      getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery3, expected3);
   }
 
   @Test
   public void testCubeGroupbyQuery() throws Exception {
     Configuration conf = getConf();
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
     String hqlQuery =
       rewrite("select name, SUM(msr2) from" + " testCube join citydim on testCube.cityid = citydim.id where "
         + TWO_DAYS_RANGE, conf);
@@ -747,9 +737,9 @@ public class TestCubeRewriter extends TestQueryRewrite {
     //    joinWhereConds.add(StorageUtil.getWherePartClause("dt", "citydim", StorageConstants.getPartitionsForLatest
     // ()));
     String expected =
-      getExpectedQuery(cubeName, "select citydim.name," + " sum(testcube.msr2) FROM ", "INNER JOIN " + getDbName()
+      getExpectedQuery(TEST_CUBE_NAME, "select citydim.name," + " sum(testcube.msr2) FROM ", "INNER JOIN " + getDbName()
           + "c2_citytable citydim ON" + " testCube.cityid = citydim.id", null, " group by citydim.name ",
-        joinWhereConds, getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+        joinWhereConds, getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
@@ -759,70 +749,70 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
     hqlQuery = rewrite("select cityid, SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select testcube.cityid," + " sum(testcube.msr2) FROM ", null,
-        " group by testcube.cityid ", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.cityid," + " sum(testcube.msr2) FROM ", null,
+        " group by testcube.cityid ", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("select round(cityid), SUM(msr2) from" + " testCube where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select round(testcube.cityid)," + " sum(testcube.msr2) FROM ", null,
-        " group by round(testcube.cityid) ", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select round(testcube.cityid)," + " sum(testcube.msr2) FROM ", null,
+        " group by round(testcube.cityid) ", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite("select SUM(msr2) from testCube" + "  where " + TWO_DAYS_RANGE + "group by round(zipcode)", conf);
     expected =
-      getExpectedQuery(cubeName, "select round(testcube.zipcode)," + " sum(testcube.msr2) FROM ", null,
-        " group by round(testcube.zipcode) ", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select round(testcube.zipcode)," + " sum(testcube.msr2) FROM ", null,
+        " group by round(testcube.zipcode) ", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite("select round(cityid), SUM(msr2) from" + " testCube where " + TWO_DAYS_RANGE + " group by zipcode",
         conf);
     expected =
-      getExpectedQuery(cubeName, "select " + " round(testcube.cityid), sum(testcube.msr2) FROM ", null,
-        " group by testcube.zipcode", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select " + " round(testcube.cityid), sum(testcube.msr2) FROM ", null,
+        " group by testcube.zipcode", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("select round(cityid), SUM(msr2) from" + " testCube where " + TWO_DAYS_RANGE, conf);
     expected =
-      getExpectedQuery(cubeName, "select " + " round(testcube.cityid), sum(testcube.msr2) FROM ", null,
-        " group by round(testcube.cityid)", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select " + " round(testcube.cityid), sum(testcube.msr2) FROM ", null,
+        " group by round(testcube.cityid)", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite("select cityid, SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE + " group by round(zipcode)",
         conf);
     expected =
-      getExpectedQuery(cubeName, "select " + " testcube.cityid, sum(testcube.msr2) FROM ", null,
-        " group by round(testcube.zipcode)", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select " + " testcube.cityid, sum(testcube.msr2) FROM ", null,
+        " group by round(testcube.zipcode)", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE + " group by round(zipcode)", conf);
     expected =
-      getExpectedQuery(cubeName, "select round(testcube.zipcode)," + " sum(testcube.msr2) FROM ", null,
-        " group by round(testcube.zipcode)", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select round(testcube.zipcode)," + " sum(testcube.msr2) FROM ", null,
+        " group by round(testcube.zipcode)", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite("select cityid, msr2 from testCube" + " where " + TWO_DAYS_RANGE + " group by round(zipcode)", conf);
     expected =
-      getExpectedQuery(cubeName, "select " + " testcube.cityid, sum(testcube.msr2) FROM ", null,
-        " group by round(testcube.zipcode)", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select " + " testcube.cityid, sum(testcube.msr2) FROM ", null,
+        " group by round(testcube.zipcode)", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite("select round(zipcode) rzc," + " msr2 from testCube where " + TWO_DAYS_RANGE + " group by zipcode"
         + " order by rzc", conf);
     expected =
-      getExpectedQuery(cubeName, "select round(testcube.zipcode) as `rzc`," + " sum(testcube.msr2) FROM ", null,
-        " group by testcube.zipcode  order by rzc asc", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select round(testcube.zipcode) as `rzc`," + " sum(testcube.msr2) FROM ", null,
+        " group by testcube.zipcode  order by rzc asc", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     // rewrite with expressions
-    conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1, C2");
+    conf.setBoolean(DISABLE_AUTO_JOINS, false);
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C1, C2");
     hqlQuery =
       rewrite("SELECT citydim.name AS g1," + " CASE  WHEN citydim.name=='NULL'  THEN 'NULL' "
         + " WHEN citydim.name=='X'  THEN 'X-NAME' " + " WHEN citydim.name=='Y'  THEN 'Y-NAME' "
@@ -854,7 +844,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
         + "";
     expected =
       getExpectedQuery(
-        cubeName,
+        TEST_CUBE_NAME,
         "SELECT ( citydim.name ) as `g1` ,"
           + "  case  when (( citydim.name ) ==  'NULL' ) then  'NULL'  when (( citydim.name ) ==  'X' )"
           + " then  'X-NAME'  when (( citydim.name ) ==  'Y' ) then  'Y-NAME'"
@@ -963,7 +953,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String expectedRewrittenQuery = "SELECT ( citydim . name ) as `Alias With Spaces` , sum(( testcube . msr2 )) "
       + "as `TestMeasure`  FROM TestQueryRewrite.c2_testfact testcube inner JOIN TestQueryRewrite.c2_citytable citydim "
       + "ON (( testcube . cityid ) = ( citydim . id )) WHERE (((( testcube . dt ) =  '"
-      + CubeTestSetup.getDateUptoHours(LAST_HOUR) + "' ))) GROUP BY ( citydim . name )";
+      + getDateUptoHours(getDateWithOffset(HOURLY, -1)) + "' ))) GROUP BY ( citydim . name )";
 
     String actualRewrittenQuery = rewrite(inputQuery, getConfWithStorages("C2"));
 
@@ -980,7 +970,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String expectedRewrittenQuery = "SELECT ( citydim . name ) as `Alias With Spaces` , sum(( testcube . msr2 )) "
       + "as `TestMeasure`  FROM TestQueryRewrite.c2_testfact testcube inner JOIN TestQueryRewrite.c2_citytable citydim "
       + "ON (( testcube . cityid ) = ( citydim . id )) WHERE (((( testcube . dt ) =  '"
-      + CubeTestSetup.getDateUptoHours(LAST_HOUR) + "' ))) GROUP BY ( citydim . name )";
+      + getDateUptoHours(getDateWithOffset(HOURLY, -1)) + "' ))) GROUP BY ( citydim . name )";
 
     String actualRewrittenQuery = rewrite(inputQuery, getConfWithStorages("C2"));
 
@@ -992,8 +982,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String hqlQuery = rewrite("select SUM(msr2) m2 from" + " testCube where " + TWO_DAYS_RANGE, getConfWithStorages(
       "C2"));
     String expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) as `m2` FROM ", null, null,
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) as `m2` FROM ", null, null,
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("select SUM(msr2) from testCube mycube" + " where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
@@ -1005,8 +995,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     hqlQuery =
       rewrite("select SUM(testCube.msr2) from testCube" + " where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("select mycube.msr2 m2 from testCube" + " mycube where " + TWO_DAYS_RANGE, getConfWithStorages(
@@ -1018,8 +1008,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
     hqlQuery = rewrite("select testCube.msr2 m2 from testCube" + " where " + TWO_DAYS_RANGE, getConfWithStorages("C2"));
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) as `m2` FROM ", null, null,
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) as `m2` FROM ", null, null,
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
   }
 
@@ -1028,7 +1018,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String hqlQuery =
       rewrite("select SUM(msr2) from testCube" + " where " + TWO_MONTHS_RANGE_UPTO_HOURS, getConfWithStorages("C2"));
     String expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, null,
         getWhereForMonthlyDailyAndHourly2months("C2_testfact"));
     compareQueries(hqlQuery, expected);
   }
@@ -1079,7 +1069,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String hqlQuery = rewrite("select cityid, SUM(msr2) from testCube" + " where " + TWO_MONTHS_RANGE_UPTO_MONTH,
       getConfWithStorages("C2"));
     String expected =
-      getExpectedQuery(cubeName, "select testcube.cityid," + " sum(testcube.msr2) FROM ", null,
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.cityid," + " sum(testcube.msr2) FROM ", null,
         "group by testcube.cityid", getWhereForMonthly2months("c2_testfact"));
     compareQueries(hqlQuery, expected);
   }
@@ -1172,26 +1162,26 @@ public class TestCubeRewriter extends TestQueryRewrite {
         "c1_citytable", true);
     compareQueries(hqlQuery, expected);
 
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
     hqlQuery = rewrite("select name, stateid from citydim", conf);
     expected =
       getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null, "c2_citytable", false);
     compareQueries(hqlQuery, expected);
 
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C1");
     hqlQuery = rewrite("select name, stateid from citydim", conf);
     expected =
       getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null, "c1_citytable", true);
     compareQueries(hqlQuery, expected);
 
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "");
     conf.set(CubeQueryConfUtil.VALID_STORAGE_DIM_TABLES, "C1_citytable");
     hqlQuery = rewrite("select name, stateid from citydim", conf);
     expected =
       getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null, "c1_citytable", true);
     compareQueries(hqlQuery, expected);
 
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "");
     conf.set(CubeQueryConfUtil.VALID_STORAGE_DIM_TABLES, "C2_citytable");
     hqlQuery = rewrite("select name, stateid from citydim", conf);
     expected =
@@ -1221,13 +1211,13 @@ public class TestCubeRewriter extends TestQueryRewrite {
       getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", " limit 100", "c1_citytable",
         true);
     compareQueries(hqlQuery, expected);
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
     hqlQuery = rewrite("select name, stateid from citydim " + "limit 100", conf);
     expected =
       getExpectedQuery("citydim", "select citydim.name," + "citydim.stateid from ", " limit 100", "c2_citytable",
         false);
     compareQueries(hqlQuery, expected);
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C1");
     hqlQuery = rewrite("select name, stateid from citydim" + " limit 100", conf);
     expected =
       getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", " limit 100", "c1_citytable",
@@ -1263,16 +1253,16 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String[] expectedQueries = {
       getExpectedQuery("t", "SELECT t.cityid, sum(t.msr2) FROM ", null, " group by t.cityid",
         getWhereForDailyAndHourly2days("t", "C2_testfact")),
-      getExpectedQuery(cubeName, "SELECT testCube.cityid, sum(testCube.msr2)" + " FROM ",
+      getExpectedQuery(TEST_CUBE_NAME, "SELECT testCube.cityid, sum(testCube.msr2)" + " FROM ",
         " testcube.cityid > 100 ", " group by testcube.cityid having" + " sum(testCube.msr2) < 1000",
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact")),
-      getExpectedQuery(cubeName, "SELECT testCube.cityid, sum(testCube.msr2)" + " FROM ",
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact")),
+      getExpectedQuery(TEST_CUBE_NAME, "SELECT testCube.cityid, sum(testCube.msr2)" + " FROM ",
         " testcube.cityid > 100 ", " group by testcube.cityid having"
           + " sum(testCube.msr2) < 1000 order by testCube.cityid asc",
-        getWhereForDailyAndHourly2days(cubeName, "C2_testfact")),
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact")),
     };
     Configuration conf = getConf();
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
+    conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
     for (int i = 0; i < queries.length; i++) {
       String hql = rewrite(queries[i], conf);
       compareQueries(hql, expectedQueries[i]);
@@ -1284,58 +1274,55 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String hqlQuery = rewrite("select dim1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE,
       getConfWithStorages("C1"));
     String expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
-        " group by testcube.dim1", getWhereForDailyAndHourly2days(cubeName, "C1_summary1"));
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
+        " group by testcube.dim1", getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C1_summary1"));
     compareQueries(hqlQuery, expected);
     hqlQuery =
       rewrite("select dim1, dim2, COUNT(msr4)," + " SUM(msr2), msr3 from testCube" + " where " + TWO_DAYS_RANGE,
         getConfWithStorages("C1"));
     expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, testcube,dim2, count(testcube.msr4),"
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, testcube,dim2, count(testcube.msr4),"
           + " sum(testcube.msr2), max(testcube.msr3) FROM ", null, " group by testcube.dim1, testcube.dim2",
-        getWhereForDailyAndHourly2days(cubeName, "C1_summary2"));
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C1_summary2"));
     compareQueries(hqlQuery, expected);
     hqlQuery =
       rewrite("select dim1, dim2, cityid, msr4," + " SUM(msr2), msr3 from testCube" + " where " + TWO_DAYS_RANGE,
         getConfWithStorages("C1"));
     expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, testcube,dim2, testcube.cityid,"
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, testcube,dim2, testcube.cityid,"
           + " count(testcube.msr4), sum(testcube.msr2), max(testcube.msr3) FROM ", null,
         " group by testcube.dim1, testcube.dim2, testcube.cityid",
-        getWhereForDailyAndHourly2days(cubeName, "C1_summary3"));
+        getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C1_summary3"));
     compareQueries(hqlQuery, expected);
   }
 
   @Test
   public void testFactsWithTimedDimension() throws Exception {
-    String twoDaysITRange =
-      "time_range_in(it, '" + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','"
-        + CubeTestSetup.getDateUptoHours(NOW) + "')";
 
     String hqlQuery =
-      rewrite("select dim1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange, getConf());
+      rewrite("select dim1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT, getConf());
     String expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
-        " group by testcube.dim1", getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary1"),
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
+        " group by testcube.dim1", getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "it", "C2_summary1"),
         null);
     compareQueries(hqlQuery, expected);
     hqlQuery =
-      rewrite("select dim1, dim2, COUNT(msr4)," + " SUM(msr2), msr3 from testCube" + " where " + twoDaysITRange,
+      rewrite("select dim1, dim2, COUNT(msr4)," + " SUM(msr2), msr3 from testCube" + " where " + TWO_DAYS_RANGE_IT,
         getConf());
     expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, testcube,dim2, count(testcube.msr4),"
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, testcube,dim2, count(testcube.msr4),"
           + " sum(testcube.msr2), max(testcube.msr3) FROM ", null, " group by testcube.dim1, testcube.dim2",
-        getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary2"),
+        getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "it", "C2_summary2"),
         null);
     compareQueries(hqlQuery, expected);
     hqlQuery =
       rewrite("select dim1, dim2, cityid, count(msr4)," + " SUM(msr2), msr3 from testCube" + " where "
-        + twoDaysITRange, getConf());
+        + TWO_DAYS_RANGE_IT, getConf());
     expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, testcube,dim2, testcube.cityid,"
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, testcube,dim2, testcube.cityid,"
           + " count(testcube.msr4), sum(testcube.msr2), max(testcube.msr3) FROM ", null,
         " group by testcube.dim1, testcube.dim2, testcube.cityid",
-        getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary3"),
+        getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "it", "C2_summary3"),
         null);
     compareQueries(hqlQuery, expected);
   }
@@ -1344,55 +1331,50 @@ public class TestCubeRewriter extends TestQueryRewrite {
   // now.
   // @Test
   public void testCubeQueryTimedDimensionFilter() throws Exception {
-    String twoDaysITRange =
-      "time_range_in(it, '" + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','"
-        + CubeTestSetup.getDateUptoHours(NOW) + "')";
-
     String hqlQuery =
-      rewrite("select dim1, max(msr3)," + " msr2 from testCube" + " where (" + twoDaysITRange
+      rewrite("select dim1, max(msr3)," + " msr2 from testCube" + " where (" + TWO_DAYS_RANGE_IT
         + " OR it == 'default') AND dim1 > 1000", getConf());
-    String expected = getExpectedQuery(cubeName, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ",
+    String expected = getExpectedQuery(TEST_CUBE_NAME,
+      "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ",
       null, "or (( testcube.it ) == 'default')) and ((testcube.dim1) > 1000)" + " group by testcube.dim1",
-      getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary1"),
+      getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "it", "C2_summary1"),
       null);
     compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE + " OR ("
-      + CubeTestSetup.TWO_DAYS_RANGE_BEFORE_4_DAYS + " AND dt='default')", getConf());
+      + TWO_DAYS_RANGE_BEFORE_4_DAYS + " AND dt='default')", getConf());
 
     String expecteddtRangeWhere1 =
-      getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "dt", TWODAYS_BACK, NOW)
+      getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "dt", TWODAYS_BACK, NOW)
         + " OR ("
-        + getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "dt", BEFORE_4_DAYS_START, BEFORE_4_DAYS_END) + ")";
+        + getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "dt", BEFORE_6_DAYS, BEFORE_4_DAYS) + ")";
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, " AND testcube.dt='default'",
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, " AND testcube.dt='default'",
         expecteddtRangeWhere1, "c2_testfact");
     compareQueries(hqlQuery, expected);
 
     String expecteddtRangeWhere2 =
       "("
-        + getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "dt", TWODAYS_BACK, NOW)
+        + getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "dt", TWODAYS_BACK, NOW)
         + " AND testcube.dt='dt1') OR "
-        + getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "dt", BEFORE_4_DAYS_START, BEFORE_4_DAYS_END);
+        + getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "dt", BEFORE_6_DAYS, BEFORE_4_DAYS);
     hqlQuery =
       rewrite("select SUM(msr2) from testCube" + " where (" + TWO_DAYS_RANGE + " AND dt='dt1') OR ("
-        + CubeTestSetup.TWO_DAYS_RANGE_BEFORE_4_DAYS + " AND dt='default')", getConf());
+        + TWO_DAYS_RANGE_BEFORE_4_DAYS + " AND dt='default')", getConf());
     expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, " AND testcube.dt='default'",
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ", null, " AND testcube.dt='default'",
         expecteddtRangeWhere2, "c2_testfact");
     compareQueries(hqlQuery, expected);
 
-    String twoDaysPTRange =
-      "time_range_in(pt, '" + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','"
-        + CubeTestSetup.getDateUptoHours(NOW) + "')";
+    String twoDaysPTRange = getTimeRangeString("pt", DAILY, -2, 0, HOURLY);
     hqlQuery =
-      rewrite("select dim1, max(msr3)," + " msr2 from testCube where (" + twoDaysITRange + " OR (" + twoDaysPTRange
+      rewrite("select dim1, max(msr3)," + " msr2 from testCube where (" + TWO_DAYS_RANGE_IT + " OR (" + twoDaysPTRange
         + " and it == 'default')) AND dim1 > 1000", getConf());
     String expectedITPTrange =
-      getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", TWODAYS_BACK, NOW) + " OR ("
-        + getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "pt", TWODAYS_BACK, NOW) + ")";
+      getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "it", TWODAYS_BACK, NOW) + " OR ("
+        + getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "pt", TWODAYS_BACK, NOW) + ")";
     expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
         "AND testcube.it == 'default' and testcube.dim1 > 1000 group by testcube.dim1", expectedITPTrange,
         "C2_summary1");
     compareQueries(hqlQuery, expected);
@@ -1400,19 +1382,16 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
   @Test
   public void testLookAhead() throws Exception {
-    String twoDaysITRange =
-      "time_range_in(it, '" + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','"
-        + CubeTestSetup.getDateUptoHours(NOW) + "')";
 
     Configuration conf = getConf();
     conf.set(CubeQueryConfUtil.PROCESS_TIME_PART_COL, "pt");
     conf.setClass(CubeQueryConfUtil.TIME_RANGE_WRITER_CLASS, AbridgedTimeRangeWriter.class, TimeRangeWriter.class);
-    CubeQueryContext ctx = rewriteCtx("select dim1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange,
+    CubeQueryContext ctx = rewriteCtx("select dim1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT,
       conf);
     assertEquals(ctx.candidateFacts.size(), 1);
     CandidateFact candidateFact = ctx.candidateFacts.iterator().next();
     Set<FactPartition> partsQueried = new TreeSet<>(candidateFact.getPartsQueried());
-    Date ceilDay = DateUtil.getCeilDate(TWODAYS_BACK, DAILY);
+    Date ceilDay = DAILY.getCeilDate(getDateWithOffset(DAILY, -2));
     Date nextDay = DateUtils.addDays(ceilDay, 1);
     Date nextToNextDay = DateUtils.addDays(nextDay, 1);
     HashSet<String> storageTables = Sets.newHashSet();
@@ -1421,7 +1400,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     }
     TreeSet<FactPartition> expectedPartsQueried = Sets.newTreeSet();
     for (TimePartition p : Iterables.concat(
-      TimePartition.of(HOURLY, TWODAYS_BACK).rangeUpto(TimePartition.of(HOURLY, ceilDay)),
+      TimePartition.of(HOURLY, getDateWithOffset(DAILY, -2)).rangeUpto(TimePartition.of(HOURLY, ceilDay)),
       TimePartition.of(DAILY, ceilDay).rangeUpto(TimePartition.of(DAILY, nextDay)),
       TimePartition.of(HOURLY, nextDay).rangeUpto(TimePartition.of(HOURLY, NOW)))) {
       FactPartition fp = new FactPartition("it", p, null, storageTables);
@@ -1436,7 +1415,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     }
     assertEquals(partsQueried, expectedPartsQueried);
     conf.setInt(CubeQueryConfUtil.LOOK_AHEAD_PT_PARTS_PFX, 3);
-    ctx = rewriteCtx("select dim1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange,
+    ctx = rewriteCtx("select dim1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT,
       conf);
     partsQueried = new TreeSet<>(ctx.candidateFacts.iterator().next().getPartsQueried());
     // pt does not exist beyond 1 day. So in this test, max look ahead possible is 3
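
For context, the expected partitions assembled in this test follow the usual mixed-granularity cover of a time range: HOURLY partitions from the range start up to the first day boundary (ceilDay), DAILY partitions for the whole days in between (here the single day [ceilDay, nextDay)), and HOURLY partitions again from nextDay up to NOW — the three rangeUpto(...) segments concatenated in the loop above. Schematically:

    [start .. ceilDay) @ HOURLY  +  [ceilDay .. nextDay) @ DAILY  +  [nextDay .. NOW) @ HOURLY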
@@ -1447,35 +1426,35 @@ public class TestCubeRewriter extends TestQueryRewrite {
   public void testCubeQueryWithMultipleRanges() throws Exception {
     String hqlQuery =
       rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE + " OR "
-        + CubeTestSetup.TWO_DAYS_RANGE_BEFORE_4_DAYS, getConfWithStorages("C2"));
+        + TWO_DAYS_RANGE_BEFORE_4_DAYS, getConfWithStorages("C2"));
 
     String expectedRangeWhere =
-      getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "dt", TWODAYS_BACK, NOW)
+      getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "dt", TWODAYS_BACK, NOW)
         + " OR "
-        + getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "dt", BEFORE_4_DAYS_START, BEFORE_4_DAYS_END);
-    String expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, expectedRangeWhere, "c2_testfact");
+        + getWhereForDailyAndHourly2daysWithTimeDim(TEST_CUBE_NAME, "dt", BEFORE_6_DAYS, BEFORE_4_DAYS);
+    String expected = getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2) FROM ",
+      null, null, expectedRangeWhere, "c2_testfact");
     compareQueries(hqlQuery, expected);
     hqlQuery =
       rewrite("select dim1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE + " OR "
-        + CubeTestSetup.TWO_DAYS_RANGE_BEFORE_4_DAYS, getConfWithStorages("C1"));
+        + TWO_DAYS_RANGE_BEFORE_4_DAYS, getConfWithStorages("C1"));
     expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
         " group by testcube.dim1", expectedRangeWhere, "C1_summary1");
     compareQueries(hqlQuery, expected);
     hqlQuery =
       rewrite("select dim1, dim2, COUNT(msr4)," + " SUM(msr2), msr3 from testCube" + " where " + TWO_DAYS_RANGE
-        + " OR " + CubeTestSetup.TWO_DAYS_RANGE_BEFORE_4_DAYS, getConfWithStorages("C1"));
+        + " OR " + TWO_DAYS_RANGE_BEFORE_4_DAYS, getConfWithStorages("C1"));
     expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, testcube,dim2, count(testcube.msr4),"
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, testcube,dim2, count(testcube.msr4),"
           + " sum(testcube.msr2), max(testcube.msr3) FROM ", null, " group by testcube.dim1, testcube.dim2",
         expectedRangeWhere, "C1_summary2");
     compareQueries(hqlQuery, expected);
     hqlQuery =
       rewrite("select dim1, dim2, cityid, count(msr4)," + " SUM(msr2), msr3 from testCube" + " where " + TWO_DAYS_RANGE
-        + " OR " + CubeTestSetup.TWO_DAYS_RANGE_BEFORE_4_DAYS, getConfWithStorages("C1"));
+        + " OR " + TWO_DAYS_RANGE_BEFORE_4_DAYS, getConfWithStorages("C1"));
     expected =
-      getExpectedQuery(cubeName, "select testcube.dim1, testcube,dim2, testcube.cityid,"
+      getExpectedQuery(TEST_CUBE_NAME, "select testcube.dim1, testcube,dim2, testcube.cityid,"
           + " count(testcube.msr4), sum(testcube.msr2), max(testcube.msr3) FROM ", null,
         " group by testcube.dim1, testcube.dim2, testcube.cityid", expectedRangeWhere, "C1_summary3");
     compareQueries(hqlQuery, expected);
@@ -1507,7 +1486,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
         + " left outer join statedim s1 on c1.stateid = s1.id"
         + " left outer join citydim c2 on s1.countryid = c2.id where " + TWO_DAYS_RANGE;
     Configuration conf = getConfWithStorages("C1");
-    conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, true);
+    conf.setBoolean(DISABLE_AUTO_JOINS, true);
     String hqlQuery = rewrite(cubeQl, conf);
     String db = getDbName();
     String expectedJoin =
@@ -1517,19 +1496,19 @@ public class TestCubeRewriter extends TestQueryRewrite {
         + db + "c1_citytable c2 ON (( s1 . countryid ) = ( c2 . id )) AND (c2.dt = 'latest')";
 
     String expected =
-      getExpectedQuery(cubeName, "select sum(testcube.msr2)" + " FROM ", expectedJoin, null, null, null,
-        getWhereForHourly2days(cubeName, "C1_testfact2"));
+      getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2)" + " FROM ", expectedJoin, null, null, null,
+        getWhereForHourly2days(TEST_CUBE_NAME, "C1_testfact2"));
     compareQueries(hqlQuery, expected);
   }
 
   @Test
   public void testJoinPathColumnLifeValidation() throws Exception {
     HiveConf testConf = new HiveConf(new HiveConf(getConf(), HiveConf.class));
-    testConf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
-    System.out.println("@@ Joins disabled? " + testConf.get(CubeQueryConfUtil.DISABLE_AUTO_JOINS));
+    testConf.setBoolean(DISABLE_AUTO_JOINS, false);
+    System.out.println("@@ Joins disabled? " + testConf.get(DISABLE_AUTO_JOINS));
     // Set column life of dim2 column in testCube
     CubeMetastoreClient client = CubeMetastoreClient.getInstance(testConf);
-    Cube cube = (Cube) client.getCube(cubeName);
+    Cube cube = (Cube) client.getCube(TEST_CUBE_NAME);
 
     ReferencedDimAtrribute col = (ReferencedDimAtrribute) cube.getColumnByName("cdim2");
     assertNotNull(col);
@@ -1544,7 +1523,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     } finally {
       // Add old column back
       cube.alterDimension(col);
-      client.alterCube(cubeName, cube);
+      client.alterCube(TEST_CUBE_NAME, cube);
     }
 
     // Assert same query succeeds with valid column
@@ -1556,7 +1535,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
         col.getReferences(), oneWeekBack, null,
         col.getCost());
     cube.alterDimension(newDim2);
-    client.alterCube(cubeName, cube);
+    client.alterCube(TEST_CUBE_NAME, cube);
     String hql = rewrite(query, testConf);
     assertNotNull(hql);
   }
@@ -1582,12 +1561,11 @@ public class TestCubeRewriter extends TestQueryRewrite {
     // Disabling conf should not replace the time dimension
 
     String query =
-      "SELECT test_time_dim, msr2 FROM testCube where " + "time_range_in(test_time_dim, '"
-        + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')";
+      "SELECT test_time_dim, msr2 FROM testCube where " + TWO_DAYS_RANGE_TTD;
 
     HiveConf hconf = new HiveConf(getConf(), TestCubeRewriter.class);
-    hconf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
-    hconf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1,C2,C3,C4");
+    hconf.setBoolean(DISABLE_AUTO_JOINS, false);
+    hconf.set(DRIVER_SUPPORTED_STORAGES, "C1,C2,C3,C4");
     hconf.setBoolean(CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, true);
 
     CubeQueryRewriter rewriter = new CubeQueryRewriter(hconf, hconf);
@@ -1637,7 +1615,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
   public void testSelectDimonlyJoinOnCube() throws Exception {
     String query = "SELECT count (distinct citydim.name) from testCube where " + TWO_DAYS_RANGE;
     Configuration conf = new Configuration(getConf());
-    conf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
+    conf.setBoolean(DISABLE_AUTO_JOINS, false);
     String hql = rewrite(query, conf);
     System.out.println("@@ HQL = " + hql);
     assertNotNull(hql);
@@ -1658,10 +1636,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
 
     // Test 1 - check for contained part columns
-    String twoDaysITRange =
-      "time_range_in(it, '" + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','"
-        + CubeTestSetup.getDateUptoHours(NOW) + "')";
-    String query = "select dim1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange;
+    String query = "select dim1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT;
 
     HiveConf conf = new HiveConf(getConf(), TestCubeRewriter.class);
     conf.set(CubeQueryConfUtil.PROCESS_TIME_PART_COL, "pt");
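
The rewritten tests above replace hand-built range strings (the deleted twoDaysITRange locals) with shared constants such as TWO_DAYS_RANGE_IT and TWO_DAYS_RANGE_TTD, and with offset-based helpers like getTimeRangeString("pt", DAILY, -2, 0, HOURLY) and getDateWithOffset(DAILY, -2), so the tests share one definition of each range. A minimal, self-contained sketch of the idea — the real helpers live in CubeTestSetup, and the simplified names and signatures here are assumptions, not the actual implementation:

    import java.text.SimpleDateFormat;
    import java.util.Calendar;

    // Illustrative only: builds "time_range_in(<col>, '<from>','<to>')" from
    // day offsets relative to now, the way the new test constants appear to work.
    public class TimeRangeSketch {

      // hypothetical stand-in for CubeTestSetup's getDateWithOffset/getDateUptoHours
      static String dateWithDayOffset(int dayOffset) {
        Calendar cal = Calendar.getInstance();
        cal.add(Calendar.DAY_OF_MONTH, dayOffset);
        // format at hour granularity
        return new SimpleDateFormat("yyyy-MM-dd-HH").format(cal.getTime());
      }

      // hypothetical stand-in for CubeTestSetup.getTimeRangeString
      static String timeRangeIn(String partCol, int startDayOffset, int endDayOffset) {
        return "time_range_in(" + partCol + ", '" + dateWithDayOffset(startDayOffset)
          + "','" + dateWithDayOffset(endDayOffset) + "')";
      }

      public static void main(String[] args) {
        // equivalent of the deleted twoDaysITRange local: 'it' over the last two days
        System.out.println(timeRangeIn("it", -2, 0));
      }
    }

Centralizing the ranges this way means a change in how "now" is resolved only has to happen in one place, which is presumably why LENS-865 moved the tests onto these constants.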

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
deleted file mode 100644
index ff9a96d..0000000
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
+++ /dev/null
@@ -1,299 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import static java.util.Calendar.DAY_OF_MONTH;
-import static java.util.Calendar.MONTH;
-
-import static org.apache.lens.cube.metadata.UpdatePeriod.*;
-import static org.apache.lens.cube.parse.DateUtil.*;
-
-import static org.apache.commons.lang.time.DateUtils.addMilliseconds;
-
-import static org.testng.Assert.assertEquals;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.Set;
-
-import org.apache.lens.cube.parse.DateUtil.*;
-import org.apache.lens.server.api.error.LensException;
-
-import org.apache.commons.lang.time.DateUtils;
-
-import org.testng.annotations.BeforeTest;
-import org.testng.annotations.Test;
-
-import com.beust.jcommander.internal.Sets;
-import com.google.common.collect.Lists;
-
-import lombok.extern.slf4j.Slf4j;
-
-/**
- * Unit tests for cube DateUtil class TestDateUtil.
- */
-@Slf4j
-public class TestDateUtil {
-
-  public static final String[] TEST_PAIRS = {
-    "2013-Jan-01", "2013-Jan-31", "2013-Jan-01", "2013-May-31",
-    "2013-Jan-01", "2013-Dec-31", "2013-Feb-01", "2013-Apr-25",
-    "2012-Feb-01", "2013-Feb-01", "2011-Feb-01", "2013-Feb-01",
-    "2013-Jan-02", "2013-Feb-02", "2013-Jan-02", "2013-Mar-02",
-  };
-
-  public static final SimpleDateFormat DATE_FMT = new SimpleDateFormat("yyyy-MMM-dd");
-
-  private Date[] pairs;
-
-  @BeforeTest
-  public void setUp() {
-    pairs = new Date[TEST_PAIRS.length];
-    for (int i = 0; i < TEST_PAIRS.length; i++) {
-      try {
-        pairs[i] = DATE_FMT.parse(TEST_PAIRS[i]);
-      } catch (ParseException e) {
-        log.error("Parsing exception while setup.", e);
-      }
-    }
-  }
-
-
-  @Test
-  public void testMonthsBetween() throws Exception {
-    int i = 0;
-    assertEquals(getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
-      new CoveringInfo(1, true),
-      "2013-Jan-01 to 2013-Jan-31");
-
-    i += 2;
-    assertEquals(getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
-      new CoveringInfo(5, true),
-      "2013-Jan-01 to 2013-May-31");
-
-    i += 2;
-    assertEquals(getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
-      new CoveringInfo(12, true),
-      "2013-Jan-01 to 2013-Dec-31");
-
-    i += 2;
-    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(2, false),
-      "2013-Feb-01 to 2013-Apr-25");
-
-    i += 2;
-    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(12, true),
-      "2012-Feb-01 to 2013-Feb-01");
-
-    i += 2;
-    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(24, true),
-      "2011-Feb-01 to 2013-Feb-01");
-
-    i += 2;
-    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
-      "2013-Jan-02 to 2013-Feb-02");
-
-    i += 2;
-    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(1, false),
-      "2013-Jan-02 to 2013-Mar-02");
-  }
-
-  @Test
-  public void testQuartersBetween() throws Exception {
-    int i = 0;
-    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
-      "2013-Jan-01 to 2013-Jan-31");
-
-    i += 2;
-    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(1, false),
-      "2013-Jan-01 to 2013-May-31");
-
-    i += 2;
-    assertEquals(getQuarterlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
-      new CoveringInfo(4, true),
-      "2013-Jan-01 to 2013-Dec-31");
-
-    i += 2;
-    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
-      "2013-Feb-01 to 2013-Apr-25");
-
-    i += 2;
-    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(3, false),
-      "2012-Feb-01 to 2013-Feb-01");
-
-    i += 2;
-    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(7, false),
-      "2011-Feb-01 to 2013-Feb-01");
-  }
-
-  @Test
-  public void testYearsBetween() throws Exception {
-    int i = 0;
-    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
-      "" + pairs[i] + "->" + pairs[i + 1]);
-
-    i += 2;
-    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
-      "" + pairs[i] + "->" + pairs[i + 1]);
-
-    i += 2;
-    assertEquals(getYearlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
-      new CoveringInfo(1, true), ""
-        + pairs[i] + "->" + pairs[i + 1]);
-
-    i += 2;
-    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
-      "" + pairs[i] + "->" + pairs[i + 1]);
-
-    i += 2;
-    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
-      "" + pairs[i] + "->" + pairs[i + 1]);
-
-    i += 2;
-    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(1, false),
-      "" + pairs[i] + "->" + pairs[i + 1]);
-  }
-
-  @Test
-  public void testWeeksBetween() throws Exception {
-    CoveringInfo weeks;
-
-    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-26"), DATE_FMT.parse("2013-Jun-2"));
-    assertEquals(weeks, new CoveringInfo(1, true), "2013-May-26 to 2013-Jun-2");
-
-    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-27"), DATE_FMT.parse("2013-Jun-3"));
-    assertEquals(weeks, new CoveringInfo(0, false), "2013-May-26 to 2013-Jun-2");
-
-    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-27"), DATE_FMT.parse("2013-Jun-9"));
-    assertEquals(weeks, new CoveringInfo(1, false), "2013-May-26 to 2013-Jun-2");
-
-    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-27"), DATE_FMT.parse("2013-Jun-1"));
-    assertEquals(weeks, new CoveringInfo(0, false), "2013-May-27 to 2013-Jun-1");
-
-    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-25"), DATE_FMT.parse("2013-Jun-2"));
-    assertEquals(weeks, new CoveringInfo(1, false), "2013-May-25 to 2013-Jun-1");
-
-    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-26"), DATE_FMT.parse("2013-Jun-9"));
-    assertEquals(weeks, new CoveringInfo(2, true), "2013-May-26 to 2013-Jun-8");
-
-    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-26"), DATE_FMT.parse("2013-Jun-10"));
-    assertEquals(weeks, new CoveringInfo(2, false), "2013-May-26 to 2013-Jun-10");
-
-    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2015-Dec-27"), DATE_FMT.parse("2016-Jan-03"));
-    assertEquals(weeks, new CoveringInfo(1, true), "2015-Dec-27 to 2016-Jan-03");
-  }
-
-  @Test
-  public void testNowWithGranularity() throws Exception {
-    String dateFmt = "yyyy/MM/dd-HH.mm.ss.SSS";
-    // Tuesday Sept 23, 2014, 12.02.05.500 pm
-    String testDateStr = "2014/09/23-12.02.05.500";
-    final SimpleDateFormat sdf = new SimpleDateFormat(dateFmt);
-    final Date testDate = sdf.parse(testDateStr);
-
-    System.out.print("@@ testDateStr=" + testDateStr + " parsed date=" + testDate);
-
-    // Tests without a diff, just resolve now with different granularity
-    assertEquals(testDateStr, sdf.format(resolveDate("now", testDate)));
-    assertEquals("2014/01/01-00.00.00.000", sdf.format(resolveDate("now.year", testDate)));
-    assertEquals("2014/09/01-00.00.00.000", sdf.format(resolveDate("now.month", testDate)));
-    // Start of week resolves to Sunday
-    assertEquals("2014/09/21-00.00.00.000", sdf.format(resolveDate("now.week", testDate)));
-    assertEquals("2014/09/23-00.00.00.000", sdf.format(resolveDate("now.day", testDate)));
-    assertEquals("2014/09/23-12.00.00.000", sdf.format(resolveDate("now.hour", testDate)));
-    assertEquals("2014/09/23-12.02.00.000", sdf.format(resolveDate("now.minute", testDate)));
-    assertEquals("2014/09/23-12.02.05.000", sdf.format(resolveDate("now.second", testDate)));
-
-    // Tests with a diff
-    assertEquals("2014/09/22-00.00.00.000", sdf.format(resolveDate("now.day -1day", testDate)));
-    assertEquals("2014/09/23-10.00.00.000", sdf.format(resolveDate("now.hour -2hour", testDate)));
-    assertEquals("2014/09/24-12.00.00.000", sdf.format(resolveDate("now.hour +24hour", testDate)));
-    assertEquals("2015/01/01-00.00.00.000", sdf.format(resolveDate("now.year +1year", testDate)));
-    assertEquals("2014/02/01-00.00.00.000", sdf.format(resolveDate("now.year +1month", testDate)));
-  }
-
-  @Test
-  public void testFloorDate() throws ParseException {
-    Date date = ABSDATE_PARSER.get().parse("2015-01-01-00:00:00,000");
-    Date curDate = date;
-    for (int i = 0; i < 284; i++) {
-      assertEquals(getFloorDate(curDate, YEARLY), date);
-      curDate = addMilliseconds(curDate, 111111111);
-    }
-    assertEquals(getFloorDate(curDate, YEARLY), DateUtils.addYears(date, 1));
-    assertEquals(getFloorDate(date, WEEKLY), ABSDATE_PARSER.get().parse("2014-12-28-00:00:00,000"));
-  }
-
-  @Test
-  public void testCeilDate() throws ParseException {
-    Date date = ABSDATE_PARSER.get().parse("2015-12-26-06:30:15,040");
-    assertEquals(getCeilDate(date, YEARLY), ABSDATE_PARSER.get().parse("2016-01-01-00:00:00,000"));
-    assertEquals(getCeilDate(date, MONTHLY), ABSDATE_PARSER.get().parse("2016-01-01-00:00:00,000"));
-    assertEquals(getCeilDate(date, DAILY), ABSDATE_PARSER.get().parse("2015-12-27-00:00:00,000"));
-    assertEquals(getCeilDate(date, HOURLY), ABSDATE_PARSER.get().parse("2015-12-26-07:00:00,000"));
-    assertEquals(getCeilDate(date, MINUTELY), ABSDATE_PARSER.get().parse("2015-12-26-06:31:00,000"));
-    assertEquals(getCeilDate(date, SECONDLY), ABSDATE_PARSER.get().parse("2015-12-26-06:30:16,000"));
-    assertEquals(getCeilDate(date, WEEKLY), ABSDATE_PARSER.get().parse("2015-12-27-00:00:00,000"));
-  }
-
-  @Test
-  public void testTimeDiff() throws LensException {
-    ArrayList<String> minusFourDays =
-      Lists.newArrayList("-4 days", "-4days", "-4day", "-4 day", "- 4days", "- 4 day");
-    ArrayList<String> plusFourDays =
-      Lists.newArrayList("+4 days", "4 days", "+4days", "4day", "4 day", "+ 4days", "+ 4 day", "+4 day");
-    Set<TimeDiff> diffs = Sets.newHashSet();
-    for (String diffStr : minusFourDays) {
-      diffs.add(TimeDiff.parseFrom(diffStr));
-    }
-    assertEquals(diffs.size(), 1);
-    TimeDiff minusFourDaysDiff = diffs.iterator().next();
-    assertEquals(minusFourDaysDiff.quantity, -4);
-    assertEquals(minusFourDaysDiff.updatePeriod, DAILY);
-
-    diffs.clear();
-    for (String diffStr : plusFourDays) {
-      diffs.add(TimeDiff.parseFrom(diffStr));
-    }
-    assertEquals(diffs.size(), 1);
-    TimeDiff plusFourDaysDiff = diffs.iterator().next();
-    assertEquals(plusFourDaysDiff.quantity, 4);
-    assertEquals(plusFourDaysDiff.updatePeriod, DAILY);
-    Date now = new Date();
-    assertEquals(minusFourDaysDiff.offsetFrom(plusFourDaysDiff.offsetFrom(now)), now);
-    assertEquals(plusFourDaysDiff.offsetFrom(minusFourDaysDiff.offsetFrom(now)), now);
-    assertEquals(minusFourDaysDiff.negativeOffsetFrom(now), plusFourDaysDiff.offsetFrom(now));
-    assertEquals(minusFourDaysDiff.offsetFrom(now), plusFourDaysDiff.negativeOffsetFrom(now));
-  }
-
-  @Test
-  public void testRelativeToAbsolute() throws LensException {
-    Date now = new Date();
-    Date nowDay = DateUtils.truncate(now, DAY_OF_MONTH);
-    Date nowDayMinus2Days = DateUtils.add(nowDay, DAY_OF_MONTH, -2);
-    assertEquals(relativeToAbsolute("now", now), DateUtil.ABSDATE_PARSER.get().format(now));
-    assertEquals(relativeToAbsolute("now.day", now), DateUtil.ABSDATE_PARSER.get().format(nowDay));
-    assertEquals(relativeToAbsolute("now.day - 2 days", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
-    assertEquals(relativeToAbsolute("now.day - 2 day", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
-    assertEquals(relativeToAbsolute("now.day - 2day", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
-    assertEquals(relativeToAbsolute("now.day -2 day", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
-    assertEquals(relativeToAbsolute("now.day -2 days", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
-  }
-}
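
The deleted assertions above are the best record of DateUtil's relative-date grammar ("now", "now.day", "now.day -2 days", ...) and of its floor/ceil semantics. As a rough illustration of that grammar — this is a from-scratch sketch matching the deleted expectations, not the Lens DateUtil implementation, and it omits week handling:

    import java.text.SimpleDateFormat;
    import java.util.Calendar;
    import java.util.Date;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class RelativeDateSketch {
      // "now", optionally ".granularity", optionally a signed offset like "-2 days"
      private static final Pattern REL = Pattern.compile(
        "now(\\.(year|month|day|hour|minute|second))?\\s*(([+-]?)\\s*(\\d+)\\s*(year|month|day|hour)s?)?");

      static Date resolve(String expr, Date now) {
        Matcher m = REL.matcher(expr.trim());
        if (!m.matches()) {
          throw new IllegalArgumentException(expr);
        }
        Calendar cal = Calendar.getInstance();
        cal.setTime(now);
        if (m.group(2) != null) {         // truncate to the named granularity
          switch (m.group(2)) {           // deliberate fall-through, no breaks
          case "year":   cal.set(Calendar.MONTH, Calendar.JANUARY);
          case "month":  cal.set(Calendar.DAY_OF_MONTH, 1);
          case "day":    cal.set(Calendar.HOUR_OF_DAY, 0);
          case "hour":   cal.set(Calendar.MINUTE, 0);
          case "minute": cal.set(Calendar.SECOND, 0);
          case "second": cal.set(Calendar.MILLISECOND, 0);
          }
        }
        if (m.group(5) != null) {         // apply the signed offset, e.g. "-2day"
          int qty = Integer.parseInt(m.group(5));
          if ("-".equals(m.group(4))) {
            qty = -qty;
          }
          int field = "year".equals(m.group(6)) ? Calendar.YEAR
            : "month".equals(m.group(6)) ? Calendar.MONTH
            : "day".equals(m.group(6)) ? Calendar.DAY_OF_MONTH
            : Calendar.HOUR_OF_DAY;
          cal.add(field, qty);
        }
        return cal.getTime();
      }

      public static void main(String[] args) throws Exception {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd-HH.mm.ss.SSS");
        Date testDate = sdf.parse("2014/09/23-12.02.05.500");
        System.out.println(sdf.format(resolve("now.day -1day", testDate)));    // 2014/09/22-00.00.00.000
        System.out.println(sdf.format(resolve("now.hour +24hour", testDate))); // 2014/09/24-12.00.00.000
      }
    }

(The expected outputs in the comments match the corresponding assertions in the deleted testNowWithGranularity.)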


[28/50] [abbrv] lens git commit: LENS-123 : Adds ability to load different instances of same driver class

Posted by sh...@apache.org.
LENS-123 : Adds ability to load different instances of same driver class


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/114dab34
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/114dab34
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/114dab34

Branch: refs/heads/LENS-581
Commit: 114dab34642929152230cd049f1436ad796bedc2
Parents: 404d451
Author: Puneet Gupta <pu...@gmail.com>
Authored: Mon Nov 30 11:06:12 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Mon Nov 30 11:06:12 2015 +0530

----------------------------------------------------------------------
 .../lens/api/query/LensPreparedQuery.java       |   8 +-
 .../org/apache/lens/api/query/LensQuery.java    |   4 +-
 .../lens/cli/commands/LensQueryCommands.java    |   2 +-
 .../drivers/hive/hive1/hivedriver-site.xml      |  80 ++++++++++
 .../org/apache/lens/client/LensStatement.java   |   2 +-
 .../drivers/hive/hive1/hivedriver-site.xml      |  80 ++++++++++
 .../lens/driver/cube/TestMinCostSelector.java   |   8 +-
 .../org/apache/lens/driver/es/ESDriver.java     |  13 +-
 .../org/apache/lens/driver/es/ESDriverTest.java |   2 +-
 .../org/apache/lens/driver/hive/HiveDriver.java |  51 ++++---
 .../apache/lens/driver/hive/TestHiveDriver.java |   4 +-
 .../lens/driver/hive/TestRemoteHiveDriver.java  |  16 +-
 .../drivers/hive/hive1/hivedriver-site.xml      |  49 +++++++
 .../src/test/resources/hivedriver-site.xml      |  49 -------
 .../org/apache/lens/driver/jdbc/JDBCDriver.java |  38 ++---
 .../driver/jdbc/TestColumnarSQLRewriter.java    |   6 +-
 .../apache/lens/driver/jdbc/TestJDBCFinal.java  |   2 +-
 .../apache/lens/driver/jdbc/TestJdbcDriver.java |  12 +-
 .../drivers/jdbc/jdbc1/jdbcdriver-site.xml      |  70 +++++++++
 .../src/test/resources/jdbcdriver-site.xml      |  70 ---------
 .../drivers/hive/hive1/hivedriver-site.xml      |  80 ++++++++++
 lens-ml-lib/src/test/resources/lens-site.xml    |   2 +-
 .../lib/query/TestAbstractFileFormatter.java    |   2 +-
 .../lens/server/api/LensConfConstants.java      |  20 ++-
 .../server/api/driver/AbstractLensDriver.java   |  71 +++++++++
 .../lens/server/api/driver/LensDriver.java      |  13 +-
 .../server/api/query/AbstractQueryContext.java  |   2 +-
 .../api/query/DriverSelectorQueryContext.java   |   4 +-
 .../server/api/query/FinishedLensQuery.java     |  15 +-
 .../server/api/query/PreparedQueryContext.java  |   7 +-
 .../lens/server/api/query/QueryContext.java     |   3 +-
 .../lens/server/api/driver/MockDriver.java      |  14 +-
 .../lens/server/api/query/MockQueryContext.java |   2 +-
 .../api/query/TestAbstractQueryContext.java     |   4 +-
 .../apache/lens/server/query/LensServerDAO.java |  10 +-
 .../server/query/QueryExecutionServiceImpl.java | 146 ++++++++++++++-----
 .../apache/lens/server/rewrite/RewriteUtil.java |   2 +-
 .../lens/server/session/LensSessionImpl.java    |  15 +-
 .../src/main/resources/lensserver-default.xml   |   2 +-
 .../org/apache/lens/server/LensJerseyTest.java  |   3 +
 .../apache/lens/server/TestServerRestart.java   |  10 +-
 .../lens/server/query/TestEventService.java     |   2 +
 .../apache/lens/server/query/TestLensDAO.java   |   2 +-
 .../lens/server/query/TestQueryConstraints.java |  60 +-------
 .../lens/server/query/TestQueryService.java     |  38 +++--
 .../lens/server/rewrite/TestRewriting.java      |  17 ++-
 .../drivers/hive/hive1/hivedriver-site.xml      |  85 +++++++++++
 .../drivers/hive/hive2/hivedriver-site.xml      |  85 +++++++++++
 .../drivers/jdbc/jdbc1/jdbcdriver-site.xml      |  58 ++++++++
 .../mock/fail1/failing-query-driver-site.xml    |  32 ++++
 .../mockHive/mockHive1/hivedriver-site.xml      |  95 ++++++++++++
 .../mockHive/mockHive2/hivedriver-site.xml      |  95 ++++++++++++
 .../resources/failing-query-driver-site.xml     |  27 ----
 .../src/test/resources/hivedriver-site.xml      |  80 ----------
 .../src/test/resources/jdbcdriver-site.xml      |  55 -------
 lens-server/src/test/resources/lens-site.xml    |   4 +-
 src/site/apt/admin/config-server.apt            |  37 ++++-
 src/site/apt/admin/config.apt                   |   2 +-
 src/site/apt/lenshome/install-and-run.apt       |  18 ++-
 .../drivers/hive/hive1/hivedriver-site.xml      |  57 ++++++++
 .../drivers/jdbc/jdbc1/jdbcdriver-site.xml      |  50 +++++++
 .../server/hivedriver-site.xml                  |  57 --------
 .../server/jdbcdriver-site.xml                  |  50 -------
 tools/conf-pseudo-distr/server/lens-site.xml    |   2 +-
 .../drivers/hive/hive1/hivedriver-site.xml      |  41 ++++++
 .../drivers/jdbc/jdbc1/jdbcdriver-site.xml      |  50 +++++++
 tools/conf/server/hivedriver-site.xml           |  41 ------
 tools/conf/server/jdbcdriver-site.xml           |  50 -------
 tools/conf/server/lens-site.xml                 |   2 +-
 69 files changed, 1463 insertions(+), 722 deletions(-)
----------------------------------------------------------------------
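
Reading the moved files in the diffstat, driver configuration shifts from a single flat <driver>driver-site.xml per driver class to a per-instance directory tree, which is what allows several instances of the same driver class to be loaded side by side. The lens-server test resources above suggest a layout like this (one subdirectory per driver instance; the instance names hive1, hive2, jdbc1 are simply the ones used by the tests):

    drivers/
      hive/
        hive1/hivedriver-site.xml          <- one HiveDriver instance
        hive2/hivedriver-site.xml          <- a second instance of the same class
      jdbc/
        jdbc1/jdbcdriver-site.xml
      mock/
        fail1/failing-query-driver-site.xml

A driver is then presumably addressed by its path-derived name (driver type plus instance, e.g. hive/hive1) rather than by its class name alone — consistent with the selectedDriverClassName -> selectedDriverName renames below.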


http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-api/src/main/java/org/apache/lens/api/query/LensPreparedQuery.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/query/LensPreparedQuery.java b/lens-api/src/main/java/org/apache/lens/api/query/LensPreparedQuery.java
index 9595ce9..f5bc35d 100644
--- a/lens-api/src/main/java/org/apache/lens/api/query/LensPreparedQuery.java
+++ b/lens-api/src/main/java/org/apache/lens/api/query/LensPreparedQuery.java
@@ -50,8 +50,8 @@ import lombok.NoArgsConstructor;
  *          the prepared time
  * @param preparedUser
  *          the prepared user
- * @param selectedDriverClassName
- *          the selected driver class name
+ * @param selectedDriverName
+ *          the selected driver's fully qualified name
  * @param driverQuery
  *          the driver query
  * @param conf
@@ -93,11 +93,11 @@ public class LensPreparedQuery {
   private String preparedUser;
 
   /**
-   * The selected driver class name.
+   * The selected driver's fully qualified name.
    */
   @XmlElement
   @Getter
-  private String selectedDriverClassName;
+  private String selectedDriverName;
 
   /**
    * The driver query.

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-api/src/main/java/org/apache/lens/api/query/LensQuery.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/query/LensQuery.java b/lens-api/src/main/java/org/apache/lens/api/query/LensQuery.java
index 204ecee..23c4dec 100644
--- a/lens-api/src/main/java/org/apache/lens/api/query/LensQuery.java
+++ b/lens-api/src/main/java/org/apache/lens/api/query/LensQuery.java
@@ -46,7 +46,7 @@ import lombok.*;
  *          the priority
  * @param isPersistent
  *          the is persistent
- * @param selectedDriverClassName
+ * @param selectedDriverName
  *          the selected driver class name
  * @param driverQuery
  *          the driver query
@@ -121,7 +121,7 @@ public class LensQuery {
    */
   @XmlElement
   @Getter
-  private String selectedDriverClassName;
+  private String selectedDriverName;
 
   /**
    * The driver query.

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
index 83b919b..fe9b84d 100644
--- a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
+++ b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
@@ -364,7 +364,7 @@ public class LensQueryCommands extends BaseLensCommand {
         .append("Prepare handle:").append(prepared.getPrepareHandle()).append("\n")
         .append("User:" + prepared.getPreparedUser()).append("\n")
         .append("Prepared at:").append(prepared.getPreparedTime()).append("\n")
-        .append("Selected driver :").append(prepared.getSelectedDriverClassName()).append("\n")
+        .append("Selected driver :").append(prepared.getSelectedDriverName()).append("\n")
         .append("Driver query:").append(prepared.getDriverQuery()).append("\n");
       if (prepared.getConf() != null) {
         sb.append("Conf:").append(prepared.getConf().getProperties()).append("\n");

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-cli/src/test/resources/drivers/hive/hive1/hivedriver-site.xml
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/resources/drivers/hive/hive1/hivedriver-site.xml b/lens-cli/src/test/resources/drivers/hive/hive1/hivedriver-site.xml
new file mode 100644
index 0000000..f2aed88
--- /dev/null
+++ b/lens-cli/src/test/resources/drivers/hive/hive1/hivedriver-site.xml
@@ -0,0 +1,80 @@
+<?xml version="1.0"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+
+  <property>
+    <name>lens.driver.hive.connection.class</name>
+    <value>org.apache.lens.driver.hive.RemoteThriftConnection</value>
+    <description>The connection class from HiveDriver to HiveServer.</description>
+  </property>
+
+  <property>
+     
+    <name>hive.metastore.local</name>
+     
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>hive.metastore.warehouse.dir</name>
+    <value>${project.build.directory}/hive/warehouse</value>
+  </property>
+
+  <property>
+    <name>javax.jdo.option.ConnectionURL</name>
+    <value>jdbc:derby:;databaseName=target/metastore_db;create=true</value>
+    <description>JDBC connect string for a JDBC metastore</description>
+  </property>
+
+  <property>
+    <name>hive.lock.manager</name>
+    <value>org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.bind.host</name>
+    <value>localhost</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.port</name>
+    <value>12345</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.client.retry.limit</name>
+    <value>3</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.client.connect.retry.limit</name>
+    <value>3</value>
+  </property>
+
+  <property>
+    <name>lens.driver.test.key</name>
+    <value>set</value>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/LensStatement.java b/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
index 0a511f0..40e6d76 100644
--- a/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
+++ b/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
@@ -213,7 +213,7 @@ public class LensStatement {
       }
     }
     LensClient.getCliLooger().info("User query: '{}' was submitted to {}", query.getUserQuery(),
-      query.getSelectedDriverClassName());
+      query.getSelectedDriverName());
     if (query.getDriverQuery() != null) {
       LensClient.getCliLooger().info(" Driver query: '{}' and Driver handle: {}", query.getDriverQuery(),
         query.getDriverOpHandle());

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-client/src/test/resources/drivers/hive/hive1/hivedriver-site.xml
----------------------------------------------------------------------
diff --git a/lens-client/src/test/resources/drivers/hive/hive1/hivedriver-site.xml b/lens-client/src/test/resources/drivers/hive/hive1/hivedriver-site.xml
new file mode 100644
index 0000000..f2aed88
--- /dev/null
+++ b/lens-client/src/test/resources/drivers/hive/hive1/hivedriver-site.xml
@@ -0,0 +1,80 @@
+<?xml version="1.0"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+
+  <property>
+    <name>lens.driver.hive.connection.class</name>
+    <value>org.apache.lens.driver.hive.RemoteThriftConnection</value>
+    <description>The connection class from HiveDriver to HiveServer.</description>
+  </property>
+
+  <property>
+     
+    <name>hive.metastore.local</name>
+     
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>hive.metastore.warehouse.dir</name>
+    <value>${project.build.directory}/hive/warehouse</value>
+  </property>
+
+  <property>
+    <name>javax.jdo.option.ConnectionURL</name>
+    <value>jdbc:derby:;databaseName=target/metastore_db;create=true</value>
+    <description>JDBC connect string for a JDBC metastore</description>
+  </property>
+
+  <property>
+    <name>hive.lock.manager</name>
+    <value>org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.bind.host</name>
+    <value>localhost</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.port</name>
+    <value>12345</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.client.retry.limit</name>
+    <value>3</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.client.connect.retry.limit</name>
+    <value>3</value>
+  </property>
+
+  <property>
+    <name>lens.driver.test.key</name>
+    <value>set</value>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-cube/src/test/java/org/apache/lens/driver/cube/TestMinCostSelector.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/driver/cube/TestMinCostSelector.java b/lens-cube/src/test/java/org/apache/lens/driver/cube/TestMinCostSelector.java
index 72f1497..8267229 100644
--- a/lens-cube/src/test/java/org/apache/lens/driver/cube/TestMinCostSelector.java
+++ b/lens-cube/src/test/java/org/apache/lens/driver/cube/TestMinCostSelector.java
@@ -66,13 +66,13 @@ public class TestMinCostSelector {
     LensConf qconf = new LensConf();
 
     MockDriver d1 = new MockDriver();
-    d1.configure(conf);
+    d1.configure(conf, null, null);
     MockDriver d2 = new MockDriver();
-    d2.configure(conf);
+    d2.configure(conf, null, null);
     MockFailDriver fd1 = new MockFailDriver();
-    fd1.configure(conf);
+    fd1.configure(conf, null, null);
     MockFailDriver fd2 = new MockFailDriver();
-    fd2.configure(conf);
+    fd2.configure(conf, null, null);
 
     drivers.add(d1);
     drivers.add(d2);

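For reference, a minimal sketch of the new three-argument configure contract exercised above (illustrative values, not part of this commit; drivers extending AbstractLensDriver require a non-blank type and name, while MockDriver implements LensDriver directly and tolerates the nulls passed here):

    LensDriver driver = new HiveDriver();
    // AbstractLensDriver joins type and name into the fully qualified name "hive/hive1".
    driver.configure(conf, "hive", "hive1");
    assert "hive/hive1".equals(driver.getFullyQualifiedName());
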
http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-driver-es/src/main/java/org/apache/lens/driver/es/ESDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-es/src/main/java/org/apache/lens/driver/es/ESDriver.java b/lens-driver-es/src/main/java/org/apache/lens/driver/es/ESDriver.java
index 14d9f99..d166e43 100644
--- a/lens-driver-es/src/main/java/org/apache/lens/driver/es/ESDriver.java
+++ b/lens-driver-es/src/main/java/org/apache/lens/driver/es/ESDriver.java
@@ -48,6 +48,7 @@ import org.apache.lens.server.api.query.cost.FactPartitionBasedQueryCost;
 import org.apache.lens.server.api.query.cost.QueryCost;
 
 import org.apache.commons.lang.Validate;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -62,13 +63,14 @@ import org.antlr.runtime.tree.Tree;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
+
 import lombok.extern.slf4j.Slf4j;
 
 /**
  * Driver for elastic search
  */
 @Slf4j
-public class ESDriver implements LensDriver {
+public class ESDriver extends AbstractLensDriver {
 
   private static final AtomicInteger THID = new AtomicInteger();
   private static final double STREAMING_PARTITION_COST = 0;
@@ -317,10 +319,11 @@ public class ESDriver implements LensDriver {
   }
 
   @Override
-  public void configure(Configuration conf) throws LensException {
+  public void configure(Configuration conf, String driverType, String driverName) throws LensException {
+    super.configure(conf, driverType, driverName);
     this.conf = new Configuration(conf);
     this.conf.addResource("esdriver-default.xml");
-    this.conf.addResource("esdriver-site.xml");
+    this.conf.addResource(getDriverResourcePath("esdriver-site.xml"));
     config = new ESDriverConfig(this.conf);
     Class klass;
     try {
@@ -341,10 +344,10 @@ public class ESDriver implements LensDriver {
       | InstantiationException
       | IllegalAccessException
       | InvocationTargetException e) {
-      log.error("ES driver cannot start!", e);
+      log.error("ES driver {} cannot start!", getFullyQualifiedName(), e);
       throw new LensException("Cannot start es driver", e);
     }
-    log.debug("ES Driver configured");
+    log.info("ES Driver {} configured", getFullyQualifiedName());
     asyncQueryPool = Executors.newCachedThreadPool(new ThreadFactory() {
       @Override
       public Thread newThread(Runnable runnable) {

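Each driver now loads its site file from a per-instance path instead of a single classpath resource. A minimal sketch of the resolution, assuming (consistently with the new test resources) that getDriverResourcePath() prefixes the drivers base directory and the driver's fully qualified name:

    // Hypothetical equivalent of getDriverResourcePath("esdriver-site.xml")
    // for a driver configured with type "es" and name "es1":
    String sitePath = LensConfConstants.DRIVERS_BASE_DIR + "/es/es1/esdriver-site.xml";
    conf.addResource(sitePath);  // resolves to "drivers/es/es1/esdriver-site.xml"
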
http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-driver-es/src/test/java/org/apache/lens/driver/es/ESDriverTest.java
----------------------------------------------------------------------
diff --git a/lens-driver-es/src/test/java/org/apache/lens/driver/es/ESDriverTest.java b/lens-driver-es/src/test/java/org/apache/lens/driver/es/ESDriverTest.java
index f453416..ab6f22b 100644
--- a/lens-driver-es/src/test/java/org/apache/lens/driver/es/ESDriverTest.java
+++ b/lens-driver-es/src/test/java/org/apache/lens/driver/es/ESDriverTest.java
@@ -35,7 +35,7 @@ public abstract class ESDriverTest {
   public void beforeTest() throws LensException {
     initializeConfig(config);
     esDriverConfig = new ESDriverConfig(config);
-    driver.configure(config);
+    driver.configure(config, "es", "es1");
     mockClientES = (MockClientES) driver.getESClient();
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
index 19c4793..a84c679 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
@@ -50,6 +50,7 @@ import org.apache.lens.server.api.query.priority.CostToPriorityRangeConf;
 import org.apache.lens.server.api.query.priority.QueryPriorityDecider;
 
 import org.apache.commons.lang.StringUtils;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -65,6 +66,7 @@ import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.type.TypeReference;
 
 import com.google.common.collect.ImmutableSet;
+
 import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
 
@@ -72,7 +74,7 @@ import lombok.extern.slf4j.Slf4j;
  * The Class HiveDriver.
  */
 @Slf4j
-public class HiveDriver implements LensDriver {
+public class HiveDriver extends AbstractLensDriver {
 
   /** The Constant HIVE_CONNECTION_CLASS. */
   public static final String HIVE_CONNECTION_CLASS = "lens.driver.hive.connection.class";
@@ -331,16 +333,18 @@ public class HiveDriver implements LensDriver {
    * @see org.apache.lens.server.api.driver.LensDriver#configure(org.apache.hadoop.conf.Configuration)
    */
   @Override
-  public void configure(Configuration conf) throws LensException {
+  public void configure(Configuration conf, String driverType, String driverName) throws LensException {
+    super.configure(conf, driverType, driverName);
     this.driverConf = new Configuration(conf);
+    String driverConfPath = getDriverResourcePath("hivedriver-site.xml");
     this.driverConf.addResource("hivedriver-default.xml");
-    this.driverConf.addResource("hivedriver-site.xml");
+    this.driverConf.addResource(driverConfPath);
 
     // resources have to be added separately on hiveConf again because new HiveConf() overrides hive.* properties
     // from HiveConf
     this.hiveConf = new HiveConf(conf, HiveDriver.class);
     this.hiveConf.addResource("hivedriver-default.xml");
-    this.hiveConf.addResource("hivedriver-site.xml");
+    this.hiveConf.addResource(driverConfPath);
 
     connectionClass = this.driverConf.getClass(HIVE_CONNECTION_CLASS, EmbeddedThriftConnection.class,
       ThriftConnection.class);
@@ -366,6 +370,7 @@ public class HiveDriver implements LensDriver {
     }
     queryConstraints = getImplementations(QUERY_LAUNCHING_CONSTRAINT_FACTORIES_KEY, driverConf);
     selectionPolicies = getImplementations(WAITING_QUERIES_SELECTION_POLICY_FACTORIES_KEY, driverConf);
+    log.info("Hive driver {} configured successfully", getFullyQualifiedName());
   }
 
   private QueryCost calculateQueryCost(AbstractQueryContext qctx) throws LensException {
@@ -378,7 +383,7 @@ public class HiveDriver implements LensDriver {
 
   @Override
   public QueryCost estimate(AbstractQueryContext qctx) throws LensException {
-    log.info("Estimate: {}", qctx.getDriverQuery(this));
+    log.info("{} Estimate: {}", getFullyQualifiedName(), qctx.getDriverQuery(this));
     if (qctx.getDriverQuery(this) == null) {
       throw new NullPointerException("Null driver query for " + qctx.getUserQuery());
     }
@@ -413,7 +418,7 @@ public class HiveDriver implements LensDriver {
       // explain called again and again
       return (HiveQueryPlan) explainCtx.getDriverContext().getDriverQueryPlan(this);
     }
-    log.info("Explain: {}", explainCtx.getDriverQuery(this));
+    log.info("{} Explain: {}", getFullyQualifiedName(), explainCtx.getDriverQuery(this));
     Configuration explainConf = new Configuration(explainCtx.getDriverConf(this));
     explainConf.setClassLoader(explainCtx.getConf().getClassLoader());
     explainConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
@@ -751,7 +756,7 @@ public class HiveDriver implements LensDriver {
    */
   @Override
   public void close() {
-    log.info("CloseDriver");
+    log.info("CloseDriver {}", getFullyQualifiedName());
     // Close this driver and release all resources
     sessionLock.lock();
     try {
@@ -812,8 +817,8 @@ public class HiveDriver implements LensDriver {
           connection = new ExpirableConnection(tconn, connectionExpiryTimeout);
           thriftConnExpiryQueue.offer(connection);
           threadConnections.put(connectionKey, connection);
-          log.info("New thrift connection {} for thread: {} for user: {} connection ID={}", connectionClass,
-            Thread.currentThread().getId(), user, connection.getConnId());
+          log.info("New thrift connection {} for thread: {} for user: {} connection ID={} on driver:{}",
+              connectionClass, Thread.currentThread().getId(), user, connection.getConnId(), getFullyQualifiedName());
         } catch (Exception e) {
           throw new LensException(e);
         }
@@ -885,7 +890,7 @@ public class HiveDriver implements LensDriver {
       context.unSetDriverPersistent();
       hiveQuery = context.getSelectedDriverQuery();
     }
-    log.info("Hive driver query:{}", hiveQuery);
+    log.info("Hive driver {} query:{}", getFullyQualifiedName(), hiveQuery);
     context.setSelectedDriverQuery(hiveQuery);
   }
 
@@ -914,14 +919,15 @@ public class HiveDriver implements LensDriver {
         try {
           hiveSession = getClient().openSession(ctx.getClusterUser(), "");
           lensToHiveSession.put(sessionDbKey, hiveSession);
-          log.info("New hive session for user: {} , lens session: {} hive session handle: {}", ctx.getClusterUser(),
-            sessionDbKey, hiveSession.getHandleIdentifier());
+          log.info("New hive session for user: {} , lens session: {} , hive session handle: {} , driver : {}",
+              ctx.getClusterUser(), sessionDbKey, hiveSession.getHandleIdentifier(), getFullyQualifiedName());
           for (LensEventListener<DriverEvent> eventListener : driverListeners) {
             try {
               eventListener.onEvent(new DriverSessionStarted(System.currentTimeMillis(), this, lensSession, hiveSession
                 .getSessionId().toString()));
             } catch (Exception exc) {
-              log.error("Error sending driver start event to listener {}", eventListener, exc);
+              log.error("Error sending driver {} start event to listener {}", getFullyQualifiedName(), eventListener,
+                 exc);
             }
           }
         } catch (Exception e) {
@@ -1006,7 +1012,7 @@ public class HiveDriver implements LensDriver {
               return;
             }
           } catch (LensException e) {
-            log.debug("query handle: {} Not yet launched on driver", handle);
+            log.debug("query handle: {} Not yet launched on driver {}", handle, getFullyQualifiedName());
           }
           Thread.sleep(pollInterval);
           timeSpent += pollInterval;
@@ -1069,9 +1075,9 @@ public class HiveDriver implements LensDriver {
         QueryHandle qhandle = (QueryHandle) in.readObject();
         OperationHandle opHandle = new OperationHandle((TOperationHandle) in.readObject());
         hiveHandles.put(qhandle, opHandle);
-        log.debug("Hive driver recovered {}:{}", qhandle, opHandle);
+        log.debug("Hive driver {} recovered {}:{}", getFullyQualifiedName(), qhandle, opHandle);
       }
-      log.info("HiveDriver recovered {} queries", hiveHandles.size());
+      log.info("Hive driver {} recovered {} queries", getFullyQualifiedName(), hiveHandles.size());
       int numSessions = in.readInt();
       for (int i = 0; i < numSessions; i++) {
         String lensId = in.readUTF();
@@ -1079,7 +1085,7 @@ public class HiveDriver implements LensDriver {
           TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6);
         lensToHiveSession.put(lensId, sHandle);
       }
-      log.info("HiveDriver recovered {} sessions", lensToHiveSession.size());
+      log.info("Hive driver {} recovered {} sessions", getFullyQualifiedName(), lensToHiveSession.size());
     }
   }
 
@@ -1096,15 +1102,15 @@ public class HiveDriver implements LensDriver {
       for (Map.Entry<QueryHandle, OperationHandle> entry : hiveHandles.entrySet()) {
         out.writeObject(entry.getKey());
         out.writeObject(entry.getValue().toTOperationHandle());
-        log.debug("Hive driver persisted {}:{}", entry.getKey(), entry.getValue());
+        log.debug("Hive driver {} persisted {}:{}", getFullyQualifiedName(), entry.getKey(), entry.getValue());
       }
-      log.info("HiveDriver persisted {} queries ", hiveHandles.size());
+      log.info("Hive driver {} persisted {} queries ", getFullyQualifiedName(), hiveHandles.size());
       out.writeInt(lensToHiveSession.size());
       for (Map.Entry<String, SessionHandle> entry : lensToHiveSession.entrySet()) {
         out.writeUTF(entry.getKey());
         out.writeObject(entry.getValue().toTSessionHandle());
       }
-      log.info("HiveDriver persisted {} sessions", lensToHiveSession.size());
+      log.info("Hive driver {} persisted {} sessions", getFullyQualifiedName(), lensToHiveSession.size());
     }
   }
 
@@ -1166,14 +1172,15 @@ public class HiveDriver implements LensDriver {
     for (SessionHandle session : hiveSessionsToCheck) {
       if (isSessionInvalid(exc, session)) {
         // We have to expire previous session
-        log.info("Hive server session {} for lens session {} has become invalid", session, lensSession);
+        log.info("{} Hive server session {} for lens session {} has become invalid", getFullyQualifiedName(), session,
+            lensSession);
         sessionLock.lock();
         try {
           // We should close all connections and clear the session map since
           // most likely all sessions are gone
           closeAllConnections();
           lensToHiveSession.clear();
-          log.info("Cleared all sessions");
+          log.info("{} Cleared all sessions", getFullyQualifiedName());
         } finally {
           sessionLock.unlock();
         }

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
index 722a2da..11efd3c 100644
--- a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
+++ b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
@@ -112,13 +112,13 @@ public class TestHiveDriver {
 
   protected void createDriver() throws LensException {
     conf = new HiveConf();
-    conf.addResource("hivedriver-site.xml");
+    conf.addResource("drivers/hive/hive1/hivedriver-site.xml");
     conf.setClass(HiveDriver.HIVE_CONNECTION_CLASS, EmbeddedThriftConnection.class, ThriftConnection.class);
     conf.setClass(HiveDriver.HIVE_QUERY_HOOK_CLASS, MockDriverQueryHook.class, DriverQueryHook.class);
     conf.set("hive.lock.manager", "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager");
     conf.setBoolean(HiveDriver.HS2_CALCULATE_PRIORITY, true);
     driver = new HiveDriver();
-    driver.configure(conf);
+    driver.configure(conf, "hive", "hive1");
     drivers = Lists.<LensDriver>newArrayList(driver);
     System.out.println("TestHiveDriver created");
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
index 98edc28..ab5ada9 100644
--- a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
+++ b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestRemoteHiveDriver.java
@@ -133,11 +133,11 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
   protected void createDriver() throws LensException {
     dataBase = TestRemoteHiveDriver.class.getSimpleName().toLowerCase();
     conf = new HiveConf(remoteConf);
-    conf.addResource("hivedriver-site.xml");
+    conf.addResource("drivers/hive/hive1/hivedriver-site.xml");
     driver = new HiveDriver();
     conf.setBoolean(HiveDriver.HS2_CALCULATE_PRIORITY, true);
     conf.setClass(HiveDriver.HIVE_QUERY_HOOK_CLASS, MockDriverQueryHook.class, DriverQueryHook.class);
-    driver.configure(conf);
+    driver.configure(conf, "hive", "hive1");
     drivers = Lists.<LensDriver>newArrayList(driver);
     System.out.println("TestRemoteHiveDriver created");
   }
@@ -155,7 +155,7 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
     HiveConf thConf = new HiveConf(conf, TestRemoteHiveDriver.class);
     thConf.setLong(HiveDriver.HS2_CONNECTION_EXPIRY_DELAY, 10000);
     final HiveDriver thrDriver = new HiveDriver();
-    thrDriver.configure(thConf);
+    thrDriver.configure(thConf, "hive", "hive1");
     QueryContext ctx = createContext("USE " + dataBase, conf, thrDriver);
     thrDriver.execute(ctx);
 
@@ -235,12 +235,12 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
   public void testHiveDriverPersistence() throws Exception {
     System.out.println("@@@@ start_persistence_test");
     HiveConf driverConf = new HiveConf(remoteConf, TestRemoteHiveDriver.class);
-    driverConf.addResource("hivedriver-site.xml");
+    driverConf.addResource("drivers/hive/hive1/hivedriver-site.xml");
     driverConf.setLong(HiveDriver.HS2_CONNECTION_EXPIRY_DELAY, 10000);
     driverConf.setBoolean(HiveDriver.HS2_CALCULATE_PRIORITY, false);
 
     final HiveDriver oldDriver = new HiveDriver();
-    oldDriver.configure(driverConf);
+    oldDriver.configure(driverConf, "hive", "hive1");
 
     driverConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
     driverConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
@@ -284,7 +284,7 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
     ByteArrayInputStream driverInput = new ByteArrayInputStream(driverBytes.toByteArray());
     HiveDriver newDriver = new HiveDriver();
     newDriver.readExternal(new ObjectInputStream(driverInput));
-    newDriver.configure(driverConf);
+    newDriver.configure(driverConf, "hive", "hive1");
     driverInput.close();
 
     ctx1 = readContext(ctx1bytes, newDriver);
@@ -311,7 +311,7 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
       boolean isDriverAvailable = (ctx.getSelectedDriver() != null);
       out.writeBoolean(isDriverAvailable);
       if (isDriverAvailable) {
-        out.writeUTF(ctx.getSelectedDriver().getClass().getName());
+        out.writeUTF(ctx.getSelectedDriver().getFullyQualifiedName());
       }
     } finally {
       out.flush();
@@ -340,7 +340,7 @@ public class TestRemoteHiveDriver extends TestHiveDriver {
       ctx.setConf(driver.getConf());
       boolean driverAvailable = in.readBoolean();
       if (driverAvailable) {
-        String clsName = in.readUTF();
+        String driverQualifiedName = in.readUTF();
         ctx.setSelectedDriver(driver);
       }
     } finally {

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-driver-hive/src/test/resources/drivers/hive/hive1/hivedriver-site.xml
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/resources/drivers/hive/hive1/hivedriver-site.xml b/lens-driver-hive/src/test/resources/drivers/hive/hive1/hivedriver-site.xml
new file mode 100644
index 0000000..613938d
--- /dev/null
+++ b/lens-driver-hive/src/test/resources/drivers/hive/hive1/hivedriver-site.xml
@@ -0,0 +1,49 @@
+<?xml version="1.0"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+
+  <property>
+     
+    <name>hive.metastore.local</name>
+     
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>hive.metastore.warehouse.dir</name>
+    <value>${project.build.directory}/hive/warehouse</value>
+  </property>
+
+  <property>
+    <name>javax.jdo.option.ConnectionURL</name>
+    <value>jdbc:derby:;databaseName=target/metastore_db;create=true</value>
+    <description>JDBC connect string for a JDBC metastore</description>
+  </property>
+
+  <property>
+    <name>lens.query.result.parent.dir</name>
+    <value>target/lens-results</value>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-driver-hive/src/test/resources/hivedriver-site.xml
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/resources/hivedriver-site.xml b/lens-driver-hive/src/test/resources/hivedriver-site.xml
deleted file mode 100644
index 613938d..0000000
--- a/lens-driver-hive/src/test/resources/hivedriver-site.xml
+++ /dev/null
@@ -1,49 +0,0 @@
-<?xml version="1.0"?>
-<!--
-
-    Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
-
--->
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<configuration>
-
-  <property>
-     
-    <name>hive.metastore.local</name>
-     
-    <value>true</value>
-  </property>
-
-  <property>
-    <name>hive.metastore.warehouse.dir</name>
-    <value>${project.build.directory}/hive/warehouse</value>
-  </property>
-
-  <property>
-    <name>javax.jdo.option.ConnectionURL</name>
-    <value>jdbc:derby:;databaseName=target/metastore_db;create=true</value>
-    <description>JDBC connect string for a JDBC metastore</description>
-  </property>
-
-  <property>
-    <name>lens.query.result.parent.dir</name>
-    <value>target/lens-results</value>
-  </property>
-
-</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
index a8b980f..d3fa904 100644
--- a/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
+++ b/lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
@@ -60,12 +60,14 @@ import org.apache.lens.server.model.LogSegregationContext;
 import org.apache.lens.server.model.MappedDiagnosticLogSegregationContext;
 
 import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.conf.Configuration;
+
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 
 import com.google.common.collect.ImmutableSet;
+
 import lombok.Getter;
 import lombok.NonNull;
 import lombok.Setter;
@@ -75,7 +77,7 @@ import lombok.extern.slf4j.Slf4j;
  * This driver is responsible for running queries against databases which can be queried using the JDBC API.
  */
 @Slf4j
-public class JDBCDriver implements LensDriver {
+public class JDBCDriver extends AbstractLensDriver {
 
   /** The Constant THID. */
   public static final AtomicInteger THID = new AtomicInteger();
@@ -420,17 +422,17 @@ public class JDBCDriver implements LensDriver {
     return conf;
   }
 
-  /**
-   * Configure driver with {@link org.apache.hadoop.conf.Configuration} passed
+  /*
+   * (non-Javadoc)
    *
-   * @param conf The configuration object
-   * @throws LensException the lens exception
+   * @see org.apache.lens.server.api.driver.LensDriver#configure(org.apache.hadoop.conf.Configuration, java.lang.String, java.lang.String)
    */
   @Override
-  public void configure(Configuration conf) throws LensException {
+  public void configure(Configuration conf, String driverType, String driverName) throws LensException {
+    super.configure(conf, driverType, driverName);
     this.conf = new Configuration(conf);
     this.conf.addResource("jdbcdriver-default.xml");
-    this.conf.addResource("jdbcdriver-site.xml");
+    this.conf.addResource(getDriverResourcePath("jdbcdriver-site.xml"));
     init(conf);
     try {
       queryHook = this.conf.getClass(
@@ -440,7 +442,7 @@ public class JDBCDriver implements LensDriver {
       throw new LensException("Can't instantiate driver query hook for hivedriver with given class", e);
     }
     configured = true;
-    log.info("JDBC Driver configured");
+    log.info("JDBC Driver {} configured", getFullyQualifiedName());
   }
 
   /**
@@ -514,9 +516,9 @@ public class JDBCDriver implements LensDriver {
       DummyQueryRewriter.class, QueryRewriter.class);
     try {
       rewriter = queryRewriterClass.newInstance();
-      log.info("Initialized :{}", queryRewriterClass);
+      log.info("{} Initialized :{}", getFullyQualifiedName(), queryRewriterClass);
     } catch (Exception e) {
-      log.error("Unable to create rewriter object", e);
+      log.error("{} Unable to create rewriter object", getFullyQualifiedName(), e);
       throw new LensException(e);
     }
     rewriter.init(conf);
@@ -636,7 +638,7 @@ public class JDBCDriver implements LensDriver {
       explainQuery = rewrittenQuery.replaceAll("select ", "select "
         + explainKeyword + " ");
     }
-    log.info("Explain Query : {}", explainQuery);
+    log.info("{} Explain Query : {}", getFullyQualifiedName(), explainQuery);
     QueryContext explainQueryCtx = QueryContext.createContextWithSingleDriver(explainQuery, null,
       new LensConf(), explainConf, this, explainCtx.getLensSessionIdentifier(), false);
     QueryResult result = null;
@@ -873,7 +875,7 @@ public class JDBCDriver implements LensDriver {
     checkConfigured();
 
     String rewrittenQuery = rewriteQuery(context);
-    log.info("Execute {}", context.getQueryHandle());
+    log.info("{} Execute {}", getFullyQualifiedName(), context.getQueryHandle());
     QueryResult result = executeInternal(context, rewrittenQuery);
     return result.getLensResultSet(true);
 
@@ -919,7 +921,7 @@ public class JDBCDriver implements LensDriver {
       throw new LensException("Query execution rejected: " + context.getQueryHandle() + " reason:" + e.getMessage(), e);
     }
     queryContextMap.put(context.getQueryHandle(), jdbcCtx);
-    log.info("ExecuteAsync: {}", context.getQueryHandle());
+    log.info("{} ExecuteAsync: {}", getFullyQualifiedName(), context.getQueryHandle());
   }
 
   /**
@@ -1032,7 +1034,7 @@ public class JDBCDriver implements LensDriver {
         context.setEndTime(System.currentTimeMillis());
       }
       context.closeResult();
-      log.info("Cancelled query: {}", handle);
+      log.info("{} Cancelled query : {}", getFullyQualifiedName(), handle);
     }
     return cancelResult;
   }
@@ -1053,7 +1055,7 @@ public class JDBCDriver implements LensDriver {
     } finally {
       queryContextMap.remove(handle);
     }
-    log.info("Closed query {}", handle.getHandleId());
+    log.info("{} Closed query {}", getFullyQualifiedName(), handle.getHandleId());
   }
 
   /**
@@ -1069,7 +1071,7 @@ public class JDBCDriver implements LensDriver {
         try {
           closeQuery(query);
         } catch (LensException e) {
-          log.warn("Error closing query : {}", query.getHandleId(), e);
+          log.warn("{} Error closing query : {}", getFullyQualifiedName(), query.getHandleId(), e);
         }
       }
       for (QueryPrepareHandle query : preparedQueries.keySet()) {
@@ -1080,7 +1082,7 @@ public class JDBCDriver implements LensDriver {
             throw new LensException();
           }
         } catch (LensException e) {
-          log.warn("Error closing prapared query : {}", query, e);
+          log.warn("{} Error closing prapared query : {}", getFullyQualifiedName(), query, e);
         }
       }
     } finally {

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
index cf795fa..c412cf0 100644
--- a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
+++ b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
@@ -135,7 +135,7 @@ public class TestColumnarSQLRewriter {
   @BeforeTest
   public void setup() throws Exception {
     conf.addResource("jdbcdriver-default.xml");
-    conf.addResource("jdbcdriver-site.xml");
+    conf.addResource("drivers/jdbc/jdbc1/jdbcdriver-site.xml");
     qtest.init(conf);
 
     List<FieldSchema> factColumns = new ArrayList<>();
@@ -435,8 +435,8 @@ public class TestColumnarSQLRewriter {
     String actual = qtest.rewrite(query, conf, hconf);
 
     String expected = "select ( sales_fact___fact . time_key ), ( time_dim___time_dim . day_of_week ), "
-            + "week((time_dim___time_dim . day )), "
-            + "date(( time_dim___time_dim . day )), ( item_dim___item_dim . item_key ),  "
+            + "week(( time_dim___time_dim . day )), "
+            + "date((time_dim___time_dim . day )), ( item_dim___item_dim . item_key ),  "
             + "case  when (sum(alias2) =  0 ) then  0.0  else sum(alias2) end  as `dollars_sold` , "
             + "format(sum(alias3),  4 ), format(avg(alias4),  '##################.###' ), "
             + "min(alias5), max(alias6) from  (select sales_fact___fact.time_key, "

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJDBCFinal.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJDBCFinal.java b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJDBCFinal.java
index 053e20d..4eee354 100644
--- a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJDBCFinal.java
+++ b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJDBCFinal.java
@@ -75,7 +75,7 @@ public class TestJDBCFinal {
     baseConf.set(JDBCDriverConfConstants.JDBC_EXPLAIN_KEYWORD_PARAM, "explain plan for ");
 
     driver = new JDBCDriver();
-    driver.configure(baseConf);
+    driver.configure(baseConf, "jdbc", "jdbc1");
     assertNotNull(driver);
     assertTrue(driver.configured);
     System.out.println("Driver configured!");

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
index 425bd6f..2ad9fcb 100644
--- a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
+++ b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestJdbcDriver.java
@@ -88,7 +88,7 @@ public class TestJdbcDriver {
     hConf = new HiveConf(baseConf, this.getClass());
 
     driver = new JDBCDriver();
-    driver.configure(baseConf);
+    driver.configure(baseConf, "jdbc", "jdbc1");
 
     assertNotNull(driver);
     assertTrue(driver.configured);
@@ -309,12 +309,12 @@ public class TestJdbcDriver {
     metricConf.set(LensConfConstants.QUERY_METRIC_UNIQUE_ID_CONF_KEY, TestJdbcDriver.class.getSimpleName());
     driver.estimate(createExplainContext(query1, metricConf));
     MetricRegistry reg = LensMetricsRegistry.getStaticRegistry();
-
+    String driverQualifiedName = driver.getFullyQualifiedName();
     Assert.assertTrue(reg.getGauges().keySet().containsAll(Arrays.asList(
-      "lens.MethodMetricGauge.TestJdbcDriver-JDBCDriver-validate-columnar-sql-rewrite",
-      "lens.MethodMetricGauge.TestJdbcDriver-JDBCDriver-validate-jdbc-prepare-statement",
-      "lens.MethodMetricGauge.TestJdbcDriver-JDBCDriver-validate-thru-prepare",
-      "lens.MethodMetricGauge.TestJdbcDriver-JDBCDriver-jdbc-check-allowed-query")));
+      "lens.MethodMetricGauge.TestJdbcDriver-"+driverQualifiledName+"-validate-columnar-sql-rewrite",
+      "lens.MethodMetricGauge.TestJdbcDriver-"+driverQualifiledName+"-validate-jdbc-prepare-statement",
+      "lens.MethodMetricGauge.TestJdbcDriver-"+driverQualifiledName+"-validate-thru-prepare",
+      "lens.MethodMetricGauge.TestJdbcDriver-"+driverQualifiledName+"-jdbc-check-allowed-query")));
   }
 
   @Test

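Since the driver above is configured with type "jdbc" and name "jdbc1", getFullyQualifiedName() yields "jdbc/jdbc1", so an expanded gauge name reads, for example:

    // lens.MethodMetricGauge.TestJdbcDriver-jdbc/jdbc1-validate-thru-prepare
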
http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-driver-jdbc/src/test/resources/drivers/jdbc/jdbc1/jdbcdriver-site.xml
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/resources/drivers/jdbc/jdbc1/jdbcdriver-site.xml b/lens-driver-jdbc/src/test/resources/drivers/jdbc/jdbc1/jdbcdriver-site.xml
new file mode 100644
index 0000000..1202074
--- /dev/null
+++ b/lens-driver-jdbc/src/test/resources/drivers/jdbc/jdbc1/jdbcdriver-site.xml
@@ -0,0 +1,70 @@
+<?xml version="1.0"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+  <property>
+    <name>lens.driver.jdbc.estimate.db.user</name>
+    <value>estimateUser</value>
+  </property>
+
+  <!-- We have to use a different DB for estimate pool, because we are using an inmemory HSQL db.
+  There seem to be some issues regarding sharing of underlying inmemory db with different connection
+  pool. Whichever is constructed later is not able to get connections. -->
+  <property>
+    <name>lens.driver.jdbc.estimate.db.uri</name>
+    <value>jdbc:hsqldb:mem:jdbcTestDBEstimate</value>
+  </property>
+
+  <property>
+    <name>lens.driver.jdbc.estimate.connection.properties</name>
+    <value>random_key=random_value</value>
+    <description>Connection properties for jdbc connection.</description>
+  </property>
+
+
+  <property>
+    <name>lens.driver.jdbc.estimate.pool.max.size</name>
+    <value>50</value>
+  </property>
+
+  <property>
+    <name>lens.driver.jdbc.estimate.pool.idle.time</name>
+    <value>800</value>
+  </property>
+
+  <property>
+    <name>lens.driver.jdbc.estimate.get.connection.timeout</name>
+    <value>25000</value>
+  </property>
+
+  <property>
+    <name>lens.driver.jdbc.estimate.pool.max.statements</name>
+    <value>15</value>
+  </property>
+
+  <property>
+    <name>lens.driver.jdbc.regex.replacement.values</name>
+    <value>weekofyear=week, to_date=date, format_number=format, date_sub\((.*?)\,\s*([0-9]+\s*)\)=date_sub($1\, interval$2 day), date_add\((.*?)\,\s*([0-9]+\s*)\)=date_add($1\, interval $2 day)</value>
+    <description>Rewriting the HQL to optimized sql queries</description>
+  </property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-driver-jdbc/src/test/resources/jdbcdriver-site.xml
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/resources/jdbcdriver-site.xml b/lens-driver-jdbc/src/test/resources/jdbcdriver-site.xml
deleted file mode 100644
index 1202074..0000000
--- a/lens-driver-jdbc/src/test/resources/jdbcdriver-site.xml
+++ /dev/null
@@ -1,70 +0,0 @@
-<?xml version="1.0"?>
-<!--
-
-    Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
-
--->
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<configuration>
-  <property>
-    <name>lens.driver.jdbc.estimate.db.user</name>
-    <value>estimateUser</value>
-  </property>
-
-  <!-- We have to use a different DB for estimate pool, because we are using an inmemory HSQL db.
-  There seem to be some issues regarding sharing of underlying inmemory db with different connection
-  pool. Whichever is constructed later is not able to get connections. -->
-  <property>
-    <name>lens.driver.jdbc.estimate.db.uri</name>
-    <value>jdbc:hsqldb:mem:jdbcTestDBEstimate</value>
-  </property>
-
-  <property>
-    <name>lens.driver.jdbc.estimate.connection.properties</name>
-    <value>random_key=random_value</value>
-    <description>Connection properties for jdbc connection.</description>
-  </property>
-
-
-  <property>
-    <name>lens.driver.jdbc.estimate.pool.max.size</name>
-    <value>50</value>
-  </property>
-
-  <property>
-    <name>lens.driver.jdbc.estimate.pool.idle.time</name>
-    <value>800</value>
-  </property>
-
-  <property>
-    <name>lens.driver.jdbc.estimate.get.connection.timeout</name>
-    <value>25000</value>
-  </property>
-
-  <property>
-    <name>lens.driver.jdbc.estimate.pool.max.statements</name>
-    <value>15</value>
-  </property>
-
-  <property>
-    <name>lens.driver.jdbc.regex.replacement.values</name>
-    <value>weekofyear=week, to_date=date, format_number=format, date_sub\((.*?)\,\s*([0-9]+\s*)\)=date_sub($1\, interval$2 day), date_add\((.*?)\,\s*([0-9]+\s*)\)=date_add($1\, interval $2 day)</value>
-    <description>Rewriting the HQL to optimized sql queries</description>
-  </property>
-</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-ml-lib/src/test/resources/drivers/hive/hive1/hivedriver-site.xml
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/test/resources/drivers/hive/hive1/hivedriver-site.xml b/lens-ml-lib/src/test/resources/drivers/hive/hive1/hivedriver-site.xml
new file mode 100644
index 0000000..f2aed88
--- /dev/null
+++ b/lens-ml-lib/src/test/resources/drivers/hive/hive1/hivedriver-site.xml
@@ -0,0 +1,80 @@
+<?xml version="1.0"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+
+  <property>
+    <name>lens.driver.hive.connection.class</name>
+    <value>org.apache.lens.driver.hive.RemoteThriftConnection</value>
+    <description>The connection class from HiveDriver to HiveServer.</description>
+  </property>
+
+  <property>
+     
+    <name>hive.metastore.local</name>
+     
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>hive.metastore.warehouse.dir</name>
+    <value>${project.build.directory}/hive/warehouse</value>
+  </property>
+
+  <property>
+    <name>javax.jdo.option.ConnectionURL</name>
+    <value>jdbc:derby:;databaseName=target/metastore_db;create=true</value>
+    <description>JDBC connect string for a JDBC metastore</description>
+  </property>
+
+  <property>
+    <name>hive.lock.manager</name>
+    <value>org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.bind.host</name>
+    <value>localhost</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.port</name>
+    <value>12345</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.client.retry.limit</name>
+    <value>3</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.client.connect.retry.limit</name>
+    <value>3</value>
+  </property>
+
+  <property>
+    <name>lens.driver.test.key</name>
+    <value>set</value>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-ml-lib/src/test/resources/lens-site.xml
----------------------------------------------------------------------
diff --git a/lens-ml-lib/src/test/resources/lens-site.xml b/lens-ml-lib/src/test/resources/lens-site.xml
index 3d5dbef..9be7850 100644
--- a/lens-ml-lib/src/test/resources/lens-site.xml
+++ b/lens-ml-lib/src/test/resources/lens-site.xml
@@ -25,7 +25,7 @@
 <configuration>
   <property>
     <name>lens.server.drivers</name>
-    <value>org.apache.lens.driver.hive.HiveDriver</value>
+    <value>hive:org.apache.lens.driver.hive.HiveDriver</value>
   </property>
 
   <property>

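The lens.server.drivers value now pairs a driver type with its implementation class. A hypothetical registration of two driver types under the new scheme (the comma-separated multi-driver form and the JDBC entry are illustrative assumptions, not part of this change):

    Configuration conf = new Configuration();
    conf.set(LensConfConstants.DRIVER_TYPES_AND_CLASSES,
        "hive:org.apache.lens.driver.hive.HiveDriver,jdbc:org.apache.lens.driver.jdbc.JDBCDriver");
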
http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-query-lib/src/test/java/org/apache/lens/lib/query/TestAbstractFileFormatter.java
----------------------------------------------------------------------
diff --git a/lens-query-lib/src/test/java/org/apache/lens/lib/query/TestAbstractFileFormatter.java b/lens-query-lib/src/test/java/org/apache/lens/lib/query/TestAbstractFileFormatter.java
index 40e1cdc..c877516 100644
--- a/lens-query-lib/src/test/java/org/apache/lens/lib/query/TestAbstractFileFormatter.java
+++ b/lens-query-lib/src/test/java/org/apache/lens/lib/query/TestAbstractFileFormatter.java
@@ -251,7 +251,7 @@ public abstract class TestAbstractFileFormatter {
   protected QueryContext createContext(Configuration conf, String queryName) {
     final LensDriver mockDriver = new MockDriver();
     try {
-      mockDriver.configure(conf);
+      mockDriver.configure(conf, null, null);
     } catch (LensException e) {
       Assert.fail(e.getMessage());
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java b/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
index 7ee0749..88e5a01 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
@@ -52,9 +52,9 @@ public final class LensConfConstants {
   public static final String METASTORE_PFX = "lens.metastore.";
 
   /**
-   * The Constant DRIVER_CLASSES.
+   * The Constant DRIVER_TYPES_AND_CLASSES.
    */
-  public static final String DRIVER_CLASSES = SERVER_PFX + "drivers";
+  public static final String DRIVER_TYPES_AND_CLASSES = SERVER_PFX + "drivers";
   /**
    * The Constant DRIVER_SELECTOR_CLASS.
    */
@@ -943,6 +943,21 @@ public final class LensConfConstants {
   public static final int DEFAULT_FETCH_COUNT_SAVED_QUERY_LIST = 20;
 
   /**
+   * The base directory, under lens-server's conf directory, where all driver resources are available.
+   */
+  public static final String DRIVERS_BASE_DIR = "drivers";
+
+  /**
+   * Name of the property that holds the path of the server's "conf" directory.
+   */
+  public static final String CONFIG_LOCATION = "config.location";
+
+  /**
+   * Default location of "conf" directory (wrt to lens-server/bin)
+   */
+  public static final String DEFAULT_CONFIG_LOCATION = "../conf";
+
+  /**
    * The Constant RESULTSET_PURGE_ENABLED.
    */
   public static final String RESULTSET_PURGE_ENABLED = SERVER_PFX + "resultset.purge.enabled";
@@ -981,5 +996,4 @@ public final class LensConfConstants {
    * The Constant DEFAULT_HDFS_OUTPUT_RETENTION.
    */
   public static final String DEFAULT_HDFS_OUTPUT_RETENTION = "1 day";
-
 }

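Taken together, DRIVERS_BASE_DIR, CONFIG_LOCATION and DEFAULT_CONFIG_LOCATION fix where per-driver resources live on disk. For a hypothetical server with one hive and one jdbc driver, the implied layout would be (driver and file names illustrative only):

    ../conf/                          <- config.location, default ../conf wrt lens-server/bin
      drivers/
        hive/
          hive1/hivedriver-site.xml
        jdbc/
          jdbc1/jdbcdriver-site.xml
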
http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server-api/src/main/java/org/apache/lens/server/api/driver/AbstractLensDriver.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/AbstractLensDriver.java b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/AbstractLensDriver.java
new file mode 100644
index 0000000..55f1535
--- /dev/null
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/AbstractLensDriver.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.server.api.driver;
+
+
+import org.apache.lens.server.api.LensConfConstants;
+import org.apache.lens.server.api.error.LensException;
+
+import org.apache.commons.lang.StringUtils;
+
+import org.apache.hadoop.conf.Configuration;
+
+import lombok.Getter;
+
+/**
+ * Abstract class for Lens Driver Implementations. Provides default
+ * implementations and some utility methods for drivers
+ */
+public abstract class AbstractLensDriver implements LensDriver {
+  /**
+   * Separator used for constructing the fully qualified name and the driver resource path.
+   */
+  private static final char SEPARATOR = '/';
+
+  /**
+   * Driver's fully qualified name (examples: hive/hive1, jdbc/mysql1).
+   */
+  @Getter
+  private String fullyQualifiedName = null;
+
+  @Override
+  public void configure(Configuration conf, String driverType, String driverName) throws LensException {
+    if (StringUtils.isBlank(driverType) || StringUtils.isBlank(driverName)) {
+      throw new LensException("Driver Type and Name can not be null or empty");
+    }
+    fullyQualifiedName =  new StringBuilder(driverType).append(SEPARATOR).append(driverName).toString();
+  }
+
+  /**
+   * Gets the path (relative to the lens server's conf location) of a driver resource in the system. This is a utility
+   * method that extending driver implementations can use to build paths for their resources.
+   *
+   * @param resourceName name of the driver resource
+   * @return path of the resource, relative to the lens server's conf location
+   */
+  protected String getDriverResourcePath(String resourceName) {
+    return new StringBuilder(LensConfConstants.DRIVERS_BASE_DIR).append(SEPARATOR).append(getFullyQualifiedName())
+        .append(SEPARATOR).append(resourceName).toString();
+  }
+
+  @Override
+  public String toString() {
+    return getFullyQualifiedName();
+  }
+}

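AbstractLensDriver centralizes the type/name bookkeeping so that concrete drivers only supply their own behaviour. A self-contained sketch of the naming and resource-path pattern it introduces, with the rest of the LensDriver interface stripped away so it compiles standalone (all values illustrative):

    // Mirrors AbstractLensDriver's naming scheme without the rest of the driver API.
    abstract class NamedDriver {
      private static final char SEPARATOR = '/';
      private String fullyQualifiedName;

      void configure(String driverType, String driverName) {
        if (driverType == null || driverType.trim().isEmpty()
            || driverName == null || driverName.trim().isEmpty()) {
          throw new IllegalArgumentException("Driver type and name cannot be null or empty");
        }
        fullyQualifiedName = driverType + SEPARATOR + driverName;
      }

      String getFullyQualifiedName() {
        return fullyQualifiedName;
      }

      // Resources live under drivers/<type>/<name>/ relative to the server conf dir.
      String getDriverResourcePath(String resourceName) {
        return "drivers" + SEPARATOR + getFullyQualifiedName() + SEPARATOR + resourceName;
      }
    }

    public class DriverNamingDemo extends NamedDriver {
      public static void main(String[] args) {
        DriverNamingDemo d = new DriverNamingDemo();
        d.configure("hive", "hive1");
        System.out.println(d.getFullyQualifiedName());                      // hive/hive1
        System.out.println(d.getDriverResourcePath("hivedriver-site.xml")); // drivers/hive/hive1/hivedriver-site.xml
      }
    }
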
http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java
index a5a60d7..c220884 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java
@@ -37,7 +37,6 @@ import com.google.common.collect.ImmutableSet;
  * The Interface LensDriver.
  */
 public interface LensDriver extends Externalizable {
-
   /**
    * Get driver configuration
    */
@@ -47,9 +46,11 @@ public interface LensDriver extends Externalizable {
    * Configure driver with {@link Configuration} passed.
    *
    * @param conf The configuration object
+   * @param driverType Type of the driver (examples: hive, jdbc, el)
+   * @param driverName Name of this driver
    * @throws LensException the lens exception
    */
-  void configure(Configuration conf) throws LensException;
+  void configure(Configuration conf, String driverType, String driverName) throws LensException;
 
   /**
    * Estimate the cost of execution for given query.
@@ -199,4 +200,12 @@ public interface LensDriver extends Externalizable {
    * null is never returned.
    */
   ImmutableSet<WaitingQueriesSelectionPolicy> getWaitingQuerySelectionPolicies();
+
+
+  /**
+   * @return fully qualified name of this driver. This should be unique for each driver instance. This name can be used
+   * for referring to the driver while logging, persisting and restoring driver details, etc.
+   * (Examples: hive/hive1, jdbc/mysql1)
+   */
+  String getFullyQualifiedName();
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
index 0c980a2..2f20113 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
@@ -256,7 +256,7 @@ public abstract class AbstractQueryContext implements Serializable {
       String expMsg = LensUtil.getCauseMessage(e);
       driverQueryContext.setDriverQueryCostEstimateError(e);
       failureCause = new StringBuilder("Driver :")
-        .append(driver.getClass().getName())
+        .append(driver.getFullyQualifiedName())
         .append(" Cause :")
         .append(expMsg)
         .toString();

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java
index feac938..5ff59bd 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/DriverSelectorQueryContext.java
@@ -70,7 +70,7 @@ public class DriverSelectorQueryContext {
       String metricId = ctx.driverSpecificConf.get(LensConfConstants.QUERY_METRIC_UNIQUE_ID_CONF_KEY);
       if (!StringUtils.isBlank(metricId)) {
         ctx.driverSpecificConf.set(LensConfConstants.QUERY_METRIC_DRIVER_STACK_NAME,
-          metricId + "-" + driver.getClass().getSimpleName());
+          metricId + "-" + driver.getFullyQualifiedName());
       }
       ctx.setQuery(userQuery);
       driverQueryContextMap.put(driver, ctx);
@@ -198,7 +198,7 @@ public class DriverSelectorQueryContext {
         log.error("Setting driver plan failed for driver {}", driver, e);
         String expMsg = LensUtil.getCauseMessage(e);
         driverQueryContext.setDriverQueryPlanGenerationError(e);
-        detailedFailureCause.append("\n Driver :").append(driver.getClass().getName());
+        detailedFailureCause.append("\n Driver :").append(driver.getFullyQualifiedName());
         detailedFailureCause.append(" Cause :" + expMsg);
         if (failureCause != null && !failureCause.equals(expMsg)) {
           useBuilder = true;

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server-api/src/main/java/org/apache/lens/server/api/query/FinishedLensQuery.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/FinishedLensQuery.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/FinishedLensQuery.java
index 89053aa..a57a6e4 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/FinishedLensQuery.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/FinishedLensQuery.java
@@ -159,11 +159,11 @@ public class FinishedLensQuery {
   private String queryName;
 
   /**
-   * The selected driver class name.
+   * The selected driver's fully qualified name.
    */
   @Getter
   @Setter
-  private String driverClass;
+  private String driverName;
 
   @Getter
   private LensDriver selectedDriver;
@@ -197,14 +197,14 @@ public class FinishedLensQuery {
     }
     this.selectedDriver = ctx.getSelectedDriver();
     if (null != ctx.getSelectedDriver()) {
-      this.driverClass = ctx.getSelectedDriver().getClass().getName();
+      this.driverName = ctx.getSelectedDriver().getFullyQualifiedName();
     }
   }
 
   public QueryContext toQueryContext(Configuration conf, Collection<LensDriver> drivers) {
 
-    if (null == selectedDriver && null != driverClass) {
-      selectedDriver = getDriverFromClassName(drivers);
+    if (null == selectedDriver && null != driverName) {
+      selectedDriver = getDriverFromName(drivers);
     }
 
     QueryContext qctx =
@@ -223,12 +223,11 @@ public class FinishedLensQuery {
     return qctx;
   }
 
-  private LensDriver getDriverFromClassName(Collection<LensDriver> drivers) {
+  private LensDriver getDriverFromName(Collection<LensDriver> drivers) {
     Iterator<LensDriver> iterator = drivers.iterator();
     while (iterator.hasNext()) {
       LensDriver driver = iterator.next();
-      if (driverClass.equals(driver.getClass().getName())) {
-        //TODO : LENS-123 - Ability to load different instances of same driver class
+      if (driverName.equals(driver.getFullyQualifiedName())) {
         return driver;
       }
     }

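getDriverFromName is what makes the persisted drivername column round-trippable: on restore, the stored name is matched against the fully qualified names of the live drivers. The explicit Iterator loop is equivalent to the tidier enhanced-for form below (a behaviour-preserving sketch, assuming the method falls through to null when nothing matches):

    for (LensDriver driver : drivers) {
      if (driverName.equals(driver.getFullyQualifiedName())) {
        return driver;
      }
    }
    return null;
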
http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server-api/src/main/java/org/apache/lens/server/api/query/PreparedQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/PreparedQueryContext.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/PreparedQueryContext.java
index b6f669b..0b08459 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/PreparedQueryContext.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/PreparedQueryContext.java
@@ -147,10 +147,9 @@ public class PreparedQueryContext extends AbstractQueryContext implements Delaye
    * @return the lens prepared query
    */
   public LensPreparedQuery toPreparedQuery() {
-    return new LensPreparedQuery(prepareHandle, userQuery, preparedTime, preparedUser,
-      getDriverContext().getSelectedDriver() != null ? getDriverContext().getSelectedDriver().getClass()
-        .getCanonicalName() : null, getDriverContext().getSelectedDriverQuery(),
-      lensConf);
+    return new LensPreparedQuery(prepareHandle, userQuery, preparedTime, preparedUser, getDriverContext()
+        .getSelectedDriver() != null ? getDriverContext().getSelectedDriver().getFullyQualifiedName() : null,
+        getDriverContext().getSelectedDriverQuery(), lensConf);
   }
 
   public String getQueryHandleString() {

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java
index 9b491d1..b637665 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java
@@ -306,8 +306,7 @@ public class QueryContext extends AbstractQueryContext {
    */
   public LensQuery toLensQuery() {
     return new LensQuery(queryHandle, userQuery, super.getSubmittedUser(), priority, isPersistent,
-      getSelectedDriver() != null ? getSelectedDriver().getClass()
-        .getCanonicalName() : null,
+      getSelectedDriver() != null ? getSelectedDriver().getFullyQualifiedName() : null,
       getSelectedDriverQuery(),
       status,
       resultSetPath, driverOpHandle, lensConf, submissionTime, launchTime, driverStatus.getDriverStartTime(),

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server-api/src/test/java/org/apache/lens/server/api/driver/MockDriver.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/test/java/org/apache/lens/server/api/driver/MockDriver.java b/lens-server-api/src/test/java/org/apache/lens/server/api/driver/MockDriver.java
index 2d86589..a20cf47 100644
--- a/lens-server-api/src/test/java/org/apache/lens/server/api/driver/MockDriver.java
+++ b/lens-server-api/src/test/java/org/apache/lens/server/api/driver/MockDriver.java
@@ -40,6 +40,7 @@ import org.apache.lens.server.api.query.cost.FactPartitionBasedQueryCost;
 import org.apache.lens.server.api.query.cost.QueryCost;
 
 import org.apache.hadoop.conf.Configuration;
+
 import org.apache.hive.service.cli.ColumnDescriptor;
 
 import com.beust.jcommander.internal.Sets;
@@ -48,7 +49,7 @@ import com.google.common.collect.ImmutableSet;
 /**
  * The Class MockDriver.
  */
-public class MockDriver implements LensDriver {
+public class MockDriver extends AbstractLensDriver {
   private static AtomicInteger mockDriverId = new AtomicInteger();
 
   /**
@@ -77,7 +78,7 @@ public class MockDriver implements LensDriver {
 
   @Override
   public String toString() {
-    return "MockDriver:" + driverId;
+    return getFullyQualifiedName() + ":" + driverId;
   }
 
   @Override
@@ -91,10 +92,15 @@ public class MockDriver implements LensDriver {
    * @see org.apache.lens.server.api.driver.LensDriver#configure(org.apache.hadoop.conf.Configuration)
    */
   @Override
-  public void configure(Configuration conf) throws LensException {
+  public void configure(Configuration conf, String driverType, String driverName) throws LensException {
     this.conf = conf;
     ioTestVal = conf.getInt("mock.driver.test.val", -1);
-    this.conf.addResource("failing-query-driver-site.xml");
+    this.conf.addResource(getDriverResourcePath("failing-query-driver-site.xml"));
+  }
+
+  @Override
+  public String getFullyQualifiedName() {
+    return "mock/fail1";
   }
 
   /**

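Two details in the MockDriver change are easy to miss: configure() overrides AbstractLensDriver without delegating to super, which is why the test callers elsewhere in this patch can pass configure(conf, null, null) without tripping the blank-type check; and with the name hard-coded to mock/fail1, the resource it loads now resolves (per getDriverResourcePath above) to drivers/mock/fail1/failing-query-driver-site.xml.
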
http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server-api/src/test/java/org/apache/lens/server/api/query/MockQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/test/java/org/apache/lens/server/api/query/MockQueryContext.java b/lens-server-api/src/test/java/org/apache/lens/server/api/query/MockQueryContext.java
index fd6b560..7768917 100644
--- a/lens-server-api/src/test/java/org/apache/lens/server/api/query/MockQueryContext.java
+++ b/lens-server-api/src/test/java/org/apache/lens/server/api/query/MockQueryContext.java
@@ -54,7 +54,7 @@ public class MockQueryContext extends QueryContext {
   public static List<LensDriver> getDrivers(Configuration conf) throws LensException {
     List<LensDriver> drivers = Lists.newArrayList();
     MockDriver d = new MockDriver();
-    d.configure(conf);
+    d.configure(conf, null, null);
     drivers.add(d);
     return drivers;
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java b/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java
index 02b652e..a37a4c8 100644
--- a/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java
+++ b/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java
@@ -53,7 +53,7 @@ public class TestAbstractQueryContext {
     String uniqueMetridId = ctx.getConf().get(QUERY_METRIC_UNIQUE_ID_CONF_KEY);
     assertNotNull(uniqueMetridId);
     assertEquals(ctx.getSelectedDriverConf().get(QUERY_METRIC_DRIVER_STACK_NAME),
-      uniqueMetridId + "-" + MockDriver.class.getSimpleName());
+      uniqueMetridId + "-" + new MockDriver().getFullyQualifiedName());
   }
 
   @Test
@@ -70,7 +70,7 @@ public class TestAbstractQueryContext {
     ctx.estimateCostForDrivers();
     MetricRegistry reg = LensMetricsRegistry.getStaticRegistry();
     assertTrue(reg.getGauges().keySet().containsAll(Arrays.asList(
-      "lens.MethodMetricGauge.TestAbstractQueryContext-MockDriver-driverEstimate")));
+      "lens.MethodMetricGauge.TestAbstractQueryContext-"+new MockDriver().getFullyQualifiedName()+"-driverEstimate")));
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/main/java/org/apache/lens/server/query/LensServerDAO.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/LensServerDAO.java b/lens-server/src/main/java/org/apache/lens/server/query/LensServerDAO.java
index b9dd286..d8e654d 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/LensServerDAO.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/LensServerDAO.java
@@ -85,7 +85,7 @@ public class LensServerDAO {
       + "userquery varchar(10000) not null," + "submitter varchar(255) not null," + "starttime bigint, "
       + "endtime bigint," + "result varchar(255)," + "status varchar(255), " + "metadata varchar(100000), "
       + "rows int, " + "filesize bigint, " + "errormessage varchar(10000), " + "driverstarttime bigint, "
-      + "driverendtime bigint, " + "driverclass varchar(10000), "
+      + "driverendtime bigint, " + "drivername varchar(10000), "
       + "queryname varchar(255), " + "submissiontime bigint" + ")";
     try {
       QueryRunner runner = new QueryRunner(ds);
@@ -109,7 +109,7 @@ public class LensServerDAO {
       Connection conn = null;
       String sql = "insert into finished_queries (handle, userquery,submitter,"
         + "starttime,endtime,result,status,metadata,rows,filesize,"
-        + "errormessage,driverstarttime,driverendtime, driverclass, queryname, submissiontime)"
+        + "errormessage,driverstarttime,driverendtime, drivername, queryname, submissiontime)"
         + " values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
       try {
         conn = getConnection();
@@ -117,7 +117,7 @@ public class LensServerDAO {
         runner.update(conn, sql, query.getHandle(), query.getUserQuery(), query.getSubmitter(), query.getStartTime(),
           query.getEndTime(), query.getResult(), query.getStatus(), query.getMetadata(), query.getRows(),
           query.getFileSize(), query.getErrorMessage(), query.getDriverStartTime(), query.getDriverEndTime(),
-          query.getDriverClass(), query.getQueryName(), query.getSubmissionTime());
+          query.getDriverName(), query.getQueryName(), query.getSubmissionTime());
         conn.commit();
       } finally {
         DbUtils.closeQuietly(conn);
@@ -158,7 +158,7 @@ public class LensServerDAO {
    *
    * @param state     the state
    * @param user      the user
-   * @param driverName the driverClass
+   * @param driverName the driver's fully qualified name
    * @param queryName the query name
    * @param fromDate  the from date
    * @param toDate    the to date
@@ -192,7 +192,7 @@ public class LensServerDAO {
       }
 
       if (StringUtils.isNotBlank(driverName)) {
-        filters.add("lower(driverclass)=?");
+        filters.add("lower(drivername)=?");
         params.add(driverName.toLowerCase());
       }
 

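Since createFinishedQueriesTable() only issues CREATE TABLE when finished_queries is absent, an existing metastore keeps its old driverclass column; such deployments would presumably need that column renamed to drivername (with whatever ALTER TABLE syntax the backing database supports) before the updated insert and filter statements can succeed.
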

[42/50] [abbrv] lens git commit: LENS-890 : Adds per-queue and per-priority driver max launched queries constraints

Posted by sh...@apache.org.
LENS-890 : Adds per-queue and per-priority driver max launched queries constraints


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/4d3d2f82
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/4d3d2f82
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/4d3d2f82

Branch: refs/heads/LENS-581
Commit: 4d3d2f82fb93ee4d5c52dc3b4910573953094c0a
Parents: 73f9243
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Tue Dec 15 18:45:08 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Dec 15 18:45:08 2015 +0530

----------------------------------------------------------------------
 .../org/apache/lens/api/util/CommonUtils.java   |  27 ++-
 .../FactPartitionBasedQueryCostCalculator.java  |   8 +-
 .../org/apache/lens/driver/hive/HiveDriver.java |  68 +++---
 .../apache/lens/driver/hive/TestHiveDriver.java | 233 ++++++++++++-------
 .../src/test/resources/priority_tests.data      |   1 +
 .../server/api/driver/AbstractLensDriver.java   |  13 +-
 .../lens/server/api/driver/LensDriver.java      |  13 +-
 .../server/api/query/AbstractQueryContext.java  |   9 +-
 .../lens/server/api/query/QueryContext.java     |  12 +-
 .../MaxConcurrentDriverQueriesConstraint.java   |  54 ++++-
 ...oncurrentDriverQueriesConstraintFactory.java |  49 +++-
 .../api/query/TestAbstractQueryContext.java     |   4 +-
 ...axConcurrentDriverQueriesConstraintTest.java | 181 +++++++++++++-
 .../server/query/QueryExecutionServiceImpl.java |   1 +
 .../ThreadSafeEstimatedQueryCollectionTest.java |   3 +-
 15 files changed, 527 insertions(+), 149 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-api/src/main/java/org/apache/lens/api/util/CommonUtils.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/util/CommonUtils.java b/lens-api/src/main/java/org/apache/lens/api/util/CommonUtils.java
index 38d58c7..119c924 100644
--- a/lens-api/src/main/java/org/apache/lens/api/util/CommonUtils.java
+++ b/lens-api/src/main/java/org/apache/lens/api/util/CommonUtils.java
@@ -27,6 +27,25 @@ public class CommonUtils {
 
   }
 
+  public interface EntryParser<K, V> {
+    K parseKey(String str);
+
+    V parseValue(String str);
+  }
+
+  private static EntryParser<String, String> defaultEntryParser = new EntryParser<String, String>() {
+    @Override
+    public String parseKey(String str) {
+      return str;
+    }
+
+    @Override
+    public String parseValue(String str) {
+      return str;
+    }
+  };
+
+
   /**
    * Splits given String str around non-escaped commas. Then parses each of the split element
    * as map entries in the format `key=value`. Constructs a map of such entries.
@@ -36,7 +55,11 @@ public class CommonUtils {
    * @return parsed map
    */
   public static Map<String, String> parseMapFromString(String str) {
-    Map<String, String> map = new HashMap<>();
+    return parseMapFromString(str, defaultEntryParser);
+  }
+
+  public static <K, V> Map<K, V> parseMapFromString(String str, EntryParser<K, V> parser) {
+    Map<K, V> map = new HashMap<>();
     if (str != null) {
       for (String kv : str.split("(?<!\\\\),")) {
         if (!kv.isEmpty()) {
@@ -49,7 +72,7 @@ public class CommonUtils {
           if (kvArray.length > 1) {
             value = kvArray[1].replaceAll("\\\\,", ",").trim();
           }
-          map.put(key, value);
+          map.put(parser.parseKey(key), parser.parseValue(value));
         }
       }
     }

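The new typed overload lets callers parse delimited config strings directly into non-String maps; the per-queue and per-priority constraint factories in this commit are the natural consumers. A minimal usage sketch (input string and value types are illustrative, not taken from the factories):

    import java.util.Map;

    import org.apache.lens.api.util.CommonUtils;

    public class EntryParserDemo {
      public static void main(String[] args) {
        // Illustrative input: per-priority launch limits as "key=value" pairs.
        Map<String, Integer> limits = CommonUtils.parseMapFromString(
          "NORMAL=5,HIGH=2",
          new CommonUtils.EntryParser<String, Integer>() {
            @Override
            public String parseKey(String str) {
              return str.toUpperCase();
            }

            @Override
            public Integer parseValue(String str) {
              // Assumes every entry carries an explicit value; a bare key would hand null here.
              return Integer.parseInt(str);
            }
          });
        System.out.println(limits); // e.g. {HIGH=2, NORMAL=5}
      }
    }
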
http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-cube/src/main/java/org/apache/lens/cube/query/cost/FactPartitionBasedQueryCostCalculator.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/query/cost/FactPartitionBasedQueryCostCalculator.java b/lens-cube/src/main/java/org/apache/lens/cube/query/cost/FactPartitionBasedQueryCostCalculator.java
index d56e1c7..9fecdbc 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/query/cost/FactPartitionBasedQueryCostCalculator.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/query/cost/FactPartitionBasedQueryCostCalculator.java
@@ -47,8 +47,11 @@ public class FactPartitionBasedQueryCostCalculator implements QueryCostCalculato
    */
 
   @SuppressWarnings("unchecked") // required for (Set<FactPartition>) casting
-  private double getTotalPartitionCost(final AbstractQueryContext queryContext, LensDriver driver)
+  private Double getTotalPartitionCost(final AbstractQueryContext queryContext, LensDriver driver)
     throws LensException {
+    if (queryContext.getDriverRewriterPlan(driver) == null) {
+      return null;
+    }
     double cost = 0;
     for (Map.Entry<String, Set<?>> entry : getAllPartitions(queryContext, driver).entrySet()) {
       // Have to do instanceof check, since it can't be handled by polymorphism.
@@ -86,7 +89,8 @@ public class FactPartitionBasedQueryCostCalculator implements QueryCostCalculato
 
   @Override
   public QueryCost calculateCost(final AbstractQueryContext queryContext, LensDriver driver) throws LensException {
-    return new FactPartitionBasedQueryCost(getTotalPartitionCost(queryContext, driver));
+    Double cost = getTotalPartitionCost(queryContext, driver);
+    return cost == null ? null : new FactPartitionBasedQueryCost(cost);
   }
 
   public Map<String, Set<?>> getAllPartitions(AbstractQueryContext queryContext, LensDriver driver) {

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
index 253cfc4..7391f47 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
@@ -20,8 +20,14 @@ package org.apache.lens.driver.hive;
 
 import static org.apache.lens.server.api.util.LensUtil.getImplementations;
 
-import java.io.*;
-import java.util.*;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.ObjectInput;
+import java.io.ObjectOutput;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import java.util.concurrent.*;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.locks.Lock;
@@ -29,6 +35,7 @@ import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.lens.api.LensConf;
 import org.apache.lens.api.LensSessionHandle;
+import org.apache.lens.api.Priority;
 import org.apache.lens.api.query.QueryHandle;
 import org.apache.lens.api.query.QueryPrepareHandle;
 import org.apache.lens.cube.query.cost.FactPartitionBasedQueryCostCalculator;
@@ -50,7 +57,6 @@ import org.apache.lens.server.api.query.priority.CostToPriorityRangeConf;
 import org.apache.lens.server.api.query.priority.QueryPriorityDecider;
 
 import org.apache.commons.lang.StringUtils;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -66,7 +72,6 @@ import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.type.TypeReference;
 
 import com.google.common.collect.ImmutableSet;
-
 import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
 
@@ -110,6 +115,7 @@ public class HiveDriver extends AbstractLensDriver {
   private HiveConf hiveConf;
 
   /** The hive handles. */
+  @Getter
   private Map<QueryHandle, OperationHandle> hiveHandles = new ConcurrentHashMap<QueryHandle, OperationHandle>();
 
   /** The orphaned hive sessions. */
@@ -383,10 +389,12 @@ public class HiveDriver extends AbstractLensDriver {
 
   private QueryCost calculateQueryCost(AbstractQueryContext qctx) throws LensException {
     if (qctx.isOlapQuery()) {
-      return queryCostCalculator.calculateCost(qctx, this);
-    } else {
-      return new FactPartitionBasedQueryCost(Double.MAX_VALUE);
+      QueryCost cost = queryCostCalculator.calculateCost(qctx, this);
+      if (cost != null) {
+        return cost;
+      }
     }
+    return new FactPartitionBasedQueryCost(Double.MAX_VALUE);
   }
 
   @Override
@@ -548,22 +556,7 @@ public class HiveDriver extends AbstractLensDriver {
       addPersistentPath(ctx);
       Configuration qdconf = ctx.getDriverConf(this);
       qdconf.set("mapred.job.name", ctx.getQueryHandle().toString());
-      //Query is already explained.
-      log.info("whetherCalculatePriority: {}", whetherCalculatePriority);
-      if (whetherCalculatePriority) {
-        try {
-          // Inside try since non-data fetching queries can also be executed by async method.
-          String priority = ctx.calculateCostAndDecidePriority(this, queryCostCalculator, queryPriorityDecider)
-            .toString();
-          qdconf.set("mapred.job.priority", priority);
-          log.info("set priority to {}", priority);
-        } catch (Exception e) {
-          // not failing query launch when setting priority fails
-          // priority will be set to usually NORMAL - the default in underlying system.
-          log.error("could not set priority for lens session id:{} User query: {}", ctx.getLensSessionIdentifier(),
-            ctx.getUserQuery(), e);
-        }
-      }
+      decidePriority(ctx);
       queryHook.preLaunch(ctx);
       SessionHandle sessionHandle = getSession(ctx);
       OperationHandle op = getClient().executeStatementAsync(sessionHandle, ctx.getSelectedDriverQuery(),
@@ -809,6 +802,27 @@ public class HiveDriver extends AbstractLensDriver {
     return selectionPolicies;
   }
 
+  @Override
+  public Priority decidePriority(QueryContext ctx) {
+    if (whetherCalculatePriority && ctx.getDriverConf(this).get("mapred.job.priority") == null) {
+      try {
+        // Inside try since non-data fetching queries can also be executed by async method.
+        Priority priority = ctx.decidePriority(this, queryPriorityDecider);
+        String priorityStr = priority.toString();
+        ctx.getDriverConf(this).set("mapred.job.priority", priorityStr);
+        log.info("set priority to {}", priority);
+        return priority;
+      } catch (Exception e) {
+        // not failing query launch when setting priority fails
+        // priority will be set to usually NORMAL - the default in underlying system.
+        log.error("could not set priority for lens session id:{} User query: {}", ctx.getLensSessionIdentifier(),
+          ctx.getUserQuery(), e);
+        return null;
+      }
+    }
+    return null;
+  }
+
   protected CLIServiceClient getClient() throws LensException {
     if (isEmbedded) {
       if (embeddedConnection == null) {
@@ -837,7 +851,7 @@ public class HiveDriver extends AbstractLensDriver {
           thriftConnExpiryQueue.offer(connection);
           threadConnections.put(connectionKey, connection);
           log.info("New thrift connection {} for thread: {} for user: {} connection ID={} on driver:{}",
-              connectionClass, Thread.currentThread().getId(), user, connection.getConnId(), getFullyQualifiedName());
+            connectionClass, Thread.currentThread().getId(), user, connection.getConnId(), getFullyQualifiedName());
         } catch (Exception e) {
           throw new LensException(e);
         }
@@ -939,14 +953,14 @@ public class HiveDriver extends AbstractLensDriver {
           hiveSession = getClient().openSession(ctx.getClusterUser(), "");
           lensToHiveSession.put(sessionDbKey, hiveSession);
           log.info("New hive session for user: {} , lens session: {} , hive session handle: {} , driver : {}",
-              ctx.getClusterUser(), sessionDbKey, hiveSession.getHandleIdentifier(), getFullyQualifiedName());
+            ctx.getClusterUser(), sessionDbKey, hiveSession.getHandleIdentifier(), getFullyQualifiedName());
           for (LensEventListener<DriverEvent> eventListener : driverListeners) {
             try {
               eventListener.onEvent(new DriverSessionStarted(System.currentTimeMillis(), this, lensSession, hiveSession
                 .getSessionId().toString()));
             } catch (Exception exc) {
               log.error("Error sending driver {} start event to listener {}", getFullyQualifiedName(), eventListener,
-                 exc);
+                exc);
             }
           }
         } catch (Exception e) {
@@ -1218,7 +1232,7 @@ public class HiveDriver extends AbstractLensDriver {
       if (isSessionInvalid(exc, session)) {
         // We have to expire previous session
         log.info("{} Hive server session {} for lens session {} has become invalid", getFullyQualifiedName(), session,
-            lensSession);
+          lensSession);
         sessionLock.lock();
         try {
           // We should close all connections and clear the session map since

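The priority block that used to sit inline in query launch is now the overridable decidePriority(): it computes a priority at most once per query (skipping when mapred.job.priority is already set in the driver conf), records it, and swallows failures so a priority problem can never fail the launch. A condensed standalone sketch of that contract, with the Lens types stubbed out:

    import java.util.HashMap;
    import java.util.Map;

    // Standalone sketch of HiveDriver.decidePriority()'s contract.
    public class DecidePriorityDemo {
      enum Priority { VERY_HIGH, HIGH, NORMAL, LOW }

      static final Map<String, String> driverConf = new HashMap<>(); // stands in for ctx.getDriverConf(this)

      static Priority decidePriority(boolean whetherCalculatePriority) {
        if (whetherCalculatePriority && driverConf.get("mapred.job.priority") == null) {
          try {
            Priority priority = Priority.NORMAL; // stands in for ctx.decidePriority(this, queryPriorityDecider)
            driverConf.put("mapred.job.priority", priority.toString());
            return priority;
          } catch (Exception e) {
            return null; // never let a priority failure fail the launch
          }
        }
        return null; // already decided (or disabled): don't recompute
      }

      public static void main(String[] args) {
        System.out.println(decidePriority(true)); // NORMAL; conf now carries mapred.job.priority
        System.out.println(decidePriority(true)); // null: priority already decided, not recomputed
      }
    }
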
http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
index 11efd3c..06552ea 100644
--- a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
+++ b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
@@ -33,7 +33,9 @@ import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.driver.*;
 import org.apache.lens.server.api.driver.DriverQueryStatus.DriverQueryState;
 import org.apache.lens.server.api.error.LensException;
-import org.apache.lens.server.api.query.*;
+import org.apache.lens.server.api.query.ExplainQueryContext;
+import org.apache.lens.server.api.query.PreparedQueryContext;
+import org.apache.lens.server.api.query.QueryContext;
 import org.apache.lens.server.api.query.cost.QueryCost;
 import org.apache.lens.server.api.query.priority.CostRangePriorityDecider;
 import org.apache.lens.server.api.query.priority.CostToPriorityRangeConf;
@@ -41,8 +43,10 @@ import org.apache.lens.server.api.user.MockDriverQueryHook;
 import org.apache.lens.server.api.util.LensUtil;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.ql.HiveDriverRunHook;
@@ -53,6 +57,7 @@ import org.apache.hive.service.cli.ColumnDescriptor;
 
 import org.testng.annotations.*;
 
+import com.beust.jcommander.internal.Maps;
 import com.google.common.collect.Lists;
 
 
@@ -81,6 +86,8 @@ public class TestHiveDriver {
 
   protected String sessionid;
   protected SessionState ss;
+  private CostRangePriorityDecider alwaysNormalPriorityDecider
+    = new CostRangePriorityDecider(new CostToPriorityRangeConf(""));
 
   /**
    * Before test.
@@ -173,6 +180,7 @@ public class TestHiveDriver {
    * @throws Exception the exception
    */
   protected void createTestTable(String tableName) throws Exception {
+    int handleSize = getHandleSize();
     System.out.println("Hadoop Location: " + System.getProperty("hadoop.bin.path"));
     String createTable = "CREATE TABLE IF NOT EXISTS " + tableName + "(ID STRING)" + " TBLPROPERTIES ('"
       + LensConfConstants.STORAGE_COST + "'='500')";
@@ -186,7 +194,7 @@ public class TestHiveDriver {
     context = createContext(dataLoad, conf);
     resultSet = driver.execute(context);
     assertNull(resultSet);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
   }
 
   /**
@@ -196,6 +204,7 @@ public class TestHiveDriver {
    * @throws Exception the exception
    */
   protected void createPartitionedTable(String tableName) throws Exception {
+    int handleSize = getHandleSize();
     System.out.println("Hadoop Location: " + System.getProperty("hadoop.bin.path"));
     String createTable = "CREATE TABLE IF NOT EXISTS " + tableName + "(ID STRING)"
       + " PARTITIONED BY (dt string) TBLPROPERTIES ('"
@@ -212,7 +221,7 @@ public class TestHiveDriver {
     context = createContext(dataLoad, conf);
     resultSet = driver.execute(context);
     assertNull(resultSet);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
   }
 
   // Tests
@@ -241,6 +250,7 @@ public class TestHiveDriver {
    */
   @Test
   public void testTemptable() throws Exception {
+    int handleSize = getHandleSize();
     createTestTable("test_temp");
     conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
     Hive.get(conf).dropTable("test_temp_output");
@@ -248,15 +258,15 @@ public class TestHiveDriver {
     QueryContext context = createContext(query, conf);
     LensResultSet resultSet = driver.execute(context);
     assertNull(resultSet);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
 
     // fetch results from temp table
     String select = "SELECT * FROM test_temp_output";
     context = createContext(select, conf);
     resultSet = driver.execute(context);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
     validateInMemoryResult(resultSet, "test_temp_output");
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
   }
 
   /**
@@ -266,6 +276,7 @@ public class TestHiveDriver {
    */
   @Test
   public void testExecuteQuery() throws Exception {
+    int handleSize = getHandleSize();
     createTestTable("test_execute");
     LensResultSet resultSet = null;
     // Execute a select query
@@ -287,7 +298,7 @@ public class TestHiveDriver {
     context = createContext(select, conf);
     resultSet = driver.execute(context);
     validatePersistentResult(resultSet, TEST_DATA_FILE, context.getHDFSResultDir(), true);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
   }
 
   /**
@@ -383,6 +394,7 @@ public class TestHiveDriver {
    */
   @Test
   public void testExecuteQueryAsync() throws Exception {
+    int handleSize = getHandleSize();
     createTestTable("test_execute_sync");
 
     // Now run a command that would fail
@@ -392,11 +404,11 @@ public class TestHiveDriver {
     failConf.set("hive.exec.driver.run.hooks", FailHook.class.getCanonicalName());
     QueryContext context = createContext(expectFail, failConf);
     driver.executeAsync(context);
-    assertEquals(1, driver.getHiveHandleSize());
+    assertHandleSize(handleSize + 1);
     validateExecuteAsync(context, DriverQueryState.FAILED, true, false);
-    assertEquals(1, driver.getHiveHandleSize());
+    assertHandleSize(handleSize + 1);
     driver.closeQuery(context.getQueryHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
     // Async select query
     String select = "SELECT ID FROM test_execute_sync";
     conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
@@ -404,18 +416,18 @@ public class TestHiveDriver {
     driver.executeAsync(context);
     assertNotNull(context.getDriverConf(driver).get("mapred.job.name"));
     assertNotNull(context.getDriverConf(driver).get("mapred.job.priority"));
-    assertEquals(1, driver.getHiveHandleSize());
+    assertHandleSize(handleSize + 1);
     validateExecuteAsync(context, DriverQueryState.SUCCESSFUL, false, false);
     driver.closeQuery(context.getQueryHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
 
     conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
     context = createContext(select, conf);
     driver.executeAsync(context);
-    assertEquals(1, driver.getHiveHandleSize());
+    assertHandleSize(handleSize + 1);
     validateExecuteAsync(context, DriverQueryState.SUCCESSFUL, true, false);
     driver.closeQuery(context.getQueryHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
 
     conf.set(LensConfConstants.QUERY_OUTPUT_DIRECTORY_FORMAT,
       "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'"
@@ -424,10 +436,10 @@ public class TestHiveDriver {
     select = "SELECT ID, null, ID FROM test_execute_sync";
     context = createContext(select, conf);
     driver.executeAsync(context);
-    assertEquals(1, driver.getHiveHandleSize());
+    assertHandleSize(handleSize + 1);
     validateExecuteAsync(context, DriverQueryState.SUCCESSFUL, true, true);
     driver.closeQuery(context.getQueryHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
   }
 
   /**
@@ -483,6 +495,7 @@ public class TestHiveDriver {
    */
   @Test
   public void testCancelAsyncQuery() throws Exception {
+    int handleSize = getHandleSize();
     createTestTable("test_cancel_async");
     conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
     QueryContext context = createContext("SELECT ID FROM test_cancel_async", conf);
@@ -491,7 +504,7 @@ public class TestHiveDriver {
     driver.updateStatus(context);
     assertEquals(context.getDriverStatus().getState(), DriverQueryState.CANCELED, "Expecting query to be cancelled");
     driver.closeQuery(context.getQueryHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
 
     try {
       driver.cancelQuery(context.getQueryHandle());
@@ -512,7 +525,7 @@ public class TestHiveDriver {
    */
   private void validatePersistentResult(LensResultSet resultSet, String dataFile, Path outptuDir, boolean formatNulls)
     throws Exception {
-    assertTrue(resultSet instanceof HivePersistentResultSet);
+    assertTrue(resultSet instanceof HivePersistentResultSet, "resultset class: " + resultSet.getClass().getName());
     HivePersistentResultSet persistentResultSet = (HivePersistentResultSet) resultSet;
     String path = persistentResultSet.getOutputPath();
 
@@ -567,6 +580,7 @@ public class TestHiveDriver {
    */
   @Test
   public void testPersistentResultSet() throws Exception {
+    int handleSize = getHandleSize();
     createTestTable("test_persistent_result_set");
     conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
     conf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, true);
@@ -574,14 +588,14 @@ public class TestHiveDriver {
     QueryContext ctx = createContext("SELECT ID FROM test_persistent_result_set", conf);
     LensResultSet resultSet = driver.execute(ctx);
     validatePersistentResult(resultSet, TEST_DATA_FILE, ctx.getHDFSResultDir(), false);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
 
     ctx = createContext("SELECT ID FROM test_persistent_result_set", conf);
     driver.executeAsync(ctx);
-    assertEquals(1, driver.getHiveHandleSize());
+    assertHandleSize(handleSize + 1);
     validateExecuteAsync(ctx, DriverQueryState.SUCCESSFUL, true, false);
     driver.closeQuery(ctx.getQueryHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
 
     conf.set(LensConfConstants.QUERY_OUTPUT_DIRECTORY_FORMAT,
       "ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'"
@@ -589,17 +603,17 @@ public class TestHiveDriver {
         + " 'field.delim'=','  ) STORED AS TEXTFILE ");
     ctx = createContext("SELECT ID, null, ID FROM test_persistent_result_set", conf);
     resultSet = driver.execute(ctx);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
     validatePersistentResult(resultSet, TEST_DATA_FILE, ctx.getHDFSResultDir(), true);
     driver.closeQuery(ctx.getQueryHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
 
     ctx = createContext("SELECT ID, null, ID FROM test_persistent_result_set", conf);
     driver.executeAsync(ctx);
-    assertEquals(1, driver.getHiveHandleSize());
+    assertHandleSize(handleSize + 1);
     validateExecuteAsync(ctx, DriverQueryState.SUCCESSFUL, true, true);
     driver.closeQuery(ctx.getQueryHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
   }
 
   /**
@@ -640,6 +654,22 @@ public class TestHiveDriver {
     SessionState.setCurrentSessionState(ss);
     ExplainQueryContext ctx = createExplainContext("cube SELECT ID FROM test_cube", conf);
     ctx.setOlapQuery(true);
+    ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() {
+      @Override
+      public String getPlan() {
+        return null;
+      }
+
+      @Override
+      public QueryCost getCost() {
+        return null;
+      }
+
+      @Override
+      public Map<String, Set<?>> getPartitions() {
+        return Maps.newHashMap();
+      }
+    });
     QueryCost cost = driver.estimate(ctx);
     assertEquals(cost.getEstimatedResourceUsage(), 0.0);
     cost.getEstimatedExecTimeMillis();
@@ -666,14 +696,14 @@ public class TestHiveDriver {
    */
   @Test
   public void testExplain() throws Exception {
+    int handleSize = getHandleSize();
     SessionState.setCurrentSessionState(ss);
     SessionState.get().setCurrentDatabase(dataBase);
     createTestTable("test_explain");
-
     DriverQueryPlan plan = driver.explain(createExplainContext("SELECT ID FROM test_explain", conf));
     assertTrue(plan instanceof HiveQueryPlan);
     assertEquals(plan.getTableWeight(dataBase + ".test_explain"), 500.0);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
 
     // test execute prepare
     PreparedQueryContext pctx = new PreparedQueryContext("SELECT ID FROM test_explain", null, conf, drivers);
@@ -686,36 +716,37 @@ public class TestHiveDriver {
     plan = driver.explainAndPrepare(pctx);
     QueryContext qctx = createContext(pctx, inConf);
     LensResultSet result = driver.execute(qctx);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
     validateInMemoryResult(result);
 
     // test execute prepare async
+    conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
     qctx = createContext(pctx, conf);
     driver.executeAsync(qctx);
     assertNotNull(qctx.getDriverOpHandle());
     validateExecuteAsync(qctx, DriverQueryState.SUCCESSFUL, true, false);
-    assertEquals(1, driver.getHiveHandleSize());
+    assertHandleSize(handleSize + 1);
 
     driver.closeQuery(qctx.getQueryHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
 
     // for backward compatibility
     qctx = createContext(pctx, inConf);
     qctx.setQueryHandle(new QueryHandle(pctx.getPrepareHandle().getPrepareHandleId()));
     result = driver.execute(qctx);
     assertNotNull(qctx.getDriverOpHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
     validateInMemoryResult(result);
     // test execute prepare async
     qctx = createContext(pctx, conf);
     qctx.setQueryHandle(new QueryHandle(pctx.getPrepareHandle().getPrepareHandleId()));
     driver.executeAsync(qctx);
-    assertEquals(1, driver.getHiveHandleSize());
+    assertHandleSize(handleSize + 1);
     validateExecuteAsync(qctx, DriverQueryState.SUCCESSFUL, true, false);
 
     driver.closeQuery(qctx.getQueryHandle());
     driver.closePreparedQuery(pctx.getPrepareHandle());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
   }
 
   /**
@@ -725,11 +756,12 @@ public class TestHiveDriver {
    */
   @Test
   public void testExplainPartitionedTable() throws Exception {
+    int handleSize = getHandleSize();
     createPartitionedTable("test_part_table");
     // acquire
     SessionState.setCurrentSessionState(ss);
     DriverQueryPlan plan = driver.explain(createExplainContext("SELECT ID FROM test_part_table", conf));
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
     assertTrue(plan instanceof HiveQueryPlan);
     assertNotNull(plan.getTablesQueried());
     assertEquals(plan.getTablesQueried().size(), 1);
@@ -749,15 +781,15 @@ public class TestHiveDriver {
    */
   @Test
   public void testExplainOutput() throws Exception {
+    int handleSize = getHandleSize();
     createTestTable("explain_test_1");
     createTestTable("explain_test_2");
-
     SessionState.setCurrentSessionState(ss);
     DriverQueryPlan plan = driver.explain(createExplainContext("SELECT explain_test_1.ID, count(1) FROM "
       + " explain_test_1  join explain_test_2 on explain_test_1.ID = explain_test_2.ID"
       + " WHERE explain_test_1.ID = 'foo' or explain_test_2.ID = 'bar'" + " GROUP BY explain_test_1.ID", conf));
 
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
     assertTrue(plan instanceof HiveQueryPlan);
     assertNotNull(plan.getTablesQueried());
     assertEquals(plan.getTablesQueried().size(), 2);
@@ -775,6 +807,7 @@ public class TestHiveDriver {
    */
   @Test
   public void testExplainOutputPersistent() throws Exception {
+    int handleSize = getHandleSize();
     createTestTable("explain_test_1");
     conf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
     SessionState.setCurrentSessionState(ss);
@@ -784,19 +817,36 @@ public class TestHiveDriver {
     pctx.setLensSessionIdentifier(sessionid);
     DriverQueryPlan plan2 = driver.explainAndPrepare(pctx);
     // assertNotNull(plan2.getResultDestination());
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
     assertNotNull(plan2.getTablesQueried());
     assertEquals(plan2.getTablesQueried().size(), 1);
     assertTrue(plan2.getTableWeights().containsKey(dataBase + ".explain_test_1"));
     QueryContext ctx = createContext(pctx, conf);
     LensResultSet resultSet = driver.execute(ctx);
-    assertEquals(0, driver.getHiveHandleSize());
+    assertHandleSize(handleSize);
     HivePersistentResultSet persistentResultSet = (HivePersistentResultSet) resultSet;
     String path = persistentResultSet.getOutputPath();
     assertEquals(ctx.getDriverResultPath(), path);
     driver.closeQuery(plan2.getHandle());
   }
 
+  @DataProvider
+  public Object[][] priorityDataProvider() throws IOException, ParseException {
+    BufferedReader br = new BufferedReader(new InputStreamReader(
+      TestHiveDriver.class.getResourceAsStream("/priority_tests.data")));
+    String line;
+    int numTests = Integer.parseInt(br.readLine());
+    Object[][] data = new Object[numTests][2];
+    for (int i = 0; i < numTests; i++) {
+      String[] kv = br.readLine().split("\\s*:\\s*");
+      final Set<FactPartition> partitions = getFactParts(Arrays.asList(kv[0].trim().split("\\s*,\\s*")));
+      final Priority expected = Priority.valueOf(kv[1]);
+      data[i] = new Object[]{partitions, expected};
+    }
+    return data;
+  }
+
+
   /**
    * Testing Duration Based Priority Logic by mocking everything except partitions.
    *
@@ -804,57 +854,47 @@ public class TestHiveDriver {
    * @throws LensException
    * @throws ParseException
    */
-  @Test
-  public void testPriority() throws IOException, LensException, ParseException {
+  @Test(dataProvider = "priorityDataProvider")
+  public void testPriority(final Set<FactPartition> partitions, Priority expected) throws Exception {
     Configuration conf = new Configuration();
-    CostRangePriorityDecider alwaysNormalPriorityDecider =
-      new CostRangePriorityDecider(new CostToPriorityRangeConf(""));
-    BufferedReader br = new BufferedReader(new InputStreamReader(
-      TestHiveDriver.class.getResourceAsStream("/priority_tests.data")));
-    String line;
-    int i = 0;
-    while ((line = br.readLine()) != null) {
-      String[] kv = line.split("\\s*:\\s*");
+    QueryContext ctx = createContext("test priority query", conf);
+    ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() {
 
-      final Set<FactPartition> partitions = getFactParts(Arrays.asList(kv[0].trim().split("\\s*,\\s*")));
-      final Priority expected = Priority.valueOf(kv[1]);
-      QueryContext ctx = createContext("test priority query", conf);
-      ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() {
+      @Override
+      public String getPlan() {
+        return null;
+      }
 
-        @Override
-        public String getPlan() {
-          return null;
-        }
+      @Override
+      public QueryCost getCost() {
+        return null;
+      }
+    });
 
-        @Override
-        public QueryCost getCost() {
-          return null;
+    ctx.getDriverContext().getDriverRewriterPlan(driver).getPartitions().putAll(
+      new HashMap<String, Set<FactPartition>>() {
+        {
+          put("table1", partitions);
         }
       });
+    // table weights only for first calculation
+    ctx.getDriverContext().getDriverRewriterPlan(driver).getTableWeights().putAll(
+      new HashMap<String, Double>() {
+        {
+          put("table1", 1.0);
+        }
+      });
+    ctx.setOlapQuery(true);
+    Priority priority = driver.decidePriority(ctx);
+    assertEquals(priority, expected, "cost: " + ctx.getDriverQueryCost(driver) + "priority: " + priority);
+    assertEquals(ctx.decidePriority(driver,
+      alwaysNormalPriorityDecider), Priority.NORMAL);
+  }
 
-      ctx.getDriverContext().getDriverRewriterPlan(driver).getPartitions().putAll(
-        new HashMap<String, Set<FactPartition>>() {
-          {
-            put("table1", partitions);
-          }
-        });
-      if (i < 1) {
-        // table weights only for first calculation
-        ctx.getDriverContext().getDriverRewriterPlan(driver).getTableWeights().putAll(
-          new HashMap<String, Double>() {
-            {
-              put("table1", 1.0);
-            }
-          });
-      }
-      assertEquals(ctx.calculateCostAndDecidePriority(driver, driver.queryCostCalculator,
-        driver.queryPriorityDecider), expected);
-      assertEquals(ctx.calculateCostAndDecidePriority(driver, driver.queryCostCalculator,
-        alwaysNormalPriorityDecider), Priority.NORMAL);
-      i++;
-    }
+  @Test
+  public void testPriorityWithoutFactPartitions() throws LensException {
     // test priority without fact partitions
-    AbstractQueryContext ctx = createContext("test priority query", conf);
+    QueryContext ctx = createContext("test priority query", conf);
     ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() {
 
       @Override
@@ -881,15 +921,25 @@ public class TestHiveDriver {
         }
       });
     ctx.setDriverCost(driver, driver.queryCostCalculator.calculateCost(ctx, driver));
-    assertEquals(Priority.VERY_HIGH, driver.queryPriorityDecider.decidePriority(ctx.getDriverQueryCost(driver)));
-    assertEquals(Priority.NORMAL, alwaysNormalPriorityDecider.decidePriority(ctx.getDriverQueryCost(driver)));
+    assertEquals(driver.decidePriority(ctx), Priority.VERY_HIGH);
+    assertEquals(alwaysNormalPriorityDecider.decidePriority(ctx.getDriverQueryCost(driver)), Priority.NORMAL);
 
     // test priority without rewriter plan
     ctx = createContext("test priority query", conf);
-    ctx.setDriverCost(driver, driver.queryCostCalculator.calculateCost(ctx, driver));
-    assertEquals(Priority.VERY_HIGH, driver.queryPriorityDecider.decidePriority(ctx.getDriverQueryCost(driver)));
-    assertEquals(Priority.NORMAL, alwaysNormalPriorityDecider.decidePriority(ctx.getDriverQueryCost(driver)));
+    ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() {
+      @Override
+      public String getPlan() {
+        return null;
+      }
 
+      @Override
+      public QueryCost getCost() {
+        return null;
+      }
+    });
+    ctx.setDriverCost(driver, driver.queryCostCalculator.calculateCost(ctx, driver));
+    assertEquals(driver.decidePriority(ctx), Priority.VERY_HIGH);
+    assertEquals(alwaysNormalPriorityDecider.decidePriority(ctx.getDriverQueryCost(driver)), Priority.NORMAL);
   }
 
   private Set<FactPartition> getFactParts(List<String> partStrings) throws ParseException {
@@ -915,4 +965,13 @@ public class TestHiveDriver {
     }
     return factParts;
   }
+
+  private int getHandleSize() {
+    return driver.getHiveHandleSize();
+  }
+
+  private void assertHandleSize(int handleSize) {
+    assertEquals(getHandleSize(), handleSize, "Unexpected handle size, all handles: "
+      + driver.getHiveHandles());
+  }
 }
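
As a side note on the test refactor above: the removed while-loop that read priority_tests.data
inline has effectively been replaced by parameterized test methods. A minimal sketch of an
equivalent TestNG data provider is below; it is illustrative only, not the actual LENS-865 code.
The "partition-list : expected-priority" line format comes from the removed loop, the leading
count line added to the data file (see the diff below) is assumed to be consumed separately, and
conversion of partition strings to FactPartition is left to the test method via getFactParts as
before. Assumes the usual java.io/java.util imports plus org.testng.annotations.DataProvider.

    @DataProvider
    public Object[][] priorityCases() throws Exception {
      List<Object[]> cases = new ArrayList<>();
      try (BufferedReader br = new BufferedReader(new InputStreamReader(
          TestHiveDriver.class.getResourceAsStream("/priority_tests.data")))) {
        br.readLine(); // skip the leading count line (assumption about its role)
        String line;
        while ((line = br.readLine()) != null) {
          String[] kv = line.split("\\s*:\\s*");
          cases.add(new Object[]{kv[0].trim().split("\\s*,\\s*"), Priority.valueOf(kv[1])});
        }
      }
      return cases.toArray(new Object[0][]);
    }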

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-driver-hive/src/test/resources/priority_tests.data
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/resources/priority_tests.data b/lens-driver-hive/src/test/resources/priority_tests.data
index 98b82ef..177743e 100644
--- a/lens-driver-hive/src/test/resources/priority_tests.data
+++ b/lens-driver-hive/src/test/resources/priority_tests.data
@@ -1,3 +1,4 @@
+4
 dt 2014-01-02-01: VERY_HIGH
 dt 2013-12,dt 2014-01-01, dt 2014-01-02-00, dt 2014-01-02-01: HIGH
 dt 2013-12,dt 2014-01, dt 2014-02, dt 2014-02-01-00: NORMAL

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server-api/src/main/java/org/apache/lens/server/api/driver/AbstractLensDriver.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/AbstractLensDriver.java b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/AbstractLensDriver.java
index 55f1535..ed1fc43 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/AbstractLensDriver.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/AbstractLensDriver.java
@@ -19,11 +19,12 @@
 package org.apache.lens.server.api.driver;
 
 
+import org.apache.lens.api.Priority;
 import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.error.LensException;
+import org.apache.lens.server.api.query.QueryContext;
 
 import org.apache.commons.lang.StringUtils;
-
 import org.apache.hadoop.conf.Configuration;
 
 import lombok.Getter;
@@ -49,7 +50,7 @@ public abstract class AbstractLensDriver implements LensDriver {
     if (StringUtils.isBlank(driverType) || StringUtils.isBlank(driverName)) {
       throw new LensException("Driver Type and Name can not be null or empty");
     }
-    fullyQualifiedName =  new StringBuilder(driverType).append(SEPARATOR).append(driverName).toString();
+    fullyQualifiedName = new StringBuilder(driverType).append(SEPARATOR).append(driverName).toString();
   }
 
   /**
@@ -61,7 +62,13 @@ public abstract class AbstractLensDriver implements LensDriver {
    */
   protected String getDriverResourcePath(String resourceName) {
     return new StringBuilder(LensConfConstants.DRIVERS_BASE_DIR).append(SEPARATOR).append(getFullyQualifiedName())
-        .append(SEPARATOR).append(resourceName).toString();
+      .append(SEPARATOR).append(resourceName).toString();
+  }
+
+  @Override
+  public Priority decidePriority(QueryContext queryContext) {
+    // no-op by default
+    return null;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java
index c220884..3d38ddd 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/driver/LensDriver.java
@@ -20,11 +20,14 @@ package org.apache.lens.server.api.driver;
 
 import java.io.Externalizable;
 
+import org.apache.lens.api.Priority;
 import org.apache.lens.api.query.QueryHandle;
 import org.apache.lens.api.query.QueryPrepareHandle;
 import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.events.LensEventListener;
-import org.apache.lens.server.api.query.*;
+import org.apache.lens.server.api.query.AbstractQueryContext;
+import org.apache.lens.server.api.query.PreparedQueryContext;
+import org.apache.lens.server.api.query.QueryContext;
 import org.apache.lens.server.api.query.collect.WaitingQueriesSelectionPolicy;
 import org.apache.lens.server.api.query.constraint.QueryLaunchingConstraint;
 import org.apache.lens.server.api.query.cost.QueryCost;
@@ -208,4 +211,12 @@ public interface LensDriver extends Externalizable {
    * (Examples: hive/hive1, jdbc/mysql1 )
    */
   String getFullyQualifiedName();
+
+  /**
+   * Decide priority based on the query's cost. The cost should already have been computed by an
+   * estimate call, but that is not guaranteed; it is up to the driver to compute the cost on demand.
+   * @see QueryContext#decidePriority(LensDriver, QueryPriorityDecider) which handles this on-demand computation.
+   * @param queryContext the query context whose priority is to be decided
+   */
+  Priority decidePriority(QueryContext queryContext);
 }
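
Since the default implementation in AbstractLensDriver is a no-op returning null, a driver that
supports priorities is expected to override this method. A hedged sketch of such an override
(hypothetical driver class, not actual Lens code) that leans on QueryContext's on-demand cost
computation:

    // Hypothetical driver; assumes a QueryPriorityDecider supplied by the subclass.
    public abstract class CostBasedDriver extends AbstractLensDriver {
      private final QueryPriorityDecider priorityDecider;

      protected CostBasedDriver(QueryPriorityDecider priorityDecider) {
        this.priorityDecider = priorityDecider;
      }

      @Override
      public Priority decidePriority(QueryContext queryContext) {
        try {
          // QueryContext.decidePriority computes the driver cost on demand before deciding.
          return queryContext.decidePriority(this, priorityDecider);
        } catch (LensException e) {
          return null; // illustrative fallback: no priority decision on failure
        }
      }
    }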

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
index 2f20113..62ed293 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/AbstractQueryContext.java
@@ -19,7 +19,10 @@
 package org.apache.lens.server.api.query;
 
 import java.io.Serializable;
-import java.util.*;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
@@ -206,6 +209,10 @@ public abstract class AbstractQueryContext implements Serializable {
     return getDriverContext().getDriverRewriterPlan(driver);
   }
 
+  public String getQueue() {
+    return getConf().get(LensConfConstants.MAPRED_JOB_QUEUE_NAME);
+  }
+
   /**
    * Runnable to wrap estimate computation for a driver. Failure cause and success status
    * are stored as field members

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java
index b637665..1269e45 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/QueryContext.java
@@ -34,7 +34,6 @@ import org.apache.lens.server.api.driver.LensDriver;
 import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.query.collect.WaitingQueriesSelectionPolicy;
 import org.apache.lens.server.api.query.constraint.QueryLaunchingConstraint;
-import org.apache.lens.server.api.query.cost.QueryCostCalculator;
 import org.apache.lens.server.api.query.priority.QueryPriorityDecider;
 
 import org.apache.hadoop.conf.Configuration;
@@ -226,7 +225,6 @@ public class QueryContext extends AbstractQueryContext {
     this.submissionTime = submissionTime;
     this.queryHandle = new QueryHandle(UUID.randomUUID());
     this.status = new QueryStatus(0.0f, null, Status.NEW, "Query just got created", false, null, null, null);
-    this.priority = Priority.NORMAL;
     this.lensConf = qconf;
     this.conf = conf;
     this.isPersistent = conf.getBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_SET,
@@ -429,15 +427,17 @@ public class QueryContext extends AbstractQueryContext {
   }
 
   public Priority decidePriority(LensDriver driver, QueryPriorityDecider queryPriorityDecider) throws LensException {
+    // On-demand re-computation of cost, in case it's not already set by a previous estimate call.
+    // In driver test cases, estimate doesn't happen, so this code path ensures the cost is computed
+    // and the priority is decided based on the correct cost.
+    calculateCost(driver);
     priority = queryPriorityDecider.decidePriority(getDriverQueryCost(driver));
     return priority;
   }
 
-  public Priority calculateCostAndDecidePriority(LensDriver driver, QueryCostCalculator queryCostCalculator,
-    QueryPriorityDecider queryPriorityDecider) throws LensException {
+  private void calculateCost(LensDriver driver) throws LensException {
     if (getDriverQueryCost(driver) == null) {
-      setDriverCost(driver, queryCostCalculator.calculateCost(this, driver));
+      setDriverCost(driver, driver.estimate(this));
     }
-    return decidePriority(driver, queryPriorityDecider);
   }
 }
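
With this change, callers no longer supply a QueryCostCalculator; the cost falls back to
driver.estimate(this) when absent. The resulting call pattern, in the style of the test updates
further below:

    // Sketch only; createContext and MockQueryPriorityDecider are the test helpers seen
    // elsewhere in this patch.
    QueryContext ctx = createContext("test priority query", conf);
    Priority p = ctx.decidePriority(ctx.getSelectedDriver(), new MockQueryPriorityDecider());
    // After the call, getDriverQueryCost(selectedDriver) is non-null: it was either set by an
    // earlier estimate or computed on demand inside decidePriority.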

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraint.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraint.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraint.java
index bae2e64..58ebd9a 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraint.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraint.java
@@ -19,29 +19,71 @@
 
 package org.apache.lens.server.api.query.constraint;
 
+import java.util.Map;
+
+import org.apache.lens.api.Priority;
 import org.apache.lens.server.api.driver.LensDriver;
 import org.apache.lens.server.api.query.QueryContext;
 import org.apache.lens.server.api.query.collect.EstimatedImmutableQueryCollection;
 
+import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
+@RequiredArgsConstructor
 public class MaxConcurrentDriverQueriesConstraint implements QueryLaunchingConstraint {
 
   private final int maxConcurrentQueries;
-
-  public MaxConcurrentDriverQueriesConstraint(final int maxConcurrentQueries) {
-    this.maxConcurrentQueries = maxConcurrentQueries;
-  }
+  private final Map<String, Integer> maxConcurrentQueriesPerQueue;
+  private final Map<Priority, Integer> maxConcurrentQueriesPerPriority;
 
   @Override
   public boolean allowsLaunchOf(
     final QueryContext candidateQuery, final EstimatedImmutableQueryCollection launchedQueries) {
 
     final LensDriver selectedDriver = candidateQuery.getSelectedDriver();
-    final boolean canLaunch = (launchedQueries.getQueriesCount(selectedDriver) < maxConcurrentQueries);
-
+    final boolean canLaunch = (launchedQueries.getQueriesCount(selectedDriver) < maxConcurrentQueries)
+      && canLaunchWithQueueConstraint(candidateQuery, launchedQueries)
+      && canLaunchWithPriorityConstraint(candidateQuery, launchedQueries);
     log.debug("canLaunch:{}", canLaunch);
     return canLaunch;
   }
+
+  private boolean canLaunchWithQueueConstraint(QueryContext candidateQuery, EstimatedImmutableQueryCollection
+    launchedQueries) {
+    if (maxConcurrentQueriesPerQueue == null) {
+      return true;
+    }
+    String queue = candidateQuery.getQueue();
+    Integer limit = maxConcurrentQueriesPerQueue.get(queue);
+    if (limit == null) {
+      return true;
+    }
+    int launchedOnQueue = 0;
+    for (QueryContext context : launchedQueries.getQueries(candidateQuery.getSelectedDriver())) {
+      if (context.getQueue().equals(queue)) {
+        launchedOnQueue++;
+      }
+    }
+    return launchedOnQueue < limit;
+  }
+
+  private boolean canLaunchWithPriorityConstraint(QueryContext candidateQuery, EstimatedImmutableQueryCollection
+    launchedQueries) {
+    if (maxConcurrentQueriesPerPriority == null) {
+      return true;
+    }
+    Priority priority = candidateQuery.getPriority();
+    Integer limit = maxConcurrentQueriesPerPriority.get(priority);
+    if (limit == null) {
+      return true;
+    }
+    int launchedOnPriority = 0;
+    for (QueryContext context : launchedQueries.getQueries(candidateQuery.getSelectedDriver())) {
+      if (context.getPriority().equals(priority)) {
+        launchedOnPriority++;
+      }
+    }
+    return launchedOnPriority < limit;
+  }
 }
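
For a concrete sense of the combined checks, a small construction sketch (limits chosen to match
the tests below):

    Map<String, Integer> queueLimits = new HashMap<>();
    queueLimits.put("q1", 2);
    queueLimits.put("q2", 3);
    Map<Priority, Integer> priorityLimits = new HashMap<>();
    priorityLimits.put(Priority.NORMAL, 2);
    priorityLimits.put(Priority.HIGH, 3);
    QueryLaunchingConstraint constraint =
        new MaxConcurrentDriverQueriesConstraint(4, queueLimits, priorityLimits);
    // allowsLaunchOf returns true only when the overall count, the candidate's queue count and
    // the candidate's priority count are all below their respective limits; a null map disables
    // that particular check.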

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintFactory.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintFactory.java b/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintFactory.java
index b6e6c2f..6db7da7 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintFactory.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintFactory.java
@@ -19,21 +19,60 @@
 
 package org.apache.lens.server.api.query.constraint;
 
-import static java.lang.Integer.parseInt;
+import static org.apache.lens.api.util.CommonUtils.parseMapFromString;
 
+import java.util.Map;
+
+import org.apache.lens.api.Priority;
+import org.apache.lens.api.util.CommonUtils.EntryParser;
 import org.apache.lens.server.api.common.ConfigBasedObjectCreationFactory;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 
 public class MaxConcurrentDriverQueriesConstraintFactory
-    implements ConfigBasedObjectCreationFactory<QueryLaunchingConstraint> {
+  implements ConfigBasedObjectCreationFactory<MaxConcurrentDriverQueriesConstraint> {
 
   public static final String MAX_CONCURRENT_QUERIES_KEY = "driver.max.concurrent.launched.queries";
+  private static final String PREFIX = MAX_CONCURRENT_QUERIES_KEY + ".per.";
+  public static final String MAX_CONCURRENT_QUERIES_PER_QUEUE_KEY = PREFIX + "queue";
+  public static final String MAX_CONCURRENT_QUERIES_PER_PRIORITY_KEY = PREFIX + "priority";
+  private static final EntryParser<String, Integer> STRING_INT_PARSER = new EntryParser<String, Integer>() {
+    @Override
+    public String parseKey(String str) {
+      return str;
+    }
+
+    @Override
+    public Integer parseValue(String str) {
+      return Integer.valueOf(str);
+    }
+  };
+  private static final EntryParser<Priority, Integer> PRIORITY_INT_PARSER = new EntryParser<Priority, Integer>() {
+    @Override
+    public Priority parseKey(String str) {
+      return Priority.valueOf(str.toUpperCase());
+    }
+
+    @Override
+    public Integer parseValue(String str) {
+      return Integer.valueOf(str);
+    }
+  };
 
   @Override
-  public QueryLaunchingConstraint create(final Configuration conf) {
+  public MaxConcurrentDriverQueriesConstraint create(final Configuration conf) {
+    String maxConcurrentQueriesValue = conf.get(MAX_CONCURRENT_QUERIES_KEY);
+    Map<String, Integer> maxConcurrentQueriesPerQueue = parseMapFromString(
+      conf.get(MAX_CONCURRENT_QUERIES_PER_QUEUE_KEY), STRING_INT_PARSER);
+    Map<Priority, Integer> maxConcurrentQueriesPerPriority = parseMapFromString(
+      conf.get(MAX_CONCURRENT_QUERIES_PER_PRIORITY_KEY), PRIORITY_INT_PARSER);
+    int maxConcurrentQueries = Integer.MAX_VALUE;
+    if (!StringUtils.isBlank(maxConcurrentQueriesValue)) {
+      maxConcurrentQueries = Integer.parseInt(maxConcurrentQueriesValue);
+    }
+    return new MaxConcurrentDriverQueriesConstraint(maxConcurrentQueries, maxConcurrentQueriesPerQueue,
+      maxConcurrentQueriesPerPriority);
 
-    int maxConcurrentQueries = parseInt(conf.get(MAX_CONCURRENT_QUERIES_KEY));
-    return new MaxConcurrentDriverQueriesConstraint(maxConcurrentQueries);
   }
 }
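
Configuration-wise, the factory appears to expect comma-separated key=value pairs for the new
maps (parseMapFromString from CommonUtils is not shown in this patch, so the exact format is
inferred from the tests below):

    Configuration conf = new Configuration();
    conf.set("driver.max.concurrent.launched.queries", "10");
    conf.set("driver.max.concurrent.launched.queries.per.queue", "q1=2,q2=3");
    conf.set("driver.max.concurrent.launched.queries.per.priority", "NORMAL=2,HIGH=3");
    MaxConcurrentDriverQueriesConstraint constraint =
        new MaxConcurrentDriverQueriesConstraintFactory().create(conf);
    // A blank overall limit defaults to Integer.MAX_VALUE; an absent per-queue or per-priority
    // key leaves that check disabled.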

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java b/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java
index a37a4c8..5af45ed 100644
--- a/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java
+++ b/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java
@@ -33,7 +33,6 @@ import org.apache.lens.server.api.driver.LensDriver;
 import org.apache.lens.server.api.driver.MockDriver;
 import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.metrics.LensMetricsRegistry;
-import org.apache.lens.server.api.query.cost.MockQueryCostCalculator;
 import org.apache.lens.server.api.query.priority.MockQueryPriorityDecider;
 
 import org.apache.hadoop.conf.Configuration;
@@ -101,8 +100,7 @@ public class TestAbstractQueryContext {
   @Test
   public void testPrioritySetting() throws LensException {
     MockQueryContext ctx = new MockQueryContext();
-    Priority p = ctx.calculateCostAndDecidePriority(ctx.getSelectedDriver(), new
-      MockQueryCostCalculator(), new MockQueryPriorityDecider());
+    Priority p = ctx.decidePriority(ctx.getSelectedDriver(), new MockQueryPriorityDecider());
     assertEquals(p, HIGH);
     assertEquals(ctx.getPriority(), HIGH);
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server-api/src/test/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintTest.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/test/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintTest.java b/lens-server-api/src/test/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintTest.java
index 55a2eea..4031122 100644
--- a/lens-server-api/src/test/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintTest.java
+++ b/lens-server-api/src/test/java/org/apache/lens/server/api/query/constraint/MaxConcurrentDriverQueriesConstraintTest.java
@@ -19,10 +19,17 @@
 
 package org.apache.lens.server.api.query.constraint;
 
+import static org.apache.lens.api.Priority.*;
+import static org.apache.lens.server.api.LensServerAPITestUtil.getConfiguration;
+
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 import static org.testng.Assert.assertEquals;
 
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.lens.api.Priority;
 import org.apache.lens.server.api.driver.LensDriver;
 import org.apache.lens.server.api.query.QueryContext;
 import org.apache.lens.server.api.query.collect.EstimatedImmutableQueryCollection;
@@ -30,18 +37,117 @@ import org.apache.lens.server.api.query.collect.EstimatedImmutableQueryCollectio
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
+import junit.framework.Assert;
+import lombok.Data;
+
 public class MaxConcurrentDriverQueriesConstraintTest {
 
+  MaxConcurrentDriverQueriesConstraintFactory factory = new MaxConcurrentDriverQueriesConstraintFactory();
+  QueryLaunchingConstraint constraint = factory.create(getConfiguration(
+    "driver.max.concurrent.launched.queries", 10
+  ));
+  QueryLaunchingConstraint perQueueConstraint = factory.create(getConfiguration(
+    "driver.max.concurrent.launched.queries", 4,
+    "driver.max.concurrent.launched.queries.per.queue", "q1=2,q2=3"
+  ));
+
+  QueryLaunchingConstraint perPriorityConstraint = factory.create(getConfiguration(
+    "driver.max.concurrent.launched.queries", 4,
+    "driver.max.concurrent.launched.queries.per.priority", "NORMAL=2,HIGH=3"
+  ));
+
+  QueryLaunchingConstraint perQueueAndPerPriorityConstraint = factory.create(getConfiguration(
+    "driver.max.concurrent.launched.queries.per.queue", "q1=2,q2=3",
+    "driver.max.concurrent.launched.queries.per.priority", "NORMAL=2,HIGH=3"
+  ));
+
   @DataProvider
   public Object[][] dpTestAllowsLaunchOfQuery() {
-    return new Object[][] { {2, true} , {10, false}, {11, false}};
+    return new Object[][]{{2, true}, {10, false}, {11, false}};
+  }
+
+  @DataProvider
+  public Object[][] dpTestPerQueueConstraints() {
+    return new Object[][]{
+      {queues("q1", "q2"), "q1", true},
+      {queues("q1", "q1"), "q2", true},
+      {queues("q1", "q1"), "q3", true},
+      {queues("q1", "q1", "q1"), "q2", true}, // hypothetical
+      {queues("q1", "q1", "q2"), "q1", false},
+      {queues("q1", "q2", "q2"), "q1", true},
+      {queues("q1", "q2", "q2"), "q2", true},
+      {queues("q1", "q2", "q1", "q2"), "q2", false},
+      {queues("q1", "q2", "q1", "q2"), "q1", false},
+      {queues("q1", "q2", "q1", "q2"), "q3", false},
+    };
+  }
+
+  @DataProvider
+  public Object[][] dpTestPerPriorityConstraints() {
+    return new Object[][]{
+      {priorities(NORMAL, HIGH), NORMAL, true},
+      {priorities(NORMAL, NORMAL), HIGH, true},
+      {priorities(NORMAL, NORMAL), LOW, true},
+      {priorities(NORMAL, NORMAL, NORMAL), HIGH, true}, // hypothetical
+      {priorities(NORMAL, NORMAL, HIGH), NORMAL, false},
+      {priorities(NORMAL, HIGH, HIGH), NORMAL, true},
+      {priorities(NORMAL, HIGH, HIGH), HIGH, true},
+      {priorities(NORMAL, HIGH, NORMAL, HIGH), HIGH, false},
+      {priorities(NORMAL, HIGH, NORMAL, HIGH), NORMAL, false},
+      {priorities(NORMAL, HIGH, NORMAL, HIGH), LOW, false},
+    };
+  }
+
+  @DataProvider
+  public Object[][] dpTestPerQueuePerPriorityConstraints() {
+    return new Object[][]{
+      {queuePriorities("q1", NORMAL, "q2", NORMAL), "q2", NORMAL, false}, // can't launch NORMAL
+      {queuePriorities("q1", NORMAL, "q1", HIGH), "q1", NORMAL, false}, // can't launch on q1
+      {queuePriorities("q1", NORMAL, "q1", HIGH, "q2", HIGH), "q2", NORMAL, true}, // can launch NORMAL on q2
+      {queuePriorities("q1", NORMAL, "q1", HIGH, "q2", HIGH, "q2", HIGH), "q2", NORMAL, true},
+      {queuePriorities("q1", NORMAL, "q1", HIGH, "q2", HIGH, "q2", NORMAL), "q2", NORMAL, false}, // hypothetical
+      {queuePriorities("q1", NORMAL, "q1", HIGH, "q2", HIGH, "q2", HIGH, "q2", NORMAL), "q3", NORMAL, false},
+      {queuePriorities("q1", NORMAL, "q1", HIGH, "q2", HIGH, "q2", HIGH, "q2", NORMAL), "q3", HIGH, false},
+      {queuePriorities("q1", NORMAL, "q1", HIGH, "q2", HIGH, "q2", HIGH, "q2", NORMAL), "q1", LOW, false},
+      {queuePriorities("q1", NORMAL, "q1", HIGH, "q2", HIGH, "q2", HIGH, "q2", NORMAL), "q2", LOW, false},
+      {queuePriorities("q1", NORMAL, "q1", HIGH, "q2", HIGH, "q2", HIGH, "q2", NORMAL), "q3", LOW, true},
+    };
+  }
+
+  @Data
+  public static class QueuePriority {
+    private final String queue;
+    private final Priority priority;
+  }
+
+  private static QueuePriority[] queuePriorities(Object... args) {
+    Assert.assertEquals(args.length % 2, 0);
+    QueuePriority[] queuePriorities = new QueuePriority[args.length / 2];
+    for (int i = 0; i < args.length; i += 2) {
+      queuePriorities[i / 2] = new QueuePriority((String) args[i], (Priority) args[i + 1]);
+    }
+    return queuePriorities;
+  }
+
+  private static String[] queues(Object... args) {
+    String[] queues = new String[args.length];
+    for (int i = 0; i < args.length; i++) {
+      queues[i] = (String) args[i];
+    }
+    return queues;
+  }
+
+  private static Priority[] priorities(Object... args) {
+    Priority[] priorities = new Priority[args.length];
+    for (int i = 0; i < args.length; i++) {
+      priorities[i] = (Priority) args[i];
+    }
+    return priorities;
   }
 
   @Test(dataProvider = "dpTestAllowsLaunchOfQuery")
   public void testAllowsLaunchOfQuery(final int currentDriverLaunchedQueries, final boolean expectedCanLaunch) {
 
-    int maxConcurrentQueries = 10;
-
     QueryContext mockCandidateQuery = mock(QueryContext.class);
     EstimatedImmutableQueryCollection mockLaunchedQueries = mock(EstimatedImmutableQueryCollection.class);
     LensDriver mockDriver = mock(LensDriver.class);
@@ -49,9 +155,76 @@ public class MaxConcurrentDriverQueriesConstraintTest {
     when(mockCandidateQuery.getSelectedDriver()).thenReturn(mockDriver);
     when(mockLaunchedQueries.getQueriesCount(mockDriver)).thenReturn(currentDriverLaunchedQueries);
 
-    QueryLaunchingConstraint constraint = new MaxConcurrentDriverQueriesConstraint(maxConcurrentQueries);
     boolean actualCanLaunch = constraint.allowsLaunchOf(mockCandidateQuery, mockLaunchedQueries);
 
     assertEquals(actualCanLaunch, expectedCanLaunch);
   }
+
+  @Test(dataProvider = "dpTestPerQueueConstraints")
+  public void testPerQueueConstraints(final String[] launchedQueues, final String candidateQueue,
+    final boolean expectedCanLaunch) {
+    EstimatedImmutableQueryCollection mockLaunchedQueries = mock(EstimatedImmutableQueryCollection.class);
+    LensDriver mockDriver = mock(LensDriver.class);
+    Set<QueryContext> launchedQueries = new HashSet<>();
+    for (String queue : launchedQueues) {
+      QueryContext context = mock(QueryContext.class);
+      when(context.getQueue()).thenReturn(queue);
+      launchedQueries.add(context);
+    }
+    when(mockLaunchedQueries.getQueries(mockDriver)).thenReturn(launchedQueries);
+    when(mockLaunchedQueries.getQueriesCount(mockDriver)).thenReturn(launchedQueries.size());
+
+    QueryContext mockCandidateQuery = mock(QueryContext.class);
+    when(mockCandidateQuery.getQueue()).thenReturn(candidateQueue);
+    when(mockCandidateQuery.getSelectedDriver()).thenReturn(mockDriver);
+    boolean actualCanLaunch = perQueueConstraint.allowsLaunchOf(mockCandidateQuery, mockLaunchedQueries);
+
+    assertEquals(actualCanLaunch, expectedCanLaunch);
+  }
+
+  @Test(dataProvider = "dpTestPerPriorityConstraints")
+  public void testPerPriorityConstraints(final Priority[] launchedPriorities, final Priority candidatePriority,
+    final boolean expectedCanLaunch) {
+    EstimatedImmutableQueryCollection mockLaunchedQueries = mock(EstimatedImmutableQueryCollection.class);
+    LensDriver mockDriver = mock(LensDriver.class);
+    Set<QueryContext> launchedQueries = new HashSet<>();
+    for (Priority priority : launchedPriorities) {
+      QueryContext context = mock(QueryContext.class);
+      when(context.getPriority()).thenReturn(priority);
+      launchedQueries.add(context);
+    }
+    when(mockLaunchedQueries.getQueries(mockDriver)).thenReturn(launchedQueries);
+    when(mockLaunchedQueries.getQueriesCount(mockDriver)).thenReturn(launchedQueries.size());
+
+    QueryContext mockCandidateQuery = mock(QueryContext.class);
+    when(mockCandidateQuery.getPriority()).thenReturn(candidatePriority);
+    when(mockCandidateQuery.getSelectedDriver()).thenReturn(mockDriver);
+    boolean actualCanLaunch = perPriorityConstraint.allowsLaunchOf(mockCandidateQuery, mockLaunchedQueries);
+
+    assertEquals(actualCanLaunch, expectedCanLaunch);
+  }
+
+  @Test(dataProvider = "dpTestPerQueuePerPriorityConstraints")
+  public void testPerQueuePerPriorityConstraints(final QueuePriority[] launchedQueuePriorities,
+    final String candidateQueue, final Priority candidatePriority, final boolean expectedCanLaunch) {
+    EstimatedImmutableQueryCollection mockLaunchedQueries = mock(EstimatedImmutableQueryCollection.class);
+    LensDriver mockDriver = mock(LensDriver.class);
+    Set<QueryContext> launchedQueries = new HashSet<>();
+    for (QueuePriority queuePriority : launchedQueuePriorities) {
+      QueryContext context = mock(QueryContext.class);
+      when(context.getQueue()).thenReturn(queuePriority.getQueue());
+      when(context.getPriority()).thenReturn(queuePriority.getPriority());
+      launchedQueries.add(context);
+    }
+    when(mockLaunchedQueries.getQueries(mockDriver)).thenReturn(launchedQueries);
+    when(mockLaunchedQueries.getQueriesCount(mockDriver)).thenReturn(launchedQueries.size());
+
+    QueryContext mockCandidateQuery = mock(QueryContext.class);
+    when(mockCandidateQuery.getQueue()).thenReturn(candidateQueue);
+    when(mockCandidateQuery.getPriority()).thenReturn(candidatePriority);
+    when(mockCandidateQuery.getSelectedDriver()).thenReturn(mockDriver);
+    boolean actualCanLaunch = perQueueAndPerPriorityConstraint.allowsLaunchOf(mockCandidateQuery, mockLaunchedQueries);
+
+    assertEquals(actualCanLaunch, expectedCanLaunch);
+  }
 }
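
The getConfiguration helper used throughout these tests comes from LensServerAPITestUtil, which
is not part of this patch; judging by the call sites, it behaves roughly like the hypothetical
sketch below (alternating key/value varargs, values stringified):

    public static Configuration getConfiguration(Object... args) {
      assert args.length % 2 == 0 : "expected alternating key/value pairs";
      Configuration conf = new Configuration(false);
      for (int i = 0; i < args.length; i += 2) {
        conf.set((String) args[i], String.valueOf(args[i + 1]));
      }
      return conf;
    }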

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
index ffd2d42..2dff9af 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
@@ -1734,6 +1734,7 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
 
     ctx.setLensSessionIdentifier(sessionHandle.getPublicId().toString());
     rewriteAndSelect(ctx);
+    ctx.getSelectedDriver().decidePriority(ctx);
     return submitQuery(ctx);
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/4d3d2f82/lens-server/src/test/java/org/apache/lens/server/query/constraint/ThreadSafeEstimatedQueryCollectionTest.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/constraint/ThreadSafeEstimatedQueryCollectionTest.java b/lens-server/src/test/java/org/apache/lens/server/query/constraint/ThreadSafeEstimatedQueryCollectionTest.java
index 9138f8e..e1bf350 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/constraint/ThreadSafeEstimatedQueryCollectionTest.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/constraint/ThreadSafeEstimatedQueryCollectionTest.java
@@ -21,7 +21,6 @@ package org.apache.lens.server.query.constraint;
 
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
-
 import static org.testng.Assert.assertEquals;
 
 import org.apache.lens.server.api.driver.LensDriver;
@@ -53,7 +52,7 @@ public class ThreadSafeEstimatedQueryCollectionTest {
     LensDriver mockDriver = mock(LensDriver.class);
     LensDriver mockDriver2 = mock(LensDriver.class);
 
-    QueryLaunchingConstraint constraint = new MaxConcurrentDriverQueriesConstraint(maxConcurrentQueries);
+    QueryLaunchingConstraint constraint = new MaxConcurrentDriverQueriesConstraint(maxConcurrentQueries, null, null);
     ThreadSafeEstimatedQueryCollection col = new ThreadSafeEstimatedQueryCollection(new
       DefaultEstimatedQueryCollection(new DefaultQueryCollection()));
 


[26/50] [abbrv] lens git commit: LENS-884 : Fix lens-ship-jars.jar to have version in the assembly as well

Posted by sh...@apache.org.
LENS-884 : Fix lens-ship-jars.jar to have version in the assembly as well


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/404d4518
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/404d4518
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/404d4518

Branch: refs/heads/LENS-581
Commit: 404d4518d9cf96d1a2c41a73da598fc27fece992
Parents: 942f071
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Fri Nov 27 12:46:54 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Fri Nov 27 12:46:54 2015 +0530

----------------------------------------------------------------------
 lens-dist/src/main/assembly/bin-dist.xml |  1 -
 lens-ship-jars/pom.xml                   | 19 -------------------
 pom.xml                                  | 22 ++++++++++++++++++++++
 3 files changed, 22 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/404d4518/lens-dist/src/main/assembly/bin-dist.xml
----------------------------------------------------------------------
diff --git a/lens-dist/src/main/assembly/bin-dist.xml b/lens-dist/src/main/assembly/bin-dist.xml
index 535e991..dede085 100644
--- a/lens-dist/src/main/assembly/bin-dist.xml
+++ b/lens-dist/src/main/assembly/bin-dist.xml
@@ -411,7 +411,6 @@
     <file>
       <source>../lens-ship-jars/target/lens-ship-jars-${project.version}.jar</source>
       <outputDirectory>/server/shipjars/</outputDirectory>
-      <destName>lens-ship-jars.jar</destName>
     </file>
   </files>
 

http://git-wip-us.apache.org/repos/asf/lens/blob/404d4518/lens-ship-jars/pom.xml
----------------------------------------------------------------------
diff --git a/lens-ship-jars/pom.xml b/lens-ship-jars/pom.xml
index b150980..ece22a1 100644
--- a/lens-ship-jars/pom.xml
+++ b/lens-ship-jars/pom.xml
@@ -81,26 +81,7 @@
             </filter>
           </filters>
         </configuration>
-        <executions>
-          <execution>
-            <phase>package</phase>
-            <goals>
-              <goal>shade</goal>
-            </goals>
-            <configuration>
-              <transformers>
-                <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
-                <transformer implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
-                  <resource>log4j.properties</resource>
-                </transformer>
-                <transformer implementation="org.apache.maven.plugins.shade.resource.ApacheLicenseResourceTransformer" />
-                <transformer implementation="org.apache.maven.plugins.shade.resource.ApacheNoticeResourceTransformer" />
-              </transformers>
-            </configuration>
-          </execution>
-        </executions>
       </plugin>
     </plugins>
   </build>
-
 </project>

http://git-wip-us.apache.org/repos/asf/lens/blob/404d4518/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index b19857f..732b3bb 100644
--- a/pom.xml
+++ b/pom.xml
@@ -567,6 +567,28 @@
             </tasks>
           </configuration>
         </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-shade-plugin</artifactId>
+          <executions>
+            <execution>
+              <phase>package</phase>
+              <goals>
+                <goal>shade</goal>
+              </goals>
+              <configuration>
+                <transformers>
+                  <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
+                  <transformer implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
+                    <resource>log4j.properties</resource>
+                  </transformer>
+                  <transformer implementation="org.apache.maven.plugins.shade.resource.ApacheLicenseResourceTransformer" />
+                  <transformer implementation="org.apache.maven.plugins.shade.resource.ApacheNoticeResourceTransformer" />
+                </transformers>
+              </configuration>
+            </execution>
+          </executions>
+        </plugin>
       </plugins>
     </pluginManagement>
     <plugins>


[31/50] [abbrv] lens git commit: LENS-888 : Fix limit of 4000 on total string length of dim attribute names of derived cube

Posted by sh...@apache.org.
LENS-888 : Fix limit of 4000 on total string length of dim attribute names of derived cube


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/22e20220
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/22e20220
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/22e20220

Branch: refs/heads/LENS-581
Commit: 22e202208cf6e7f216321f53a5cdc2cbdc113801
Parents: b63e06c
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Thu Dec 10 11:10:08 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Thu Dec 10 11:10:08 2015 +0530

----------------------------------------------------------------------
 .../apache/lens/cube/metadata/DerivedCube.java  | 27 +++++++++++--------
 .../lens/cube/metadata/MetastoreUtil.java       | 16 ++++++++++-
 .../cube/metadata/TestCubeMetastoreClient.java  | 28 +++++++++++++++++---
 3 files changed, 56 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/22e20220/lens-cube/src/main/java/org/apache/lens/cube/metadata/DerivedCube.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/DerivedCube.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/DerivedCube.java
index 3c30f78..681aa7b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/DerivedCube.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/DerivedCube.java
@@ -137,16 +137,23 @@ public class DerivedCube extends AbstractCubeTable implements CubeInterface {
   @Override
   public void addProperties() {
     super.addProperties();
-    getProperties().put(MetastoreUtil.getCubeMeasureListKey(getName()), StringUtils.join(measures, ",").toLowerCase());
-    getProperties().put(MetastoreUtil.getCubeDimensionListKey(getName()),
-      StringUtils.join(dimensions, ",").toLowerCase());
+    updateMeasureProperties();
+    updateDimAttributeProperties();
     getProperties().put(MetastoreUtil.getParentCubeNameKey(getName()), parent.getName().toLowerCase());
     getProperties().put(MetastoreUtil.getParentCubeNameKey(getName()), parent.getName().toLowerCase());
   }
+  public void updateDimAttributeProperties() {
+    MetastoreUtil.addNameStrings(getProperties(), MetastoreUtil.getCubeDimensionListKey(getName()),
+      MetastoreUtil.getNamedSetFromStringSet(dimensions));
+  }
+  public void updateMeasureProperties() {
+    MetastoreUtil.addNameStrings(getProperties(), MetastoreUtil.getCubeMeasureListKey(getName()),
+      MetastoreUtil.getNamedSetFromStringSet(measures));
+  }
 
   public static Set<String> getMeasures(String name, Map<String, String> props) {
     Set<String> measures = new HashSet<String>();
-    String measureStr = props.get(MetastoreUtil.getCubeMeasureListKey(name));
+    String measureStr = MetastoreUtil.getNamedStringValue(props, MetastoreUtil.getCubeMeasureListKey(name));
     measures.addAll(Arrays.asList(StringUtils.split(measureStr, ',')));
     return measures;
   }
@@ -164,7 +171,7 @@ public class DerivedCube extends AbstractCubeTable implements CubeInterface {
 
   public static Set<String> getDimensions(String name, Map<String, String> props) {
     Set<String> dimensions = new HashSet<String>();
-    String dimStr = props.get(MetastoreUtil.getCubeDimensionListKey(name));
+    String dimStr = MetastoreUtil.getNamedStringValue(props, MetastoreUtil.getCubeDimensionListKey(name));
     dimensions.addAll(Arrays.asList(StringUtils.split(dimStr, ',')));
     return dimensions;
   }
@@ -234,7 +241,7 @@ public class DerivedCube extends AbstractCubeTable implements CubeInterface {
    */
   public void addMeasure(String measure) throws HiveException {
     measures.add(measure.toLowerCase());
-    getProperties().put(MetastoreUtil.getCubeMeasureListKey(getName()), StringUtils.join(measures, ",").toLowerCase());
+    updateMeasureProperties();
   }
 
   /**
@@ -245,8 +252,7 @@ public class DerivedCube extends AbstractCubeTable implements CubeInterface {
    */
   public void addDimension(String dimension) throws HiveException {
     dimensions.add(dimension.toLowerCase());
-    getProperties().put(MetastoreUtil.getCubeDimensionListKey(getName()),
-      StringUtils.join(dimensions, ",").toLowerCase());
+    updateDimAttributeProperties();
   }
 
   /**
@@ -256,8 +262,7 @@ public class DerivedCube extends AbstractCubeTable implements CubeInterface {
    */
   public void removeDimension(String dimName) {
     dimensions.remove(dimName.toLowerCase());
-    getProperties().put(MetastoreUtil.getCubeDimensionListKey(getName()),
-      StringUtils.join(dimensions, ",").toLowerCase());
+    updateDimAttributeProperties();
   }
 
   /**
@@ -267,7 +272,7 @@ public class DerivedCube extends AbstractCubeTable implements CubeInterface {
    */
   public void removeMeasure(String msrName) {
     measures.remove(msrName.toLowerCase());
-    getProperties().put(MetastoreUtil.getCubeMeasureListKey(getName()), StringUtils.join(measures, ",").toLowerCase());
+    updateMeasureProperties();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lens/blob/22e20220/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
index 4b57d95..4ec049c 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
@@ -28,6 +28,8 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 
+import com.google.common.collect.Sets;
+
 public class MetastoreUtil {
   private MetastoreUtil() {
 
@@ -394,11 +396,23 @@ public class MetastoreUtil {
 
   private static final int MAX_PARAM_LENGTH = 3999;
 
+  public static Set<Named> getNamedSetFromStringSet(Set<String> strings) {
+    Set<Named> nameds = Sets.newHashSet();
+    for(final String s: strings) {
+      nameds.add(new Named() {
+        @Override
+        public String getName() {
+          return s;
+        }
+      });
+    }
+    return nameds;
+  }
   public static <E extends Named> void addNameStrings(Map<String, String> props, String key, Collection<E> set) {
     addNameStrings(props, key, set, MAX_PARAM_LENGTH);
   }
 
-  static <E extends Named> void addNameStrings(Map<String, String> props, String key,
+  public static <E extends Named> void addNameStrings(Map<String, String> props, String key,
     Collection<E> set, int maxLength) {
     List<String> namedStrings = getNamedStrs(set, maxLength);
     props.put(key + ".size", String.valueOf(namedStrings.size()));
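
addNameStrings splits the joined name list across numbered properties of at most 3999 characters
each, with a companion "<key>.size" entry, and getNamedStringValue reassembles them. A
reading-side sketch under that assumption (the per-chunk key suffixes ".0", ".1", ... are
inferred, since getNamedStrs and getNamedStringValue are not shown in this hunk):

    static String readNamedStringValue(Map<String, String> props, String key) {
      int chunks = Integer.parseInt(props.get(key + ".size"));
      StringBuilder value = new StringBuilder();
      for (int i = 0; i < chunks; i++) {
        value.append(props.get(key + "." + i)); // assumed per-chunk key naming
      }
      return value.toString();
    }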

http://git-wip-us.apache.org/repos/asf/lens/blob/22e20220/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
index e415a5a..e5dbde7 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
@@ -44,7 +44,10 @@ import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
-import org.apache.hadoop.hive.ql.metadata.*;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.ParseException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
@@ -176,7 +179,11 @@ public class TestCubeMetastoreClient {
     cubeMeasures.add(new ColumnMeasure(
       new FieldSchema("msrcost2", "bigint", "measure with cost"),
       "Measure With cost2", null, "MAX", null, null, null, 100.0, 0.0, 999999999999999999999999999.0));
-
+    Set<CubeMeasure> dummyMeasure = Sets.newHashSet();
+    for (int i = 0; i < 5000; i++) {
+      dummyMeasure.add(new ColumnMeasure(new FieldSchema("dummy_msr" + i, "bigint", "dummy measure " + i),
+        "", null, "SUM", null, null, null, 100.0, 0.0, 999999999999999999999999999.0));
+    }
     cubeDimensions = new HashSet<>();
     List<CubeDimAttribute> locationHierarchy = new ArrayList<>();
     locationHierarchy.add(new ReferencedDimAtrribute(new FieldSchema("zipcode", "int", "zip"), "Zip refer",
@@ -194,6 +201,11 @@ public class TestCubeMetastoreClient {
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("dim1", "string", "basedim")));
     cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("dim2", "id", "ref dim"), "Dim2 refer",
       new TableReference("testdim2", "id")));
+    Set<CubeDimAttribute> dummyDimAttributes = Sets.newHashSet();
+    for (int i = 0; i < 5000; i++) {
+      dummyDimAttributes.add(new BaseDimAttribute(new FieldSchema("dummy_dim" + i, "string", "dummy dim " + i),
+        "dummy_dim" + i, null, null, null, null, regions));
+    }
 
     ExprSpec expr1 = new ExprSpec();
     expr1.setExpr("avg(msr1 + msr2)");
@@ -276,9 +288,17 @@ public class TestCubeMetastoreClient {
     joinChains.add(cityChain);
     cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("zipcityname", "string", "zip city name"),
       "Zip city name", "cityFromZip", "name", null, null, null));
+    cubeMeasures.addAll(dummyMeasure);
+    cubeDimensions.addAll(dummyDimAttributes);
     cube = new Cube(cubeName, cubeMeasures, cubeDimensions, cubeExpressions, joinChains, emptyHashMap, 0.0);
     measures = Sets.newHashSet("msr1", "msr2", "msr3");
+    for(CubeMeasure measure: dummyMeasure) {
+      measures.add(measure.getName());
+    }
     dimensions = Sets.newHashSet("dim1", "dim2", "dim3");
+    for(CubeDimAttribute dimAttribute: dummyDimAttributes) {
+      dimensions.add(dimAttribute.getName());
+    }
     derivedCube = new DerivedCube(derivedCubeName, measures, dimensions, cube);
 
     CUBE_PROPERTIES.put(MetastoreUtil.getCubeTimedDimensionListKey(cubeNameWithProps), "dt,mydate");
@@ -1292,16 +1312,18 @@ public class TestCubeMetastoreClient {
         "complete name differs at element " + i);
     }
   }
+
   private void assertTimeline(EndsAndHolesPartitionTimeline endsAndHolesPartitionTimeline,
     StoreAllPartitionTimeline storeAllPartitionTimeline, UpdatePeriod updatePeriod,
     int firstOffset, int latestOffset, int... holeOffsets) throws LensException {
     Date[] holeDates = new Date[holeOffsets.length];
-    for(int i = 0; i < holeOffsets.length; i++) {
+    for (int i = 0; i < holeOffsets.length; i++) {
       holeDates[i] = getDateWithOffset(holeOffsets[i]);
     }
     assertTimeline(endsAndHolesPartitionTimeline, storeAllPartitionTimeline, updatePeriod,
       getDateWithOffset(firstOffset), getDateWithOffset(latestOffset), holeDates);
   }
+
   private void assertTimeline(EndsAndHolesPartitionTimeline endsAndHolesPartitionTimeline,
     StoreAllPartitionTimeline storeAllPartitionTimeline, UpdatePeriod updatePeriod,
     Date first, Date latest, Date... holes) throws LensException {
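
A quick size check confirms that the 5000 dummy attributes added above exercise the old limit:
the names dummy_dim0 through dummy_dim4999, comma-joined, come to roughly 68,900 characters, far
past the previous 4000-character single-property cap.

    int total = 0;
    for (int i = 0; i < 5000; i++) {
      total += ("dummy_dim" + i).length() + 1; // +1 for the joining comma
    }
    System.out.println(total); // 68890, vs. the old single-property limit of 3999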


[21/50] [abbrv] lens git commit: LENS-871 : Fix Dropping any partition in dimtable is clearing latest cache for that dimtable.

Posted by sh...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
index d938ff7..e415a5a 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
@@ -19,12 +19,12 @@
 
 package org.apache.lens.cube.metadata;
 
-import static org.apache.lens.cube.metadata.UpdatePeriod.DAILY;
-import static org.apache.lens.cube.metadata.UpdatePeriod.HOURLY;
-import static org.apache.lens.cube.metadata.UpdatePeriod.MONTHLY;
+import static org.apache.lens.cube.metadata.MetastoreUtil.*;
+import static org.apache.lens.cube.metadata.UpdatePeriod.*;
+import static org.apache.lens.cube.parse.CubeTestSetup.DateOffsetProvider;
+import static org.apache.lens.server.api.util.LensUtil.getHashMap;
 
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.fail;
+import static org.testng.Assert.*;
 
 import java.util.*;
 
@@ -37,6 +37,7 @@ import org.apache.lens.cube.metadata.timeline.StoreAllPartitionTimeline;
 import org.apache.lens.cube.metadata.timeline.TestPartitionTimelines;
 import org.apache.lens.cube.parse.TimeRange;
 import org.apache.lens.server.api.error.LensException;
+import org.apache.lens.server.api.util.LensUtil;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
@@ -50,13 +51,14 @@ import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
 
-import org.testng.Assert;
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
 
+import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
 
 public class TestCubeMetastoreClient {
 
@@ -71,35 +73,35 @@ public class TestCubeMetastoreClient {
   private static Set<String> dimensions;
   private static Set<CubeMeasure> cubeMeasures;
   private static Set<CubeDimAttribute> cubeDimensions;
+  private static Set<UpdatePeriod> hourlyAndDaily = Sets.newHashSet(HOURLY, DAILY);
   private static final String CUBE_NAME = "testMetastoreCube";
   private static final String CUBE_NAME_WITH_PROPS = "testMetastoreCubeWithProps";
   private static final String DERIVED_CUBE_NAME = "derivedTestMetastoreCube";
   private static final String DERIVED_CUBE_NAME_WITH_PROPS = "derivedTestMetastoreCubeWithProps";
-  private static final Map<String, String> CUBE_PROPERTIES = new HashMap<String, String>();
-  private static Date now;
-  private static Date nowPlus1;
-  private static Date nowPlus2;
-  private static Date nowPlus3;
-  private static Date nowMinus1;
-  private static Date nowMinus2;
-  private static Date nowMinus3;
-  private static Date nowMinus4;
-  private static Date nowMinus5;
+  private static final Map<String, String> CUBE_PROPERTIES = new HashMap<>();
   private static HiveConf conf = new HiveConf(TestCubeMetastoreClient.class);
   private static FieldSchema dtPart = new FieldSchema(getDatePartitionKey(), serdeConstants.STRING_TYPE_NAME,
     "date partition");
+  private static ArrayList<String> datePartKeySingleton = Lists.newArrayList(getDatePartitionKey());
+  private static ArrayList<FieldSchema> datePartSingleton = Lists.newArrayList(getDatePartition());
+  private static Map<String, String> emptyHashMap = ImmutableMap.copyOf(LensUtil.<String, String>getHashMap());
   private static String c1 = "C1";
   private static String c2 = "C2";
   private static String c3 = "C3";
   private static String c4 = "C4";
   private static Dimension zipDim, cityDim, stateDim, countryDim;
-  private static Set<CubeDimAttribute> zipAttrs = new HashSet<CubeDimAttribute>();
-  private static Set<CubeDimAttribute> cityAttrs = new HashSet<CubeDimAttribute>();
-  private static Set<CubeDimAttribute> stateAttrs = new HashSet<CubeDimAttribute>();
-  private static Set<CubeDimAttribute> countryAttrs = new HashSet<CubeDimAttribute>();
-  private static Set<ExprColumn> cubeExpressions = new HashSet<ExprColumn>();
-  private static Set<JoinChain> joinChains = new HashSet<JoinChain>();
-  private static Set<ExprColumn> dimExpressions = new HashSet<ExprColumn>();
+  private static Set<CubeDimAttribute> zipAttrs = new HashSet<>();
+  private static Set<CubeDimAttribute> cityAttrs = new HashSet<>();
+  private static Set<CubeDimAttribute> stateAttrs = new HashSet<>();
+  private static Set<CubeDimAttribute> countryAttrs = new HashSet<>();
+  private static Set<ExprColumn> cubeExpressions = new HashSet<>();
+  private static Set<JoinChain> joinChains = new HashSet<>();
+  private static Set<ExprColumn> dimExpressions = new HashSet<>();
+  private static DateOffsetProvider dateOffsetProvider = new DateOffsetProvider(HOURLY);
+
+  public static Date getDateWithOffset(int i) {
+    return dateOffsetProvider.get(i);
+  }
 
   /**
    * Get the date partition as field schema
@@ -107,7 +109,7 @@ public class TestCubeMetastoreClient {
    * @return FieldSchema
    */
   public static FieldSchema getDatePartition() {
-    return TestCubeMetastoreClient.dtPart;
+    return dtPart;
   }
 
   /**
@@ -119,30 +121,18 @@ public class TestCubeMetastoreClient {
     return StorageConstants.DATE_PARTITION_KEY;
   }
 
+  private static HashMap<String, Date> getTimePartitionByOffsets(Object... args) {
+    for (int i = 1; i < args.length; i += 2) {
+      if (args[i] instanceof Integer) {
+        args[i] = getDateWithOffset((Integer) args[i]);
+      }
+    }
+    return getHashMap(args);
+  }
+
   @BeforeClass
   public static void setup() throws HiveException, AlreadyExistsException, LensException {
     SessionState.start(conf);
-    now = new Date();
-    Calendar cal = Calendar.getInstance();
-    cal.setTime(now);
-    cal.add(Calendar.HOUR_OF_DAY, 1);
-    nowPlus1 = cal.getTime();
-    cal.add(Calendar.HOUR_OF_DAY, 1);
-    nowPlus2 = cal.getTime();
-    cal.add(Calendar.HOUR_OF_DAY, 1);
-    nowPlus3 = cal.getTime();
-    cal.setTime(now);
-    cal.add(Calendar.HOUR, -1);
-    nowMinus1 = cal.getTime();
-    cal.add(Calendar.HOUR, -1);
-    nowMinus2 = cal.getTime();
-    cal.add(Calendar.HOUR, -1);
-    nowMinus3 = cal.getTime();
-    cal.add(Calendar.HOUR, -1);
-    nowMinus4 = cal.getTime();
-    cal.add(Calendar.HOUR, -1);
-    nowMinus5 = cal.getTime();
-
     Database database = new Database();
     database.setName(TestCubeMetastoreClient.class.getSimpleName());
     Hive.get(conf).createDatabase(database);
@@ -157,7 +147,7 @@ public class TestCubeMetastoreClient {
     // Drop the cube
     client.dropCube(CUBE_NAME);
     client = CubeMetastoreClient.getInstance(conf);
-    Assert.assertFalse(client.tableExists(CUBE_NAME));
+    assertFalse(client.tableExists(CUBE_NAME));
 
     Hive.get().dropDatabase(TestCubeMetastoreClient.class.getSimpleName(), true, true, true);
     CubeMetastoreClient.close();
@@ -165,7 +155,7 @@ public class TestCubeMetastoreClient {
 
   private static void defineCube(String cubeName, String cubeNameWithProps, String derivedCubeName,
     String derivedCubeNameWithProps) throws LensException {
-    cubeMeasures = new HashSet<CubeMeasure>();
+    cubeMeasures = new HashSet<>();
     cubeMeasures.add(new ColumnMeasure(
       new FieldSchema("msr1", "int", "first measure"), null, null, null, null, null, null, null, 0.0, 9999.0));
     cubeMeasures.add(new ColumnMeasure(
@@ -176,19 +166,19 @@ public class TestCubeMetastoreClient {
       new FieldSchema("msr4", "bigint", "fourth measure"), "Measure4", null, "COUNT", null));
     cubeMeasures.add(new ColumnMeasure(
       new FieldSchema("msrstarttime", "int", "measure with start time"),
-      "Measure With Starttime", null, null, null, now, null, null, 0.0, 999999.0));
+      "Measure With Starttime", null, null, null, getDateWithOffset(0), null, null, 0.0, 999999.0));
     cubeMeasures.add(new ColumnMeasure(
       new FieldSchema("msrendtime", "float", "measure with end time"),
-      "Measure With Endtime", null, "SUM", "RS", now, now, null));
+      "Measure With Endtime", null, "SUM", "RS", getDateWithOffset(0), getDateWithOffset(0), null));
     cubeMeasures.add(new ColumnMeasure(
       new FieldSchema("msrcost", "double", "measure with cost"), "Measure With cost",
-      null, "MAX", null, now, now, 100.0));
+      null, "MAX", null, getDateWithOffset(0), getDateWithOffset(0), 100.0));
     cubeMeasures.add(new ColumnMeasure(
       new FieldSchema("msrcost2", "bigint", "measure with cost"),
       "Measure With cost2", null, "MAX", null, null, null, 100.0, 0.0, 999999999999999999999999999.0));
 
-    cubeDimensions = new HashSet<CubeDimAttribute>();
-    List<CubeDimAttribute> locationHierarchy = new ArrayList<CubeDimAttribute>();
+    cubeDimensions = new HashSet<>();
+    List<CubeDimAttribute> locationHierarchy = new ArrayList<>();
     locationHierarchy.add(new ReferencedDimAtrribute(new FieldSchema("zipcode", "int", "zip"), "Zip refer",
       new TableReference("zipdim", "zipcode")));
     locationHierarchy.add(new ReferencedDimAtrribute(new FieldSchema("cityid", "int", "city"), "City refer",
@@ -230,13 +220,14 @@ public class TestCubeMetastoreClient {
     cubeExpressions.add(new ExprColumn(new FieldSchema("substrexpr", "string", "a subt string expression"),
       "SUBSTR EXPR", expr1, expr2));
 
-    List<CubeDimAttribute> locationHierarchyWithStartTime = new ArrayList<CubeDimAttribute>();
+    List<CubeDimAttribute> locationHierarchyWithStartTime = new ArrayList<>();
     locationHierarchyWithStartTime.add(new ReferencedDimAtrribute(new FieldSchema("zipcode2", "int", "zip"),
-      "Zip refer2", new TableReference("zipdim", "zipcode"), now, now, 100.0, true, 1000L));
+      "Zip refer2", new TableReference("zipdim", "zipcode"), getDateWithOffset(0), getDateWithOffset(0),
+      100.0, true, 1000L));
     locationHierarchyWithStartTime.add(new ReferencedDimAtrribute(new FieldSchema("cityid2", "int", "city"),
-      "City refer2", new TableReference("citydim", "id"), now, null, null));
+      "City refer2", new TableReference("citydim", "id"), getDateWithOffset(0), null, null));
     locationHierarchyWithStartTime.add(new ReferencedDimAtrribute(new FieldSchema("stateid2", "int", "state"),
-      "state refer2", new TableReference("statedim", "id"), now, null, 100.0));
+      "state refer2", new TableReference("statedim", "id"), getDateWithOffset(0), null, 100.0));
     locationHierarchyWithStartTime.add(new ReferencedDimAtrribute(new FieldSchema("countryid2", "int", "country"),
       "Country refer2", new TableReference("countrydim", "id"), null, null, null));
     locationHierarchyWithStartTime.add(new BaseDimAttribute(new FieldSchema("regionname2", "string", "region"),
@@ -245,11 +236,11 @@ public class TestCubeMetastoreClient {
     cubeDimensions
       .add(new HierarchicalDimAttribute("location2", "localtion hierarchy2", locationHierarchyWithStartTime));
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("dim1startTime", "string", "basedim"),
-      "Dim With starttime", now, null, 100.0));
+      "Dim With starttime", getDateWithOffset(0), null, 100.0));
     cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("dim2start", "string", "ref dim"),
-      "Dim2 with starttime", new TableReference("testdim2", "id"), now, now, 100.0));
+      "Dim2 with starttime", new TableReference("testdim2", "id"), getDateWithOffset(0), getDateWithOffset(0), 100.0));
 
-    List<TableReference> multiRefs = new ArrayList<TableReference>();
+    List<TableReference> multiRefs = new ArrayList<>();
     multiRefs.add(new TableReference("testdim2", "id"));
     multiRefs.add(new TableReference("testdim3", "id"));
     multiRefs.add(new TableReference("testdim4", "id"));
@@ -257,20 +248,20 @@ public class TestCubeMetastoreClient {
     cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("dim3", "string", "multi ref dim"), "Dim3 refer",
       multiRefs));
     cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("dim3start", "string", "multi ref dim"),
-      "Dim3 with starttime", multiRefs, now, null, 100.0));
+      "Dim3 with starttime", multiRefs, getDateWithOffset(0), null, 100.0));
 
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("region", "string", "region dim"), "region", null, null,
       null, null, regions));
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("regionstart", "string", "region dim"),
-      "Region with starttime", now, null, 100.0, null, regions));
+      "Region with starttime", getDateWithOffset(0), null, 100.0, null, regions));
     JoinChain zipCity = new JoinChain("cityFromZip", "Zip City", "zip city desc");
-    List<TableReference> chain = new ArrayList<TableReference>();
+    List<TableReference> chain = new ArrayList<>();
     chain.add(new TableReference(cubeName, "zipcode"));
     chain.add(new TableReference("zipdim", "zipcode"));
     chain.add(new TableReference("zipdim", "cityid"));
     chain.add(new TableReference("citydim", "id"));
     zipCity.addPath(chain);
-    List<TableReference> chain2 = new ArrayList<TableReference>();
+    List<TableReference> chain2 = new ArrayList<>();
     chain2.add(new TableReference(cubeName, "zipcode2"));
     chain2.add(new TableReference("zipdim", "zipcode"));
     chain2.add(new TableReference("zipdim", "cityid"));
@@ -278,23 +269,16 @@ public class TestCubeMetastoreClient {
     zipCity.addPath(chain2);
     joinChains.add(zipCity);
     JoinChain cityChain = new JoinChain("city", "Cube City", "cube city desc");
-    chain = new ArrayList<TableReference>();
+    chain = new ArrayList<>();
     chain.add(new TableReference(cubeName, "cityid"));
     chain.add(new TableReference("citydim", "id"));
     cityChain.addPath(chain);
     joinChains.add(cityChain);
     cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("zipcityname", "string", "zip city name"),
       "Zip city name", "cityFromZip", "name", null, null, null));
-    cube = new Cube(cubeName, cubeMeasures, cubeDimensions, cubeExpressions, joinChains,
-      new HashMap<String, String>(), 0.0);
-    measures = new HashSet<String>();
-    measures.add("msr1");
-    measures.add("msr2");
-    measures.add("msr3");
-    dimensions = new HashSet<String>();
-    dimensions.add("dim1");
-    dimensions.add("dim2");
-    dimensions.add("dim3");
+    cube = new Cube(cubeName, cubeMeasures, cubeDimensions, cubeExpressions, joinChains, emptyHashMap, 0.0);
+    measures = Sets.newHashSet("msr1", "msr2", "msr3");
+    dimensions = Sets.newHashSet("dim1", "dim2", "dim3");
     derivedCube = new DerivedCube(derivedCubeName, measures, dimensions, cube);
 
     CUBE_PROPERTIES.put(MetastoreUtil.getCubeTimedDimensionListKey(cubeNameWithProps), "dt,mydate");
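
[measures and dimensions are now built with Guava's Sets.newHashSet, and the state references further down use Lists.newArrayList; both come from com.google.common.collect and build the collection from varargs in one statement, with the same contents as the removed add() sequences:]

    import com.google.common.collect.Lists;
    import com.google.common.collect.Sets;

    import java.util.List;
    import java.util.Set;

    class GuavaFactoryExample {
      // Equivalent to creating each collection and calling add() per element.
      Set<String> measures = Sets.newHashSet("msr1", "msr2", "msr3");
      List<String> dims = Lists.newArrayList("dim1", "dim2", "dim3");
    }
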
@@ -310,9 +294,8 @@ public class TestCubeMetastoreClient {
     zipAttrs.add(new BaseDimAttribute(new FieldSchema("zipcode", "int", "code")));
     zipAttrs.add(new BaseDimAttribute(new FieldSchema("f1", "string", "field1")));
     zipAttrs.add(new BaseDimAttribute(new FieldSchema("f2", "string", "field1")));
-    List<TableReference> stateRefs = new ArrayList<TableReference>();
-    stateRefs.add(new TableReference("statedim", "id"));
-    stateRefs.add(new TableReference("stateWeatherDim", "id"));
+    List<TableReference> stateRefs = Lists.newArrayList(new TableReference("statedim", "id"),
+      new TableReference("stateWeatherDim", "id"));
     zipAttrs.add(new ReferencedDimAtrribute(new FieldSchema("stateid", "int", "state id"), "State refer", stateRefs));
     zipAttrs.add(new ReferencedDimAtrribute(new FieldSchema("cityid", "int", "city id"), "City refer",
       new TableReference("citydim", "id")));
@@ -330,8 +313,7 @@ public class TestCubeMetastoreClient {
       new ExprSpec("state_and_country", null, null)));
     dimExpressions.add(new ExprColumn(new FieldSchema("CityAddress", "string", "city with state and city and zip"),
       "City Address", "concat(citydim.name, \":\", statedim.name, \":\", countrydim.name, \":\", zipcode.code)"));
-    Map<String, String> dimProps = new HashMap<String, String>();
-    dimProps.put(MetastoreUtil.getDimTimedDimensionKey("citydim"), TestCubeMetastoreClient.getDatePartitionKey());
+    Map<String, String> dimProps = getHashMap(getDimTimedDimensionKey("citydim"), getDatePartitionKey());
     cityDim = new Dimension("citydim", cityAttrs, dimExpressions, dimProps, 0L);
 
     // Define state table
@@ -354,19 +336,19 @@ public class TestCubeMetastoreClient {
   public void testStorage() throws Exception {
     Storage hdfsStorage = new HDFSStorage(c1);
     client.createStorage(hdfsStorage);
-    assertEquals(1, client.getAllStorages().size());
+    assertEquals(client.getAllStorages().size(), 1);
 
     Storage hdfsStorage2 = new HDFSStorage(c2);
     client.createStorage(hdfsStorage2);
-    assertEquals(2, client.getAllStorages().size());
+    assertEquals(client.getAllStorages().size(), 2);
 
     Storage hdfsStorage3 = new HDFSStorage(c3);
     client.createStorage(hdfsStorage3);
-    assertEquals(3, client.getAllStorages().size());
+    assertEquals(client.getAllStorages().size(), 3);
 
     Storage hdfsStorage4 = new HDFSStorage(c4);
     client.createStorage(hdfsStorage4);
-    assertEquals(4, client.getAllStorages().size());
+    assertEquals(client.getAllStorages().size(), 4);
 
     assertEquals(hdfsStorage, client.getStorage(c1));
     assertEquals(hdfsStorage2, client.getStorage(c2));
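
[The swapped assertEquals arguments in this hunk are deliberate: these tests use TestNG, whose signature is assertEquals(actual, expected), the reverse of JUnit's, so a failure message only reads correctly when the actual value comes first. A small illustration:]

    import static org.testng.Assert.assertEquals;

    class AssertOrderExample {
      void check(int actualSize) {
        // On mismatch TestNG reports "expected [4] but found [<actualSize>]";
        // with the arguments reversed, the two numbers in that message
        // would be swapped.
        assertEquals(actualSize, 4);
      }
    }
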
@@ -382,9 +364,9 @@ public class TestCubeMetastoreClient {
     client.createDimension(countryDim);
 
     assertEquals(client.getAllDimensions().size(), 4);
-    Assert.assertTrue(client.tableExists(cityDim.getName()));
-    Assert.assertTrue(client.tableExists(stateDim.getName()));
-    Assert.assertTrue(client.tableExists(countryDim.getName()));
+    assertTrue(client.tableExists(cityDim.getName()));
+    assertTrue(client.tableExists(stateDim.getName()));
+    assertTrue(client.tableExists(countryDim.getName()));
 
     validateDim(zipDim, zipAttrs, "zipcode", "stateid");
     validateDim(cityDim, cityAttrs, "id", "stateid");
@@ -395,8 +377,8 @@ public class TestCubeMetastoreClient {
     Dimension city = client.getDimension(cityDim.getName());
     assertEquals(dimExpressions.size(), city.getExpressions().size());
     assertEquals(dimExpressions.size(), city.getExpressionNames().size());
-    Assert.assertNotNull(city.getExpressionByName("stateAndCountry"));
-    Assert.assertNotNull(city.getExpressionByName("cityaddress"));
+    assertNotNull(city.getExpressionByName("stateAndCountry"));
+    assertNotNull(city.getExpressionByName("cityaddress"));
     assertEquals(city.getExpressionByName("cityaddress").getDescription(), "city with state and city and zip");
     assertEquals(city.getExpressionByName("cityaddress").getDisplayString(), "City Address");
 
@@ -412,7 +394,7 @@ public class TestCubeMetastoreClient {
       expr1 = new ExprSpec();
       expr1.setExpr("contact(countrydim.name");
       stateCountryExpr.addExpression(expr1);
-      Assert.fail("Expected add expression to fail because of syntax error");
+      fail("Expected add expression to fail because of syntax error");
     } catch (ParseException exc) {
       // Pass
     }
@@ -424,13 +406,13 @@ public class TestCubeMetastoreClient {
     assertEquals(1, city.getExpressions().size());
 
     ExprColumn stateAndCountryActual = city.getExpressionByName("stateAndCountry");
-    Assert.assertNotNull(stateAndCountryActual.getExpressions());
+    assertNotNull(stateAndCountryActual.getExpressions());
     assertEquals(2, stateAndCountryActual.getExpressions().size());
-    Assert.assertTrue(stateAndCountryActual.getExpressions().contains("concat(statedim.name, \"-\", countrydim.name)"));
-    Assert.assertTrue(stateAndCountryActual.getExpressions()
+    assertTrue(stateAndCountryActual.getExpressions().contains("concat(statedim.name, \"-\", countrydim.name)"));
+    assertTrue(stateAndCountryActual.getExpressions()
       .contains("concat(countrydim.name, \"-\", countrydim.name)"));
 
-    Assert.assertNotNull(city.getExpressionByName("stateAndCountry"));
+    assertNotNull(city.getExpressionByName("stateAndCountry"));
     assertEquals(city.getExpressionByName("stateAndCountry").getExpr(),
       "concat(statedim.name, \"-\", countrydim.name)");
 
@@ -441,7 +423,7 @@ public class TestCubeMetastoreClient {
     assertEquals(1, cityAltered.getExpressionByName("stateAndCountry").getExpressions().size());
 
 
-    List<TableReference> chain = new ArrayList<TableReference>();
+    List<TableReference> chain = new ArrayList<>();
     chain.add(new TableReference("zipdim", "cityid"));
     chain.add(new TableReference("citydim", "id"));
     chain.add(new TableReference("citydim", "stateid"));
@@ -458,8 +440,7 @@ public class TestCubeMetastoreClient {
     toAlter.alterAttribute(new ReferencedDimAtrribute(new FieldSchema("newRefDim", "int", "new ref-dim added"),
       "New city ref", new TableReference("citydim", "id")));
     toAlter.alterAttribute(new BaseDimAttribute(new FieldSchema("f2", "varchar", "modified field")));
-    List<TableReference> stateRefs = new ArrayList<TableReference>();
-    stateRefs.add(new TableReference("statedim", "id"));
+    List<TableReference> stateRefs = Lists.newArrayList(new TableReference("statedim", "id"));
     toAlter.alterAttribute(new ReferencedDimAtrribute(new FieldSchema("stateid", "int", "state id"), "State refer",
       stateRefs));
     toAlter.removeAttribute("f1");
@@ -473,32 +454,32 @@ public class TestCubeMetastoreClient {
 
 
     assertEquals(toAlter, altered);
-    Assert.assertNotNull(altered.getAttributeByName("newZipDim"));
-    Assert.assertNotNull(altered.getAttributeByName("newRefDim"));
-    Assert.assertNotNull(altered.getAttributeByName("f2"));
-    Assert.assertNotNull(altered.getAttributeByName("stateid"));
-    Assert.assertNull(altered.getAttributeByName("f1"));
+    assertNotNull(altered.getAttributeByName("newZipDim"));
+    assertNotNull(altered.getAttributeByName("newRefDim"));
+    assertNotNull(altered.getAttributeByName("f2"));
+    assertNotNull(altered.getAttributeByName("stateid"));
+    assertNull(altered.getAttributeByName("f1"));
     assertEquals(1, altered.getExpressions().size());
-    Assert.assertNotNull(altered.getExpressionByName("formattedcode"));
+    assertNotNull(altered.getExpressionByName("formattedcode"));
     assertEquals(altered.getExpressionByName("formattedcode").getExpr(), "format_number(code, \"#,###,###\")");
 
     CubeDimAttribute newzipdim = altered.getAttributeByName("newZipDim");
-    Assert.assertTrue(newzipdim instanceof BaseDimAttribute);
+    assertTrue(newzipdim instanceof BaseDimAttribute);
     assertEquals(((BaseDimAttribute) newzipdim).getType(), "int");
     assertEquals((((BaseDimAttribute) newzipdim).getNumOfDistinctValues().get()), Long.valueOf(1000));
 
     CubeDimAttribute newrefdim = altered.getAttributeByName("newRefDim");
-    Assert.assertTrue(newrefdim instanceof ReferencedDimAtrribute);
+    assertTrue(newrefdim instanceof ReferencedDimAtrribute);
     assertEquals(((ReferencedDimAtrribute) newrefdim).getReferences().size(), 1);
     assertEquals(((ReferencedDimAtrribute) newrefdim).getReferences().get(0).getDestTable(), cityDim.getName());
     assertEquals(((ReferencedDimAtrribute) newrefdim).getReferences().get(0).getDestColumn(), "id");
 
     CubeDimAttribute f2 = altered.getAttributeByName("f2");
-    Assert.assertTrue(f2 instanceof BaseDimAttribute);
+    assertTrue(f2 instanceof BaseDimAttribute);
     assertEquals(((BaseDimAttribute) f2).getType(), "varchar");
 
     CubeDimAttribute stateid = altered.getAttributeByName("stateid");
-    Assert.assertTrue(stateid instanceof ReferencedDimAtrribute);
+    assertTrue(stateid instanceof ReferencedDimAtrribute);
     assertEquals(((ReferencedDimAtrribute) stateid).getReferences().size(), 1);
     assertEquals(((ReferencedDimAtrribute) stateid).getReferences().get(0).getDestTable(), stateDim.getName());
     assertEquals(((ReferencedDimAtrribute) stateid).getReferences().get(0).getDestColumn(), "id");
@@ -521,34 +502,32 @@ public class TestCubeMetastoreClient {
 
   private void validateDim(Dimension udim, Set<CubeDimAttribute> attrs, String basedim, String referdim)
     throws HiveException {
-    Assert.assertTrue(client.tableExists(udim.getName()));
+    assertTrue(client.tableExists(udim.getName()));
     Table dimTbl = client.getHiveTable(udim.getName());
-    Assert.assertTrue(client.isDimension(dimTbl));
+    assertTrue(client.isDimension(dimTbl));
     Dimension dim = new Dimension(dimTbl);
-    Assert.assertTrue(udim.equals(dim));
-    Assert.assertTrue(udim.equals(client.getDimension(udim.getName())));
+    assertTrue(udim.equals(dim));
+    assertTrue(udim.equals(client.getDimension(udim.getName())));
     assertEquals(dim.getAttributes().size(), attrs.size());
-    Assert.assertNotNull(dim.getAttributeByName(basedim));
-    Assert.assertTrue(dim.getAttributeByName(basedim) instanceof BaseDimAttribute);
+    assertNotNull(dim.getAttributeByName(basedim));
+    assertTrue(dim.getAttributeByName(basedim) instanceof BaseDimAttribute);
     if (referdim != null) {
-      Assert.assertNotNull(dim.getAttributeByName(referdim));
-      Assert.assertTrue(dim.getAttributeByName(referdim) instanceof ReferencedDimAtrribute);
+      assertNotNull(dim.getAttributeByName(referdim));
+      assertTrue(dim.getAttributeByName(referdim) instanceof ReferencedDimAtrribute);
     }
-    assertEquals(udim.getAttributeNames().size() + udim.getExpressionNames().size(), dim.getAllFieldNames()
-      .size());
+    assertEquals(udim.getAttributeNames().size() + udim.getExpressionNames().size(), dim.getAllFieldNames().size());
   }
 
   @Test(priority = 1)
   public void testCube() throws Exception {
-    client.createCube(CUBE_NAME, cubeMeasures, cubeDimensions, cubeExpressions, joinChains,
-      new HashMap<String, String>());
-    Assert.assertTrue(client.tableExists(CUBE_NAME));
+    client.createCube(CUBE_NAME, cubeMeasures, cubeDimensions, cubeExpressions, joinChains, emptyHashMap);
+    assertTrue(client.tableExists(CUBE_NAME));
     Table cubeTbl = client.getHiveTable(CUBE_NAME);
-    Assert.assertTrue(client.isCube(cubeTbl));
+    assertTrue(client.isCube(cubeTbl));
     Cube cube2 = new Cube(cubeTbl);
-    Assert.assertTrue(cube.equals(cube2));
-    Assert.assertFalse(cube2.isDerivedCube());
-    Assert.assertTrue(cube2.getTimedDimensions().isEmpty());
+    assertTrue(cube.equals(cube2));
+    assertFalse(cube2.isDerivedCube());
+    assertTrue(cube2.getTimedDimensions().isEmpty());
     assertEquals(cubeMeasures.size(), cube2.getMeasureNames().size());
     // +8 is for hierarchical dimension
     assertEquals(cubeDimensions.size() + 8, cube2.getDimAttributeNames().size());
@@ -558,32 +537,32 @@ public class TestCubeMetastoreClient {
     assertEquals(cubeDimensions.size(), cube2.getDimAttributes().size());
     assertEquals(cubeDimensions.size() + 8 + cubeMeasures.size() + cubeExpressions.size(), cube2
       .getAllFieldNames().size());
-    Assert.assertNotNull(cube2.getMeasureByName("msr4"));
+    assertNotNull(cube2.getMeasureByName("msr4"));
     assertEquals(cube2.getMeasureByName("msr4").getDescription(), "fourth measure");
     assertEquals(cube2.getMeasureByName("msr4").getDisplayString(), "Measure4");
-    Assert.assertNotNull(cube2.getDimAttributeByName("location"));
+    assertNotNull(cube2.getDimAttributeByName("location"));
     assertEquals(cube2.getDimAttributeByName("location").getDescription(), "location hierarchy");
-    Assert.assertNotNull(cube2.getDimAttributeByName("dim1"));
+    assertNotNull(cube2.getDimAttributeByName("dim1"));
     assertEquals(cube2.getDimAttributeByName("dim1").getDescription(), "basedim");
-    Assert.assertNull(cube2.getDimAttributeByName("dim1").getDisplayString());
-    Assert.assertNotNull(cube2.getDimAttributeByName("dim2"));
+    assertNull(cube2.getDimAttributeByName("dim1").getDisplayString());
+    assertNotNull(cube2.getDimAttributeByName("dim2"));
     assertEquals(cube2.getDimAttributeByName("dim2").getDescription(), "ref dim");
     assertEquals(cube2.getDimAttributeByName("dim2").getDisplayString(), "Dim2 refer");
-    Assert.assertNotNull(cube2.getExpressionByName("msr5"));
+    assertNotNull(cube2.getExpressionByName("msr5"));
     assertEquals(cube2.getExpressionByName("msr5").getDescription(), "fifth measure");
     assertEquals(cube2.getExpressionByName("msr5").getDisplayString(), "Avg msr5");
-    Assert.assertNotNull(cube2.getExpressionByName("booleancut"));
+    assertNotNull(cube2.getExpressionByName("booleancut"));
     assertEquals(cube2.getExpressionByName("booleancut").getDescription(), "a boolean expression");
     assertEquals(cube2.getExpressionByName("booleancut").getDisplayString(), "Boolean Cut");
     assertEquals(cube2.getExpressionByName("booleancut").getExpressions().size(), 2);
     // Validate expression can contain delimiter character
-    List<String> booleanCutExprs = new ArrayList<String>(cube2.getExpressionByName("booleancut").getExpressions());
-    Assert.assertTrue(booleanCutExprs.contains("dim1 | dim2 AND dim2 = 'XYZ'"));
-    Assert.assertTrue(cube2.allFieldsQueriable());
+    List<String> booleanCutExprs = new ArrayList<>(cube2.getExpressionByName("booleancut").getExpressions());
+    assertTrue(booleanCutExprs.contains("dim1 | dim2 AND dim2 = 'XYZ'"));
+    assertTrue(cube2.allFieldsQueriable());
 
-    Assert.assertTrue(cube2.getJoinChainNames().contains("cityfromzip"));
-    Assert.assertTrue(cube2.getJoinChainNames().contains("city"));
-    Assert.assertFalse(cube2.getJoinChains().isEmpty());
+    assertTrue(cube2.getJoinChainNames().contains("cityfromzip"));
+    assertTrue(cube2.getJoinChainNames().contains("city"));
+    assertFalse(cube2.getJoinChains().isEmpty());
     assertEquals(cube2.getJoinChains().size(), 2);
     JoinChain zipchain = cube2.getChainByName("cityfromzip");
     assertEquals(zipchain.getDisplayString(), "Zip City");
@@ -606,63 +585,63 @@ public class TestCubeMetastoreClient {
     assertEquals(citychain.getPaths().get(0).getReferences().size(), 2);
     assertEquals(citychain.getPaths().get(0).getReferences().get(0).toString(), "testmetastorecube.cityid");
     assertEquals(citychain.getPaths().get(0).getReferences().get(1).toString(), "citydim.id");
-    Assert.assertNotNull(cube2.getDimAttributeByName("zipcityname"));
+    assertNotNull(cube2.getDimAttributeByName("zipcityname"));
     ChainRefCol zipCityChain = ((ReferencedDimAtrribute) cube2.getDimAttributeByName("zipcityname"))
       .getChainRefColumns().get(0);
     assertEquals(zipCityChain.getChainName(), "cityfromzip");
     assertEquals(zipCityChain.getRefColumn(), "name");
 
-    client.createDerivedCube(CUBE_NAME, DERIVED_CUBE_NAME, measures, dimensions, new HashMap<String, String>(), 0L);
-    Assert.assertTrue(client.tableExists(DERIVED_CUBE_NAME));
+    client.createDerivedCube(CUBE_NAME, DERIVED_CUBE_NAME, measures, dimensions, emptyHashMap, 0L);
+    assertTrue(client.tableExists(DERIVED_CUBE_NAME));
     Table derivedTbl = client.getHiveTable(DERIVED_CUBE_NAME);
-    Assert.assertTrue(client.isCube(derivedTbl));
+    assertTrue(client.isCube(derivedTbl));
     DerivedCube dcube2 = new DerivedCube(derivedTbl, cube);
-    Assert.assertTrue(derivedCube.equals(dcube2));
-    Assert.assertTrue(dcube2.isDerivedCube());
-    Assert.assertTrue(dcube2.getTimedDimensions().isEmpty());
+    assertTrue(derivedCube.equals(dcube2));
+    assertTrue(dcube2.isDerivedCube());
+    assertTrue(dcube2.getTimedDimensions().isEmpty());
     assertEquals(measures.size(), dcube2.getMeasureNames().size());
     assertEquals(dimensions.size(), dcube2.getDimAttributeNames().size());
     assertEquals(measures.size(), dcube2.getMeasures().size());
     assertEquals(dimensions.size(), dcube2.getDimAttributes().size());
-    Assert.assertNotNull(dcube2.getMeasureByName("msr3"));
-    Assert.assertNull(dcube2.getMeasureByName("msr4"));
-    Assert.assertNull(dcube2.getDimAttributeByName("location"));
-    Assert.assertNotNull(dcube2.getDimAttributeByName("dim1"));
-    Assert.assertTrue(dcube2.allFieldsQueriable());
+    assertNotNull(dcube2.getMeasureByName("msr3"));
+    assertNull(dcube2.getMeasureByName("msr4"));
+    assertNull(dcube2.getDimAttributeByName("location"));
+    assertNotNull(dcube2.getDimAttributeByName("dim1"));
+    assertTrue(dcube2.allFieldsQueriable());
 
     client.createCube(CUBE_NAME_WITH_PROPS, cubeMeasures, cubeDimensions, CUBE_PROPERTIES);
-    Assert.assertTrue(client.tableExists(CUBE_NAME_WITH_PROPS));
+    assertTrue(client.tableExists(CUBE_NAME_WITH_PROPS));
     cubeTbl = client.getHiveTable(CUBE_NAME_WITH_PROPS);
-    Assert.assertTrue(client.isCube(cubeTbl));
+    assertTrue(client.isCube(cubeTbl));
     cube2 = new Cube(cubeTbl);
-    Assert.assertTrue(cubeWithProps.equals(cube2));
-    Assert.assertFalse(cube2.isDerivedCube());
-    Assert.assertFalse(cubeWithProps.getTimedDimensions().isEmpty());
-    Assert.assertTrue(cubeWithProps.getTimedDimensions().contains("dt"));
-    Assert.assertTrue(cubeWithProps.getTimedDimensions().contains("mydate"));
+    assertTrue(cubeWithProps.equals(cube2));
+    assertFalse(cube2.isDerivedCube());
+    assertFalse(cubeWithProps.getTimedDimensions().isEmpty());
+    assertTrue(cubeWithProps.getTimedDimensions().contains("dt"));
+    assertTrue(cubeWithProps.getTimedDimensions().contains("mydate"));
     assertEquals(cubeMeasures.size(), cube2.getMeasureNames().size());
     assertEquals(cubeDimensions.size() + 8, cube2.getDimAttributeNames().size());
     assertEquals(cubeMeasures.size(), cube2.getMeasures().size());
     assertEquals(cubeDimensions.size(), cube2.getDimAttributes().size());
-    Assert.assertNotNull(cube2.getMeasureByName("msr4"));
-    Assert.assertNotNull(cube2.getDimAttributeByName("location"));
-    Assert.assertFalse(cube2.allFieldsQueriable());
+    assertNotNull(cube2.getMeasureByName("msr4"));
+    assertNotNull(cube2.getDimAttributeByName("location"));
+    assertFalse(cube2.allFieldsQueriable());
 
     client.createDerivedCube(CUBE_NAME_WITH_PROPS, DERIVED_CUBE_NAME_WITH_PROPS, measures, dimensions,
       CUBE_PROPERTIES, 0L);
-    Assert.assertTrue(client.tableExists(DERIVED_CUBE_NAME_WITH_PROPS));
+    assertTrue(client.tableExists(DERIVED_CUBE_NAME_WITH_PROPS));
     derivedTbl = client.getHiveTable(DERIVED_CUBE_NAME_WITH_PROPS);
-    Assert.assertTrue(client.isCube(derivedTbl));
+    assertTrue(client.isCube(derivedTbl));
     dcube2 = new DerivedCube(derivedTbl, cubeWithProps);
-    Assert.assertTrue(derivedCubeWithProps.equals(dcube2));
-    Assert.assertTrue(dcube2.isDerivedCube());
-    Assert.assertNotNull(derivedCubeWithProps.getProperties().get("cube.custom.prop"));
+    assertTrue(derivedCubeWithProps.equals(dcube2));
+    assertTrue(dcube2.isDerivedCube());
+    assertNotNull(derivedCubeWithProps.getProperties().get("cube.custom.prop"));
     assertEquals(derivedCubeWithProps.getProperties().get("cube.custom.prop"), "myval");
-    Assert.assertNull(dcube2.getMeasureByName("msr4"));
-    Assert.assertNotNull(dcube2.getMeasureByName("msr3"));
-    Assert.assertNull(dcube2.getDimAttributeByName("location"));
-    Assert.assertNotNull(dcube2.getDimAttributeByName("dim1"));
-    Assert.assertTrue(dcube2.allFieldsQueriable());
+    assertNull(dcube2.getMeasureByName("msr4"));
+    assertNotNull(dcube2.getMeasureByName("msr3"));
+    assertNull(dcube2.getDimAttributeByName("location"));
+    assertNotNull(dcube2.getDimAttributeByName("dim1"));
+    assertTrue(dcube2.allFieldsQueriable());
   }
 
   @Test(priority = 1)
@@ -683,23 +662,23 @@ public class TestCubeMetastoreClient {
     toAlter.removeTimedDimension("dt");
 
     JoinChain cityChain = new JoinChain("city", "Cube City", "cube city desc modified");
-    List<TableReference> chain = new ArrayList<TableReference>();
+    List<TableReference> chain = new ArrayList<>();
     chain.add(new TableReference(cubeName, "cityid"));
     chain.add(new TableReference("citydim", "id"));
     cityChain.addPath(chain);
     toAlter.alterJoinChain(cityChain);
     toAlter.removeJoinChain("cityFromZip");
 
-    Assert.assertNotNull(toAlter.getMeasureByName("testAddMsr1"));
-    Assert.assertNotNull(toAlter.getMeasureByName("msr3"));
+    assertNotNull(toAlter.getMeasureByName("testAddMsr1"));
+    assertNotNull(toAlter.getMeasureByName("msr3"));
     assertEquals(toAlter.getMeasureByName("msr3").getDisplayString(), "Measure3Altered");
     assertEquals(toAlter.getMeasureByName("msr3").getDescription(), "third altered measure");
-    Assert.assertNull(toAlter.getMeasureByName("msr4"));
-    Assert.assertNotNull(toAlter.getDimAttributeByName("testAddDim1"));
+    assertNull(toAlter.getMeasureByName("msr4"));
+    assertNotNull(toAlter.getDimAttributeByName("testAddDim1"));
     assertEquals(toAlter.getDimAttributeByName("testAddDim1").getDescription(), "dim to add");
-    Assert.assertNotNull(toAlter.getDimAttributeByName("dim1"));
+    assertNotNull(toAlter.getDimAttributeByName("dim1"));
     assertEquals(toAlter.getDimAttributeByName("dim1").getDescription(), "basedim altered");
-    Assert.assertNull(toAlter.getDimAttributeByName("location2"));
+    assertNull(toAlter.getDimAttributeByName("location2"));
 
     client.alterCube(cubeName, toAlter);
 
@@ -708,35 +687,35 @@ public class TestCubeMetastoreClient {
     Cube altered = new Cube(alteredHiveTbl);
 
     assertEquals(toAlter, altered);
-    Assert.assertNotNull(altered.getMeasureByName("testAddMsr1"));
+    assertNotNull(altered.getMeasureByName("testAddMsr1"));
     CubeMeasure addedMsr = altered.getMeasureByName("testAddMsr1");
     assertEquals(addedMsr.getType(), "int");
-    Assert.assertNotNull(altered.getDimAttributeByName("testAddDim1"));
+    assertNotNull(altered.getDimAttributeByName("testAddDim1"));
     BaseDimAttribute addedDim = (BaseDimAttribute) altered.getDimAttributeByName("testAddDim1");
     assertEquals(addedDim.getType(), "string");
     assertEquals(addedDim.getDescription(), "dim to add");
-    Assert.assertTrue(altered.getTimedDimensions().contains("zt"));
+    assertTrue(altered.getTimedDimensions().contains("zt"));
     assertEquals(altered.getMeasureByName("msr3").getDisplayString(), "Measure3Altered");
     assertEquals(altered.getMeasureByName("msr3").getDescription(), "third altered measure");
-    Assert.assertNotNull(altered.getDimAttributeByName("dim1"));
+    assertNotNull(altered.getDimAttributeByName("dim1"));
     assertEquals(altered.getDimAttributeByName("dim1").getDescription(), "basedim altered");
-    Assert.assertNull(altered.getDimAttributeByName("location2"));
-    Assert.assertNull(altered.getChainByName("cityFromZip"));
+    assertNull(altered.getDimAttributeByName("location2"));
+    assertNull(altered.getChainByName("cityFromZip"));
     assertEquals(altered.getChainByName("city").getDescription(), "cube city desc modified");
 
     toAlter.alterMeasure(new ColumnMeasure(new FieldSchema("testAddMsr1", "double", "testAddMeasure")));
     client.alterCube(cubeName, toAlter);
     altered = new Cube(Hive.get(conf).getTable(cubeName));
     addedMsr = altered.getMeasureByName("testaddmsr1");
-    Assert.assertNotNull(addedMsr);
+    assertNotNull(addedMsr);
     assertEquals(addedMsr.getType(), "double");
-    Assert.assertTrue(client.getAllFacts(altered).isEmpty());
+    assertTrue(client.getAllFacts(altered).isEmpty());
   }
 
   @Test(priority = 2)
   public void testAlterDerivedCube() throws Exception {
     String name = "alter_derived_cube";
-    client.createDerivedCube(CUBE_NAME, name, measures, dimensions, new HashMap<String, String>(), 0L);
+    client.createDerivedCube(CUBE_NAME, name, measures, dimensions, emptyHashMap, 0L);
     // Test alter cube
     Table cubeTbl = client.getHiveTable(name);
     DerivedCube toAlter = new DerivedCube(cubeTbl, (Cube) client.getCube(CUBE_NAME));
@@ -745,34 +724,34 @@ public class TestCubeMetastoreClient {
     toAlter.addDimension("dim1StartTime");
     toAlter.removeDimension("dim1");
 
-    Assert.assertNotNull(toAlter.getMeasureByName("msr4"));
-    Assert.assertNotNull(toAlter.getMeasureByName("msr2"));
-    Assert.assertNull(toAlter.getMeasureByName("msr3"));
-    Assert.assertNotNull(toAlter.getDimAttributeByName("dim1StartTime"));
-    Assert.assertNotNull(toAlter.getDimAttributeByName("dim2"));
-    Assert.assertNull(toAlter.getDimAttributeByName("dim1"));
+    assertNotNull(toAlter.getMeasureByName("msr4"));
+    assertNotNull(toAlter.getMeasureByName("msr2"));
+    assertNull(toAlter.getMeasureByName("msr3"));
+    assertNotNull(toAlter.getDimAttributeByName("dim1StartTime"));
+    assertNotNull(toAlter.getDimAttributeByName("dim2"));
+    assertNull(toAlter.getDimAttributeByName("dim1"));
 
     client.alterCube(name, toAlter);
 
     DerivedCube altered = (DerivedCube) client.getCube(name);
 
     assertEquals(toAlter, altered);
-    Assert.assertNotNull(altered.getMeasureByName("msr4"));
+    assertNotNull(altered.getMeasureByName("msr4"));
     CubeMeasure addedMsr = altered.getMeasureByName("msr4");
     assertEquals(addedMsr.getType(), "bigint");
-    Assert.assertNotNull(altered.getDimAttributeByName("dim1StartTime"));
+    assertNotNull(altered.getDimAttributeByName("dim1StartTime"));
     BaseDimAttribute addedDim = (BaseDimAttribute) altered.getDimAttributeByName("dim1StartTime");
     assertEquals(addedDim.getType(), "string");
-    Assert.assertNotNull(addedDim.getStartTime());
+    assertNotNull(addedDim.getStartTime());
 
     client.dropCube(name);
-    Assert.assertFalse(client.tableExists(name));
+    assertFalse(client.tableExists(name));
   }
 
   @Test(priority = 2)
   public void testCubeFact() throws Exception {
     String factName = "testMetastoreFact";
-    List<FieldSchema> factColumns = new ArrayList<FieldSchema>(cubeMeasures.size());
+    List<FieldSchema> factColumns = new ArrayList<>(cubeMeasures.size());
     for (CubeMeasure measure : cubeMeasures) {
       factColumns.add(measure.getColumn());
     }
@@ -780,114 +759,86 @@ public class TestCubeMetastoreClient {
     // add one dimension of the cube
     factColumns.add(new FieldSchema("zipcode", "int", "zip"));
 
-    Map<String, Set<UpdatePeriod>> updatePeriods = new HashMap<String, Set<UpdatePeriod>>();
-    Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
-    ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
-    List<String> timePartCols = new ArrayList<String>();
-    partCols.add(getDatePartition());
-    timePartCols.add(getDatePartitionKey());
-    updates.add(HOURLY);
-    updates.add(DAILY);
-    StorageTableDesc s1 = new StorageTableDesc();
-    s1.setInputFormat(TextInputFormat.class.getCanonicalName());
-    s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
-    s1.setPartCols(partCols);
-    s1.setTimePartCols(timePartCols);
-    updatePeriods.put(c1, updates);
-    Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
-    storageTables.put(c1, s1);
+    StorageTableDesc s1 = new StorageTableDesc(TextInputFormat.class, HiveIgnoreKeyTextOutputFormat.class,
+      datePartSingleton, datePartKeySingleton);
+    Map<String, Set<UpdatePeriod>> updatePeriods = getHashMap(c1, Sets.newHashSet(HOURLY, DAILY));
+    Map<String, StorageTableDesc> storageTables = getHashMap(c1, s1);
 
     CubeFactTable cubeFact = new CubeFactTable(CUBE_NAME, factName, factColumns, updatePeriods);
 
     // create cube fact
     client.createCubeFactTable(CUBE_NAME, factName, factColumns, updatePeriods, 0L, null, storageTables);
-    Assert.assertTrue(client.tableExists(factName));
+    assertTrue(client.tableExists(factName));
     Table cubeTbl = client.getHiveTable(factName);
-    Assert.assertTrue(client.isFactTable(cubeTbl));
-    Assert.assertTrue(client.isFactTableForCube(cubeTbl, CUBE_NAME));
+    assertTrue(client.isFactTable(cubeTbl));
+    assertTrue(client.isFactTableForCube(cubeTbl, CUBE_NAME));
     assertEquals(client.getAllFacts(client.getCube(CUBE_NAME)).get(0).getName(), factName.toLowerCase());
-    assertEquals(client.getAllFacts(client.getCube(DERIVED_CUBE_NAME)).get(0).getName(),
-      factName.toLowerCase());
+    assertEquals(client.getAllFacts(client.getCube(DERIVED_CUBE_NAME)).get(0).getName(), factName.toLowerCase());
     CubeFactTable cubeFact2 = new CubeFactTable(cubeTbl);
-    Assert.assertTrue(cubeFact.equals(cubeFact2));
+    assertTrue(cubeFact.equals(cubeFact2));
 
     // Assert for storage tables
     for (String entry : storageTables.keySet()) {
-      String storageTableName = MetastoreUtil.getFactOrDimtableStorageTableName(factName, entry);
-      Assert.assertTrue(client.tableExists(storageTableName));
+      String storageTableName = getFactOrDimtableStorageTableName(factName, entry);
+      assertTrue(client.tableExists(storageTableName));
     }
 
-    Map<String, Date> timeParts = new HashMap<String, Date>();
-    timeParts.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
-    timeParts.put("non_existing_part_col", now);
+    Map<String, Date> timeParts = getTimePartitionByOffsets(getDatePartitionKey(), 0, "non_existing_part_col", 0);
     // test error on adding invalid partition
     // test partition
     StoragePartitionDesc partSpec = new StoragePartitionDesc(cubeFact.getName(), timeParts, null, HOURLY);
-    try{
+    try {
       client.addPartition(partSpec, c1);
       fail("Add should fail since non_existing_part_col is non-existing");
-    } catch(LensException e){
+    } catch (LensException e) {
       assertEquals(e.getErrorCode(), LensCubeErrorCode.TIMELINE_ABSENT.getLensErrorInfo().getErrorCode());
     }
     timeParts.remove("non_existing_part_col");
     partSpec = new StoragePartitionDesc(cubeFact.getName(), timeParts, null, HOURLY);
     client.addPartition(partSpec, c1);
-    Assert.assertTrue(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts,
-      new HashMap<String, String>()));
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1,
-      TestCubeMetastoreClient.getDatePartitionKey()));
+    assertTrue(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts, emptyHashMap));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, getDatePartitionKey()));
 
     // Partition with different schema
     FieldSchema newcol = new FieldSchema("newcol", "int", "new col for part");
     cubeFact.alterColumn(newcol);
     client.alterCubeFactTable(cubeFact.getName(), cubeFact, storageTables);
-    String storageTableName = MetastoreUtil.getFactOrDimtableStorageTableName(factName, c1);
+    String storageTableName = getFactOrDimtableStorageTableName(factName, c1);
     assertEquals(client.getAllParts(storageTableName).size(), 1);
     List<Partition> parts = client.getPartitionsByFilter(storageTableName, "dt='latest'");
     assertEquals(parts.size(), 0);
 
-    Map<String, Date> timeParts2 = new HashMap<String, Date>();
-    timeParts2.put(TestCubeMetastoreClient.getDatePartitionKey(), nowPlus1);
-    StoragePartitionDesc partSpec2 =
-      new StoragePartitionDesc(cubeFact.getName(), timeParts2, null, HOURLY);
+    Map<String, Date> timeParts2 = getTimePartitionByOffsets(getDatePartitionKey(), 1);
+    StoragePartitionDesc partSpec2 = new StoragePartitionDesc(cubeFact.getName(), timeParts2, null, HOURLY);
     partSpec2.setInputFormat(SequenceFileInputFormat.class.getCanonicalName());
     partSpec2.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
     client.addPartition(partSpec2, c1);
     assertEquals(client.getAllParts(storageTableName).size(), 2);
-    Assert.assertTrue(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts,
-      new HashMap<String, String>()));
-    Assert.assertTrue(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts2,
-      new HashMap<String, String>()));
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1,
-      TestCubeMetastoreClient.getDatePartitionKey()));
+    assertTrue(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts, emptyHashMap));
+    assertTrue(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts2, emptyHashMap));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, getDatePartitionKey()));
     parts = client.getPartitionsByFilter(storageTableName, "dt='latest'");
     assertEquals(parts.size(), 0);
 
     client.dropPartition(cubeFact.getName(), c1, timeParts2, null, HOURLY);
     assertEquals(client.getAllParts(storageTableName).size(), 1);
-    Assert.assertTrue(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts,
-      new HashMap<String, String>()));
-    Assert.assertFalse(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts2,
-      new HashMap<String, String>()));
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1,
-      TestCubeMetastoreClient.getDatePartitionKey()));
+    assertTrue(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts, emptyHashMap));
+    assertFalse(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts2, emptyHashMap));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, getDatePartitionKey()));
     parts = client.getPartitionsByFilter(storageTableName, "dt='latest'");
     assertEquals(parts.size(), 0);
 
     client.dropPartition(cubeFact.getName(), c1, timeParts, null, HOURLY);
     assertEquals(client.getAllParts(storageTableName).size(), 0);
-    Assert.assertFalse(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts,
-      new HashMap<String, String>()));
-    Assert.assertFalse(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts2,
-      new HashMap<String, String>()));
-    Assert.assertFalse(client.latestPartitionExists(cubeFact.getName(), c1,
-      TestCubeMetastoreClient.getDatePartitionKey()));
+    assertFalse(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts, emptyHashMap));
+    assertFalse(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts2, emptyHashMap));
+    assertFalse(client.latestPartitionExists(cubeFact.getName(), c1, getDatePartitionKey()));
   }
 
   @Test(priority = 2)
   public void testAlterCubeFact() throws Exception {
     String factName = "test_alter_fact";
-    List<FieldSchema> factColumns = new ArrayList<FieldSchema>(cubeMeasures.size());
+    List<FieldSchema> factColumns = new ArrayList<>(cubeMeasures.size());
     for (CubeMeasure measure : cubeMeasures) {
       factColumns.add(measure.getColumn());
     }
@@ -895,29 +846,10 @@ public class TestCubeMetastoreClient {
     // add one dimension of the cube
     factColumns.add(new FieldSchema("zipcode", "int", "zip"));
 
-    Map<String, Set<UpdatePeriod>> updatePeriods = new HashMap<String, Set<UpdatePeriod>>();
-    Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
-    updates.add(HOURLY);
-    updates.add(DAILY);
-
-    ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
-    List<String> timePartCols = new ArrayList<String>();
-    partCols.add(getDatePartition());
-    timePartCols.add(getDatePartitionKey());
-    updates.add(HOURLY);
-    updates.add(DAILY);
-    StorageTableDesc s1 = new StorageTableDesc();
-    s1.setInputFormat(TextInputFormat.class.getCanonicalName());
-    s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
-    s1.setPartCols(partCols);
-    s1.setTimePartCols(timePartCols);
-    updatePeriods.put(c1, updates);
-
-    updatePeriods.put(c2, updates);
-
-    Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
-    storageTables.put(c1, s1);
-    storageTables.put(c2, s1);
+    StorageTableDesc s1 = new StorageTableDesc(TextInputFormat.class, HiveIgnoreKeyTextOutputFormat.class,
+      datePartSingleton, datePartKeySingleton);
+    Map<String, Set<UpdatePeriod>> updatePeriods = getHashMap(c1, hourlyAndDaily, c2, hourlyAndDaily);
+    Map<String, StorageTableDesc> storageTables = getHashMap(c1, s1, c2, s1);
 
     // create cube fact
     client.createCubeFactTable(CUBE_NAME, factName, factColumns, updatePeriods, 0L, null, storageTables);
@@ -926,15 +858,11 @@ public class TestCubeMetastoreClient {
     factTable.alterColumn(new FieldSchema("testFactColAdd", "int", "test add column"));
     factTable.alterColumn(new FieldSchema("msr3", "int", "test alter column"));
     factTable.alterWeight(100L);
-    Map<String, String> newProp = new HashMap<String, String>();
-    newProp.put("new.prop", "val");
+    Map<String, String> newProp = getHashMap("new.prop", "val");
     factTable.addProperties(newProp);
     factTable.addUpdatePeriod(c1, MONTHLY);
     factTable.removeUpdatePeriod(c1, HOURLY);
-    Set<UpdatePeriod> alterupdates = new HashSet<UpdatePeriod>();
-    alterupdates.add(HOURLY);
-    alterupdates.add(DAILY);
-    alterupdates.add(MONTHLY);
+    Set<UpdatePeriod> alterupdates = Sets.newHashSet(HOURLY, DAILY, MONTHLY);
     factTable.alterStorage(c2, alterupdates);
 
     client.alterCubeFactTable(factName, factTable, storageTables);
@@ -942,14 +870,14 @@ public class TestCubeMetastoreClient {
     Table factHiveTable = Hive.get(conf).getTable(factName);
     CubeFactTable altered = new CubeFactTable(factHiveTable);
 
-    Assert.assertTrue(altered.weight() == 100L);
-    Assert.assertTrue(altered.getProperties().get("new.prop").equals("val"));
-    Assert.assertTrue(altered.getUpdatePeriods().get(c1).contains(MONTHLY));
-    Assert.assertFalse(altered.getUpdatePeriods().get(c1).contains(HOURLY));
-    Assert.assertTrue(altered.getUpdatePeriods().get(c2).contains(MONTHLY));
-    Assert.assertTrue(altered.getUpdatePeriods().get(c2).contains(DAILY));
-    Assert.assertTrue(altered.getUpdatePeriods().get(c2).contains(HOURLY));
-    Assert.assertTrue(altered.getCubeName().equalsIgnoreCase(CUBE_NAME.toLowerCase()));
+    assertTrue(altered.weight() == 100L);
+    assertTrue(altered.getProperties().get("new.prop").equals("val"));
+    assertTrue(altered.getUpdatePeriods().get(c1).contains(MONTHLY));
+    assertFalse(altered.getUpdatePeriods().get(c1).contains(HOURLY));
+    assertTrue(altered.getUpdatePeriods().get(c2).contains(MONTHLY));
+    assertTrue(altered.getUpdatePeriods().get(c2).contains(DAILY));
+    assertTrue(altered.getUpdatePeriods().get(c2).contains(HOURLY));
+    assertTrue(altered.getCubeName().equalsIgnoreCase(CUBE_NAME.toLowerCase()));
     boolean contains = false;
     for (FieldSchema column : altered.getColumns()) {
       if (column.getName().equals("testfactcoladd") && column.getType().equals("int")) {
@@ -957,28 +885,23 @@ public class TestCubeMetastoreClient {
         break;
       }
     }
-    Assert.assertTrue(contains);
+    assertTrue(contains);
 
     // alter storage table desc
-    String c1TableName = MetastoreUtil.getFactOrDimtableStorageTableName(factName, c1);
+    String c1TableName = getFactOrDimtableStorageTableName(factName, c1);
     Table c1Table = client.getTable(c1TableName);
-    assertEquals(c1Table.getInputFormatClass().getCanonicalName(),
-      TextInputFormat.class.getCanonicalName());
-    s1 = new StorageTableDesc();
-    s1.setInputFormat(SequenceFileInputFormat.class.getCanonicalName());
-    s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
-    s1.setPartCols(partCols);
-    s1.setTimePartCols(timePartCols);
+    assertEquals(c1Table.getInputFormatClass().getCanonicalName(), TextInputFormat.class.getCanonicalName());
+    s1 = new StorageTableDesc(SequenceFileInputFormat.class, HiveIgnoreKeyTextOutputFormat.class,
+      datePartSingleton, datePartKeySingleton);
     s1.setFieldDelim(":");
     storageTables.put(c1, s1);
     storageTables.put(c4, s1);
-    factTable.addStorage(c4, updates);
+    factTable.addStorage(c4, hourlyAndDaily);
     client.alterCubeFactTable(factName, factTable, storageTables);
     CubeFactTable altered2 = client.getCubeFact(factName);
-    Assert.assertTrue(client.tableExists(c1TableName));
+    assertTrue(client.tableExists(c1TableName));
     Table alteredC1Table = client.getTable(c1TableName);
-    assertEquals(alteredC1Table.getInputFormatClass().getCanonicalName(),
-      SequenceFileInputFormat.class.getCanonicalName());
+    assertEquals(alteredC1Table.getInputFormatClass(), SequenceFileInputFormat.class);
     assertEquals(alteredC1Table.getSerdeParam(serdeConstants.FIELD_DELIM), ":");
 
     boolean storageTableColsAltered = false;
@@ -988,51 +911,51 @@ public class TestCubeMetastoreClient {
         break;
       }
     }
-    Assert.assertTrue(storageTableColsAltered);
+    assertTrue(storageTableColsAltered);
 
-    Assert.assertTrue(altered2.getStorages().contains("C4"));
-    Assert.assertTrue(altered2.getUpdatePeriods().get("C4").equals(updates));
-    String c4TableName = MetastoreUtil.getFactOrDimtableStorageTableName(factName, c4);
-    Assert.assertTrue(client.tableExists(c4TableName));
+    assertTrue(altered2.getStorages().contains("C4"));
+    assertTrue(altered2.getUpdatePeriods().get("C4").equals(hourlyAndDaily));
+    String c4TableName = getFactOrDimtableStorageTableName(factName, c4);
+    assertTrue(client.tableExists(c4TableName));
 
     // add storage
-    client.addStorage(altered2, c3, updates, s1);
+    client.addStorage(altered2, c3, hourlyAndDaily, s1);
     CubeFactTable altered3 = client.getCubeFact(factName);
-    Assert.assertTrue(altered3.getStorages().contains("C3"));
-    Assert.assertTrue(altered3.getUpdatePeriods().get("C3").equals(updates));
-    String storageTableName = MetastoreUtil.getFactOrDimtableStorageTableName(factName, c3);
-    Assert.assertTrue(client.tableExists(storageTableName));
+    assertTrue(altered3.getStorages().contains("C3"));
+    assertTrue(altered3.getUpdatePeriods().get("C3").equals(hourlyAndDaily));
+    String storageTableName = getFactOrDimtableStorageTableName(factName, c3);
+    assertTrue(client.tableExists(storageTableName));
     client.dropStorageFromFact(factName, c2);
-    storageTableName = MetastoreUtil.getFactOrDimtableStorageTableName(factName, c2);
-    Assert.assertFalse(client.tableExists(storageTableName));
+    storageTableName = getFactOrDimtableStorageTableName(factName, c2);
+    assertFalse(client.tableExists(storageTableName));
     List<CubeFactTable> cubeFacts = client.getAllFacts(client.getCube(CUBE_NAME));
-    List<String> cubeFactNames = new ArrayList<String>();
+    List<String> cubeFactNames = new ArrayList<>();
     for (CubeFactTable cfact : cubeFacts) {
       cubeFactNames.add(cfact.getName());
     }
-    Assert.assertTrue(cubeFactNames.contains(factName.toLowerCase()));
+    assertTrue(cubeFactNames.contains(factName.toLowerCase()));
     cubeFacts = client.getAllFacts(client.getCube(DERIVED_CUBE_NAME));
-    cubeFactNames = new ArrayList<String>();
+    cubeFactNames = new ArrayList<>();
     for (CubeFactTable cfact : cubeFacts) {
       cubeFactNames.add(cfact.getName());
     }
-    Assert.assertTrue(cubeFactNames.contains(factName.toLowerCase()));
+    assertTrue(cubeFactNames.contains(factName.toLowerCase()));
     client.dropFact(factName, true);
-    Assert.assertFalse(client.tableExists(MetastoreUtil.getFactOrDimtableStorageTableName(factName, c1)));
-    Assert.assertFalse(client.tableExists(MetastoreUtil.getFactOrDimtableStorageTableName(factName, c3)));
-    Assert.assertFalse(client.tableExists(factName));
+    assertFalse(client.tableExists(getFactOrDimtableStorageTableName(factName, c1)));
+    assertFalse(client.tableExists(getFactOrDimtableStorageTableName(factName, c3)));
+    assertFalse(client.tableExists(factName));
     cubeFacts = client.getAllFacts(cube);
-    cubeFactNames = new ArrayList<String>();
+    cubeFactNames = new ArrayList<>();
     for (CubeFactTable cfact : cubeFacts) {
       cubeFactNames.add(cfact.getName());
     }
-    Assert.assertFalse(cubeFactNames.contains(factName.toLowerCase()));
+    assertFalse(cubeFactNames.contains(factName.toLowerCase()));
   }
 
   @Test(priority = 2)
   public void testCubeFactWithTwoTimedParts() throws Exception {
     String factName = "testMetastoreFactTimedParts";
-    List<FieldSchema> factColumns = new ArrayList<FieldSchema>(cubeMeasures.size());
+    List<FieldSchema> factColumns = new ArrayList<>(cubeMeasures.size());
     for (CubeMeasure measure : cubeMeasures) {
       factColumns.add(measure.getColumn());
     }
@@ -1040,61 +963,40 @@ public class TestCubeMetastoreClient {
     // add one dimension of the cube
     factColumns.add(new FieldSchema("zipcode", "int", "zip"));
 
-    Map<String, Set<UpdatePeriod>> updatePeriods = new HashMap<String, Set<UpdatePeriod>>();
-    Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
-    updates.add(HOURLY);
-    updates.add(DAILY);
+    Set<UpdatePeriod> updates = Sets.newHashSet(HOURLY, DAILY);
     FieldSchema testDtPart = new FieldSchema("mydate", "string", "date part");
-    ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
-    List<String> timePartCols = new ArrayList<String>();
-    partCols.add(getDatePartition());
-    partCols.add(testDtPart);
-    timePartCols.add(getDatePartitionKey());
-    timePartCols.add(testDtPart.getName());
-    StorageTableDesc s1 = new StorageTableDesc();
-    s1.setInputFormat(TextInputFormat.class.getCanonicalName());
-    s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
-    s1.setPartCols(partCols);
-    s1.setTimePartCols(timePartCols);
-    updatePeriods.put(c1, updates);
-
-    Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
-    storageTables.put(c1, s1);
+    StorageTableDesc s1 = new StorageTableDesc(TextInputFormat.class, HiveIgnoreKeyTextOutputFormat.class,
+      Lists.newArrayList(getDatePartition(), testDtPart),
+      Lists.newArrayList(getDatePartitionKey(), testDtPart.getName()));
+    Map<String, Set<UpdatePeriod>> updatePeriods = getHashMap(c1, updates);
+    Map<String, StorageTableDesc> storageTables = getHashMap(c1, s1);
 
     CubeFactTable cubeFact = new CubeFactTable(CUBE_NAME_WITH_PROPS, factName, factColumns, updatePeriods);
 
     // create cube fact
     client.createCubeFactTable(CUBE_NAME_WITH_PROPS, factName, factColumns, updatePeriods, 0L, null, storageTables);
 
-    Assert.assertTrue(client.tableExists(factName));
+    assertTrue(client.tableExists(factName));
     Table cubeTbl = client.getHiveTable(factName);
-    Assert.assertTrue(client.isFactTable(cubeTbl));
-    Assert.assertTrue(client.isFactTableForCube(cubeTbl, CUBE_NAME_WITH_PROPS));
+    assertTrue(client.isFactTable(cubeTbl));
+    assertTrue(client.isFactTableForCube(cubeTbl, CUBE_NAME_WITH_PROPS));
     CubeFactTable cubeFact2 = new CubeFactTable(cubeTbl);
-    Assert.assertTrue(cubeFact.equals(cubeFact2));
+    assertTrue(cubeFact.equals(cubeFact2));
 
     // Assert for storage tables
     for (String entry : storageTables.keySet()) {
-      String storageTableName = MetastoreUtil.getFactOrDimtableStorageTableName(factName, entry);
-      Assert.assertTrue(client.tableExists(storageTableName));
+      String storageTableName = getFactOrDimtableStorageTableName(factName, entry);
+      assertTrue(client.tableExists(storageTableName));
     }
 
-    // test partition
-    Calendar cal = new GregorianCalendar();
-    cal.setTime(now);
-    cal.add(Calendar.HOUR, -1);
-    Date testDt = cal.getTime();
-    Map<String, Date> timeParts = new HashMap<String, Date>();
-    timeParts.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
-    timeParts.put(testDtPart.getName(), testDt);
+    // test partition
+    Map<String, Date> timeParts = getTimePartitionByOffsets(getDatePartitionKey(), 0, testDtPart.getName(), -1);
     StoragePartitionDesc partSpec = new StoragePartitionDesc(cubeFact.getName(), timeParts, null, HOURLY);
     client.addPartition(partSpec, c1);
-    Assert.assertTrue(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts,
-      new HashMap<String, String>()));
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, testDtPart.getName()));
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1,
-      TestCubeMetastoreClient.getDatePartitionKey()));
-    String storageTableName = MetastoreUtil.getFactOrDimtableStorageTableName(cubeFact.getName(), c1);
+    assertTrue(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts, emptyHashMap));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, testDtPart.getName()));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, getDatePartitionKey()));
+    String storageTableName = getFactOrDimtableStorageTableName(cubeFact.getName(), c1);
     List<Partition> parts = client.getPartitionsByFilter(storageTableName, "dt='latest'");
     assertEquals(parts.size(), 0);
 
@@ -1104,81 +1006,61 @@ public class TestCubeMetastoreClient {
 
     client.dropPartition(cubeFact.getName(), c1, timeParts, null, HOURLY);
     assertEquals(client.getAllParts(storageTableName).size(), 0);
-    Assert.assertFalse(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts,
-      new HashMap<String, String>()));
-    Assert.assertFalse(client.latestPartitionExists(cubeFact.getName(), c1,
-      TestCubeMetastoreClient.getDatePartitionKey()));
-    Assert.assertFalse(client.latestPartitionExists(cubeFact.getName(), c1, testDtPart.getName()));
+    assertFalse(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts, emptyHashMap));
+    assertFalse(client.latestPartitionExists(cubeFact.getName(), c1, getDatePartitionKey()));
+    assertFalse(client.latestPartitionExists(cubeFact.getName(), c1, testDtPart.getName()));
   }
 
   @Test(priority = 2)
   public void testCubeFactWithThreeTimedParts() throws Exception {
     String factName = "testMetastoreFact3TimedParts";
-    List<FieldSchema> factColumns = new ArrayList<FieldSchema>(cubeMeasures.size());
+    List<FieldSchema> factColumns = new ArrayList<>(cubeMeasures.size());
     for (CubeMeasure measure : cubeMeasures) {
       factColumns.add(measure.getColumn());
     }
 
     // add one dimension of the cube
     factColumns.add(new FieldSchema("zipcode", "int", "zip"));
-
-    Map<String, Set<UpdatePeriod>> updatePeriods = new HashMap<String, Set<UpdatePeriod>>();
-    Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
-    updates.add(HOURLY);
-    updates.add(DAILY);
     FieldSchema itPart = new FieldSchema("it", "string", "date part");
     FieldSchema etPart = new FieldSchema("et", "string", "date part");
-    ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
-    List<String> timePartCols = new ArrayList<String>();
-    partCols.add(getDatePartition());
-    partCols.add(itPart);
-    partCols.add(etPart);
-    timePartCols.add(getDatePartitionKey());
-    timePartCols.add(itPart.getName());
-    timePartCols.add(etPart.getName());
-    StorageTableDesc s1 = new StorageTableDesc();
-    s1.setInputFormat(TextInputFormat.class.getCanonicalName());
-    s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
-    s1.setPartCols(partCols);
-    s1.setTimePartCols(timePartCols);
-    updatePeriods.put(c1, updates);
-    updatePeriods.put(c2, updates);
-
-    Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
-    storageTables.put(c1, s1);
-    storageTables.put(c2, s1);
+    StorageTableDesc s1 = new StorageTableDesc(TextInputFormat.class, HiveIgnoreKeyTextOutputFormat.class,
+      Lists.newArrayList(getDatePartition(), itPart, etPart),
+      Lists.newArrayList(getDatePartitionKey(), itPart.getName(), etPart.getName()));
+
+    Map<String, Set<UpdatePeriod>> updatePeriods = getHashMap(c1, hourlyAndDaily, c2, hourlyAndDaily);
+    Map<String, StorageTableDesc> storageTables = getHashMap(c1, s1, c2, s1);
 
     CubeFactTable cubeFact = new CubeFactTable(CUBE_NAME_WITH_PROPS, factName, factColumns, updatePeriods);
 
     // create cube fact
     client.createCubeFactTable(CUBE_NAME_WITH_PROPS, factName, factColumns, updatePeriods, 0L, null, storageTables);
 
-    Assert.assertTrue(client.tableExists(factName));
+    assertTrue(client.tableExists(factName));
     Table cubeTbl = client.getHiveTable(factName);
-    Assert.assertTrue(client.isFactTable(cubeTbl));
-    Assert.assertTrue(client.isFactTableForCube(cubeTbl, CUBE_NAME_WITH_PROPS));
+    assertTrue(client.isFactTable(cubeTbl));
+    assertTrue(client.isFactTableForCube(cubeTbl, CUBE_NAME_WITH_PROPS));
     CubeFactTable cubeFact2 = new CubeFactTable(cubeTbl);
-    Assert.assertTrue(cubeFact.equals(cubeFact2));
+    assertTrue(cubeFact.equals(cubeFact2));
 
     String[] storages = new String[]{c1, c2};
     String[] partColNames = new String[]{getDatePartitionKey(), itPart.getName(), etPart.getName()};
 
     // Assert for storage tables
     for (String entry : storageTables.keySet()) {
-      String storageTableName = MetastoreUtil.getFactOrDimtableStorageTableName(factName, entry);
-      Assert.assertTrue(client.tableExists(storageTableName));
+      String storageTableName = getFactOrDimtableStorageTableName(factName, entry);
+      assertTrue(client.tableExists(storageTableName));
     }
 
-    String c1TableName = MetastoreUtil.getFactOrDimtableStorageTableName(cubeFact.getName(), c1);
-    String c2TableName = MetastoreUtil.getFactOrDimtableStorageTableName(cubeFact.getName(), c2);
+    String c1TableName = getFactOrDimtableStorageTableName(cubeFact.getName(), c1);
+    String c2TableName = getFactOrDimtableStorageTableName(cubeFact.getName(), c2);
 
-    Table c1Table = client.getHiveTable(c1TableName);
+    client.getHiveTable(c1TableName); // fetch only to verify the storage table is accessible
     Table c2Table = client.getHiveTable(c2TableName);
-    c2Table.getParameters().put(MetastoreUtil.getPartitionTimelineStorageClassKey(HOURLY,
+    c2Table.getParameters().put(getPartitionTimelineStorageClassKey(HOURLY,
       getDatePartitionKey()), StoreAllPartitionTimeline.class.getCanonicalName());
-    c2Table.getParameters().put(MetastoreUtil.getPartitionTimelineStorageClassKey(HOURLY,
+    c2Table.getParameters().put(getPartitionTimelineStorageClassKey(HOURLY,
       itPart.getName()), StoreAllPartitionTimeline.class.getCanonicalName());
-    c2Table.getParameters().put(MetastoreUtil.getPartitionTimelineStorageClassKey(HOURLY,
+    c2Table.getParameters().put(getPartitionTimelineStorageClassKey(HOURLY,
       etPart.getName()), StoreAllPartitionTimeline.class.getCanonicalName());
     client.pushHiveTable(c2Table);
 
@@ -1197,33 +1079,23 @@ public class TestCubeMetastoreClient {
     StoreAllPartitionTimeline timelineEtC2 = ((StoreAllPartitionTimeline) client.partitionTimelineCache.get(
       factName, c2, HOURLY, etPart.getName()));
 
-    Map<String, Date> timeParts1 = new HashMap<String, Date>();
-    timeParts1.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
-    timeParts1.put(itPart.getName(), now);
-    timeParts1.put(etPart.getName(), now);
-    StoragePartitionDesc partSpec1 = new StoragePartitionDesc(cubeFact.getName(), timeParts1, null,
-      HOURLY);
+    Map<String, Date> timeParts1 = getTimePartitionByOffsets(getDatePartitionKey(), 0, itPart.getName(), 0,
+      etPart.getName(), 0);
+    StoragePartitionDesc partSpec1 = new StoragePartitionDesc(cubeFact.getName(), timeParts1, null, HOURLY);
 
-    Map<String, Date> timeParts2 = new HashMap<String, Date>();
-    timeParts2.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
-    timeParts2.put(etPart.getName(), nowPlus1);
-    Map<String, String> nonTimeSpec = new HashMap<String, String>();
-    nonTimeSpec.put(itPart.getName(), "default");
+    Map<String, Date> timeParts2 = getTimePartitionByOffsets(getDatePartitionKey(), 0, etPart.getName(), 1);
+    Map<String, String> nonTimeSpec = getHashMap(itPart.getName(), "default");
     final StoragePartitionDesc partSpec2 = new StoragePartitionDesc(cubeFact.getName(), timeParts2, nonTimeSpec,
       HOURLY);
 
-    Map<String, Date> timeParts3 = new HashMap<String, Date>();
-    timeParts3.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
-    timeParts3.put(etPart.getName(), now);
+    Map<String, Date> timeParts3 = getTimePartitionByOffsets(getDatePartitionKey(), 0, etPart.getName(), 0);
     final StoragePartitionDesc partSpec3 = new StoragePartitionDesc(cubeFact.getName(), timeParts3, nonTimeSpec,
       HOURLY);
 
     client.addPartitions(Arrays.asList(partSpec1, partSpec2, partSpec3), c1);
     client.addPartitions(Arrays.asList(partSpec1, partSpec2, partSpec3), c2);
-    PartitionTimeline timeline1Temp = client.partitionTimelineCache.get(factName, c1, HOURLY,
-      getDatePartitionKey());
-    PartitionTimeline timeline2Temp = client.partitionTimelineCache.get(factName, c2, HOURLY,
-      getDatePartitionKey());
+    PartitionTimeline timeline1Temp = client.partitionTimelineCache.get(factName, c1, HOURLY, getDatePartitionKey());
+    PartitionTimeline timeline2Temp = client.partitionTimelineCache.get(factName, c2, HOURLY, getDatePartitionKey());
 
     assertEquals(timeline1Temp.getClass(), EndsAndHolesPartitionTimeline.class);
     assertEquals(timeline2Temp.getClass(), StoreAllPartitionTimeline.class);
@@ -1233,32 +1105,25 @@ public class TestCubeMetastoreClient {
 
     assertSameTimelines(factName, storages, HOURLY, partColNames);
 
-    assertTimeline(timelineDt, timelineDtC2, HOURLY, now, now);
-    assertTimeline(timelineEt, timelineEtC2, HOURLY, now, nowPlus1);
-    assertTimeline(timelineIt, timelineItC2, HOURLY, now, now);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, 0, 0);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, 0, 1);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, 0, 0);
 
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1,
-      TestCubeMetastoreClient.getDatePartitionKey()));
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, itPart.getName()));
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c2, etPart.getName()));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, getDatePartitionKey()));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, itPart.getName()));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c2, etPart.getName()));
 
     assertNoPartitionNamedLatest(c1TableName, partColNames);
     assertNoPartitionNamedLatest(c2TableName, partColNames);
 
-    Map<String, Date> timeParts4 = new HashMap<String, Date>();
-    timeParts4.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
-    timeParts4.put(itPart.getName(), nowPlus1);
-    timeParts4.put(etPart.getName(), nowMinus1);
-    final StoragePartitionDesc partSpec4 = new StoragePartitionDesc(cubeFact.getName(), timeParts4, null,
-      HOURLY);
+    Map<String, Date> timeParts4 = getTimePartitionByOffsets(getDatePartitionKey(), 0, itPart.getName(), 1,
+      etPart.getName(), -1);
+    final StoragePartitionDesc partSpec4 = new StoragePartitionDesc(cubeFact.getName(), timeParts4, null, HOURLY);
 
 
-    Map<String, Date> timeParts5 = new HashMap<String, Date>();
-    timeParts5.put(TestCubeMetastoreClient.getDatePartitionKey(), nowPlus1);
-    timeParts5.put(itPart.getName(), nowMinus1);
-    timeParts5.put(etPart.getName(), nowMinus2);
-    final StoragePartitionDesc partSpec5 = new StoragePartitionDesc(cubeFact.getName(), timeParts5, null,
-      HOURLY);
+    Map<String, Date> timeParts5 = getTimePartitionByOffsets(getDatePartitionKey(), 1, itPart.getName(), -1,
+      etPart.getName(), -2);
+    final StoragePartitionDesc partSpec5 = new StoragePartitionDesc(cubeFact.getName(), timeParts5, null, HOURLY);
 
     client.addPartitions(Arrays.asList(partSpec4, partSpec5), c1);
     client.addPartitions(Arrays.asList(partSpec4, partSpec5), c2);
@@ -1268,21 +1133,17 @@ public class TestCubeMetastoreClient {
 
     assertSameTimelines(factName, storages, HOURLY, partColNames);
 
-    assertTimeline(timelineDt, timelineDtC2, HOURLY, now, nowPlus1);
-    assertTimeline(timelineEt, timelineEtC2, HOURLY, nowMinus2, nowPlus1);
-    assertTimeline(timelineIt, timelineItC2, HOURLY, nowMinus1, nowPlus1);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, 0, 1);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, -2, 1);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, -1, 1);
 
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1,
-      TestCubeMetastoreClient.getDatePartitionKey()));
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, itPart.getName()));
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, etPart.getName()));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, getDatePartitionKey()));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, itPart.getName()));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, etPart.getName()));
 
-    Map<String, Date> timeParts6 = new HashMap<String, Date>();
-    timeParts6.put(TestCubeMetastoreClient.getDatePartitionKey(), nowMinus2);
-    timeParts6.put(itPart.getName(), nowMinus1);
-    timeParts6.put(etPart.getName(), nowMinus2);
-    final StoragePartitionDesc partSpec6 = new StoragePartitionDesc(cubeFact.getName(), timeParts6, null,
-      HOURLY);
+    Map<String, Date> timeParts6 = getTimePartitionByOffsets(getDatePartitionKey(), -2, itPart.getName(), -1,
+      etPart.getName(), -2);
+    final StoragePartitionDesc partSpec6 = new StoragePartitionDesc(cubeFact.getName(), timeParts6, null, HOURLY);
 
     client.addPartition(partSpec6, c1);
     client.addPartition(partSpec6, c2);
@@ -1292,21 +1153,17 @@ public class TestCubeMetastoreClient {
 
     assertSameTimelines(factName, storages, HOURLY, partColNames);
 
-    assertTimeline(timelineDt, timelineDtC2, HOURLY, nowMinus2, nowPlus1, nowMinus1);
-    assertTimeline(timelineEt, timelineEtC2, HOURLY, nowMinus2, nowPlus1);
-    assertTimeline(timelineIt, timelineItC2, HOURLY, nowMinus1, nowPlus1);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, -2, 1, -1);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, -2, 1);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, -1, 1);
 
-    Map<String, Date> timeParts7 = new HashMap<String, Date>();
-    timeParts7.put(TestCubeMetastoreClient.getDatePartitionKey(), nowMinus5);
-    timeParts7.put(itPart.getName(), nowMinus5);
-    timeParts7.put(etPart.getName(), nowMinus5);
-    final StoragePartitionDesc partSpec7 = new StoragePartitionDesc(cubeFact.getName(), timeParts7, null,
-      HOURLY);
+    Map<String, Date> timeParts7 = getTimePartitionByOffsets(getDatePartitionKey(), -5, itPart.getName(), -5,
+      etPart.getName(), -5);
+    final StoragePartitionDesc partSpec7 = new StoragePartitionDesc(cubeFact.getName(), timeParts7, null, HOURLY);
 
     client.addPartition(partSpec7, c1);
     client.addPartition(partSpec7, c2);
 
-
     List<Partition> c1Parts = client.getAllParts(c1TableName);
     List<Partition> c2Parts = client.getAllParts(c2TableName);
 
@@ -1330,70 +1187,67 @@ public class TestCubeMetastoreClient {
 
     assertSameTimelines(factName, storages, HOURLY, partColNames);
 
-    assertTimeline(timelineDt, timelineDtC2, HOURLY, nowMinus5, nowPlus1, nowMinus4, nowMinus3, nowMinus1);
-    assertTimeline(timelineEt, timelineEtC2, HOURLY, nowMinus5, nowPlus1, nowMinus4, nowMinus3);
-    assertTimeline(timelineIt, timelineItC2, HOURLY, nowMinus5, nowPlus1, nowMinus4, nowMinus3, nowMinus2);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, -5, 1, -4, -3, -1);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, -5, 1, -4, -3);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, -5, 1, -4, -3, -2);
 
     assertNoPartitionNamedLatest(c1TableName, partColNames);
     assertSameTimelines(factName, storages, HOURLY, partColNames);
     assertEquals(Hive.get(client.getConf()).getTable(c1TableName).getParameters().get(
-      MetastoreUtil.getPartitionTimelineCachePresenceKey()), "true");
+      getPartitionTimelineCachePresenceKey()), "true");
     assertEquals(Hive.get(client.getConf()).getTable(c2TableName).getParameters().get(
-      MetastoreUtil.getPartitionTimelineCachePresenceKey()), "true");
+      getPartitionTimelineCachePresenceKey()), "true");
 
     // alter tables and see timeline still exists
     client.alterCubeFactTable(factName, cubeFact, storageTables);
     assertSameTimelines(factName, storages, HOURLY, partColNames);
     assertEquals(Hive.get(client.getConf()).getTable(c1TableName).getParameters().get(
-      MetastoreUtil.getPartitionTimelineCachePresenceKey()), "true");
+      getPartitionTimelineCachePresenceKey()), "true");
     assertEquals(Hive.get(client.getConf()).getTable(c2TableName).getParameters().get(
-      MetastoreUtil.getPartitionTimelineCachePresenceKey()), "true");
+      getPartitionTimelineCachePresenceKey()), "true");
 
 
     client.dropPartition(cubeFact.getName(), c1, timeParts5, null, HOURLY);
     client.dropPartition(cubeFact.getName(), c2, timeParts5, null, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 6);
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1,
-      TestCubeMetastoreClient.getDatePartitionKey()));
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, itPart.getName()));
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, etPart.getName()));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, getDatePartitionKey()));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, itPart.getName()));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, etPart.getName()));
 
     assertNoPartitionNamedLatest(c1TableName, partColNames);
     assertSameTimelines(factName, storages, HOURLY, partColNames);
 
-    assertTimeline(timelineDt, timelineDtC2, HOURLY, nowMinus5, now, nowMinus4, nowMinus3, nowMinus1);
-    assertTimeline(timelineEt, timelineEtC2, HOURLY, nowMinus5, nowPlus1, nowMinus4, nowMinus3);
-    assertTimeline(timelineIt, timelineItC2, HOURLY, nowMinus5, nowPlus1, nowMinus4, nowMinus3, nowMinus2);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, -5, 0, -4, -3, -1);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, -5, 1, -4, -3);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, -5, 1, -4, -3, -2);
 
 
     client.dropPartition(cubeFact.getName(), c1, timeParts7, null, HOURLY);
     client.dropPartition(cubeFact.getName(), c2, timeParts7, null, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 5);
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1,
-      TestCubeMetastoreClient.getDatePartitionKey()));
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, itPart.getName()));
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, etPart.getName()));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, getDatePartitionKey()));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, itPart.getName()));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, etPart.getName()));
 
     assertNoPartitionNamedLatest(c1TableName, partColNames);
     assertSameTimelines(factName, storages, HOURLY, partColNames);
-    assertTimeline(timelineDt, timelineDtC2, HOURLY, nowMinus2, now, nowMinus1);
-    assertTimeline(timelineEt, timelineEtC2, HOURLY, nowMinus2, nowPlus1);
-    assertTimeline(timelineIt, timelineItC2, HOURLY, nowMinus1, nowPlus1);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, -2, 0, -1);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, -2, 1);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, -1, 1);
 
 
     client.dropPartition(cubeFact.getName(), c1, timeParts2, nonTimeSpec, HOURLY);
     client.dropPartition(cubeFact.getName(), c2, timeParts2, nonTimeSpec, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 4);
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1,
-      TestCubeMetastoreClient.getDatePartitionKey()));
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, itPart.getName()));
-    Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, etPart.getName()));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, getDatePartitionKey()));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, itPart.getName()));
+    assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, etPart.getName()));
 
     assertNoPartitionNamedLatest(c1TableName, partColNames);
     assertSameTimelines(factName, storages, HOURLY, partColNames);
-    assertTimeline(timelineDt, timelineDtC2, HOURLY, nowMinus2, now, nowMinus1);
-    assertTimeline(timelineEt, timelineEtC2, HOURLY, nowMinus2, now);
-    assertTimeline(timelineIt, timelineItC2, HOURLY, nowMinus1, nowPlus1);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, -2, 0, -1);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, -2, 0);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, -1, 1);
 
     client.dropPartition(cubeFact.getName(), c1, timeParts4, null, HOURLY);
     client.dropPartition(cubeFact.getName(), c2, timeParts4, null, HOURLY);
@@ -1401,28 +1255,28 @@ public class TestCubeMetastoreClient {
 
     assertNoPartitionNamedLatest(c1TableName, partColNames);
     assertSameTimelines(factName, storages, HOURLY, partColNames);
-    assertTimeline(timelineDt, timelineDtC2, HOURLY, nowMinus2, now, nowMinus1);
-    assertTimeline(timelineEt, timelineEtC2, HOURLY, nowMinus2, now, nowMinus1);
-    assertTimeline(timelineIt, timelineItC2, HOURLY, nowMinus1, now);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, -2, 0, -1);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, -2, 0, -1);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, -1, 0);
     client.dropPartition(cubeFact.getName(), c1, timeParts3, nonTimeSpec, HOURLY);
     client.dropPartition(cubeFact.getName(), c2, timeParts3, nonTimeSpec, HOURLY);
     assertSameTimelines(factName, storages, HOURLY, partColNames);
-    assertTimeline(timelineDt, timelineDtC2, HOURLY, nowMinus2, now, nowMinus1);
-    assertTimeline(timelineEt, timelineEtC2, HOURLY, nowMinus2, now, nowMinus1);
-    assertTimeline(timelineIt, timelineItC2, HOURLY, nowMinus1, now);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, -2, 0, -1);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, -2, 0, -1);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, -1, 0);
 
     client.dropPartition(cubeFact.getName(), c1, timeParts6, null, HOURLY);
     client.dropPartition(cubeFact.getName(), c2, timeParts6, null, HOURLY);
     assertSameTimelines(factName, storages, HOURLY, partColNames);
-    assertTimeline(timelineDt, timelineDtC2, HOURLY, now, now);
-    assertTimeline(timelineEt, timelineEtC2, HOURLY, now, now);
-    assertTimeline(timelineIt, timelineItC2, HOURLY, now, now);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, 0, 0);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, 0, 0);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, 0, 0);
     client.dropPartition(cubeFact.getName(), c1, timeParts1, null, HOURLY);
     client.dropPartition(cubeFact.getName(), c2, timeParts1, null, HOURLY);
     assertSameTimelines(factName, storages, HOURLY, partColNames);
-    Assert.assertTrue(timelineDt.isEmpty());
-    Assert.assertTrue(timelineEt.isEmpty());
-    Assert.assertTrue(timelineIt.isEmpty());
+    assertTrue(timelineDt.isEmpty());
+    assertTrue(timelineEt.isEmpty());
+    assertTrue(timelineIt.isEmpty());
 
   }
 
@@ -1438,7 +1292,16 @@ public class TestCubeMetastoreClient {
         "complete name differs at element " + i);
     }
   }
-
+  private void assertTimeline(EndsAndHolesPartitionTimeline endsAndHolesPartitionTimeline,
+    StoreAllPartitionTimeline storeAllPartitionTimeline, UpdatePeriod updatePeriod,
+    int firstOffset, int latestOffset, int... holeOffsets) throws LensException {
+    Date[] holeDates = new Date[holeOffsets.length];
+    for (int i = 0; i < holeOffsets.length; i++) {
+      holeDates[i] = getDateWithOffset(holeOffsets[i]);
+    }
+    assertTimeline(endsAndHolesPartitionTimeline, storeAllPartitionTimeline, updatePeriod,
+      getDateWithOffset(firstOffset), getDateWithOffset(latestOffset), holeDates);
+  }
   private void assertTimeline(EndsAndHolesPartitionTimeline endsAndHolesPartitionTimeline,
     StoreAllPartitionTimeline storeAllPartitionTimeline, UpdatePeriod updatePeriod,
     Date first, Date latest, Date... holes) throws LensException {
@@ -1448,10 +1311,10 

<TRUNCATED>
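
The cleanup above swaps hand-rolled Calendar arithmetic and HashMap plumbing for offset-based helpers, so timeline assertions read as small integers (-2, 0, 1) rather than nowMinus2/now/nowPlus1 fields. Below is a minimal standalone sketch of that idea, assuming a fixed base date and hour-granularity offsets; the class and method names here are illustrative only, since the real helpers live in DateFactory and LensUtil.

import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

public class OffsetDateSketch {

  // A fixed "now"; the test suite pins one base date per run.
  private static final Date BASE = new Date();

  // Resolve an integer offset (in hours) to a Date relative to BASE.
  static Date getDateWithOffset(int hours) {
    Calendar cal = Calendar.getInstance();
    cal.setTime(BASE);
    cal.add(Calendar.HOUR_OF_DAY, hours);
    return cal.getTime();
  }

  // (key, offset) varargs pairs -> map of partition column to resolved Date,
  // mirroring what getTimePartitionByOffsets does in the test above.
  static Map<String, Date> timePartsByOffsets(Object... args) {
    Map<String, Date> parts = new HashMap<>();
    for (int i = 0; i < args.length; i += 2) {
      parts.put((String) args[i], getDateWithOffset((Integer) args[i + 1]));
    }
    return parts;
  }

  public static void main(String[] args) {
    // "dt" at now, "et" one hour ahead, "it" two hours back.
    System.out.println(timePartsByOffsets("dt", 0, "et", 1, "it", -2));
  }
}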

[12/50] [abbrv] lens git commit: LENS-865: Add/delete partition throws NPE when a part col doesn't exist in the table

Posted by sh...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/e5691d8d/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java b/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java
index 0748d5b..fb937c6 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java
@@ -26,13 +26,17 @@ import javax.ws.rs.*;
 import javax.ws.rs.core.MediaType;
 import javax.xml.bind.JAXBElement;
 
-import org.apache.lens.api.*;
+import org.apache.lens.api.APIResult;
 import org.apache.lens.api.APIResult.*;
+import org.apache.lens.api.DateTime;
+import org.apache.lens.api.LensSessionHandle;
+import org.apache.lens.api.StringList;
 import org.apache.lens.api.metastore.*;
 import org.apache.lens.server.LensServices;
 import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.metastore.CubeMetastoreService;
 
+import org.apache.commons.lang.NotImplementedException;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 
@@ -52,11 +56,11 @@ import lombok.extern.slf4j.Slf4j;
 public class MetastoreResource {
   public static final ObjectFactory X_CUBE_OBJECT_FACTORY = new ObjectFactory();
 
-  public CubeMetastoreService getSvc() {
+  public static CubeMetastoreService getSvc() {
     return LensServices.get().getService(CubeMetastoreService.NAME);
   }
 
-  private void checkSessionId(LensSessionHandle sessionHandle) {
+  private static void checkSessionId(LensSessionHandle sessionHandle) {
     if (sessionHandle == null) {
       throw new BadRequestException("Invalid session handle");
     }
@@ -75,6 +79,154 @@ public class MetastoreResource {
     checkNonNullArgs("One partition is null", partitions.getPartition().toArray());
   }
 
+  private static LensException processLensException(LensException exc) {
+    if (exc != null) {
+      exc.buildLensErrorTO(LensServices.get().getErrorCollection());
+    }
+    return exc;
+  }
+
+  public enum Entity {
+    DATABASE {
+      @Override
+      public List<String> doGetAll(LensSessionHandle sessionHandle) throws LensException {
+        return getSvc().getAllDatabases(sessionHandle);
+      }
+
+      @Override
+      public void doDelete(LensSessionHandle sessionid, String entityName, Boolean cascade) throws LensException {
+        if (cascade == null) {
+          throw new NotImplementedException();
+        } else {
+          getSvc().dropDatabase(sessionid, entityName, cascade);
+        }
+      }
+
+    }, STORAGE {
+      @Override
+      public List<String> doGetAll(LensSessionHandle sessionid) throws LensException {
+        return getSvc().getAllStorageNames(sessionid);
+      }
+
+      @Override
+      public void doDelete(LensSessionHandle sessionid, String entityName, Boolean cascade) throws LensException {
+        if (cascade == null) {
+          getSvc().dropStorage(sessionid, entityName);
+        } else {
+          throw new NotImplementedException();
+        }
+      }
+    }, CUBE {
+      @Override
+      public List<String> doGetAll(LensSessionHandle sessionHandle) throws LensException {
+        return getSvc().getAllCubeNames(sessionHandle);
+      }
+
+      @Override
+      public void doDelete(LensSessionHandle sessionid, String entityName, Boolean cascade) throws LensException {
+        if (cascade == null) {
+          getSvc().dropCube(sessionid, entityName);
+        } else {
+          throw new NotImplementedException();
+        }
+      }
+    }, FACT {
+      @Override
+      public List<String> doGetAll(LensSessionHandle sessionid) throws LensException {
+        return getSvc().getAllFactNames(sessionid, null);
+      }
+
+      @Override
+      public void doDelete(LensSessionHandle sessionid, String entityName, Boolean cascade) throws LensException {
+        if (cascade == null) {
+          throw new NotImplementedException();
+        } else {
+          getSvc().dropFactTable(sessionid, entityName, cascade);
+        }
+      }
+    }, DIMENSION {
+      @Override
+      public List<String> doGetAll(LensSessionHandle sessionid) throws LensException {
+        return getSvc().getAllDimensionNames(sessionid);
+      }
+
+      @Override
+      public void doDelete(LensSessionHandle sessionid, String entityName, Boolean cascade) throws LensException {
+        if (cascade == null) {
+          getSvc().dropDimension(sessionid, entityName);
+        } else {
+          throw new NotImplementedException();
+        }
+      }
+    }, DIMTABLE {
+      @Override
+      public List<String> doGetAll(LensSessionHandle sessionid) throws LensException {
+        return getSvc().getAllDimTableNames(sessionid, null);
+      }
+
+      @Override
+      public void doDelete(LensSessionHandle sessionid, String entityName, Boolean cascade) throws LensException {
+        if (cascade == null) {
+          throw new NotImplementedException();
+        } else {
+          getSvc().dropDimensionTable(sessionid, entityName, cascade);
+        }
+      }
+    };
+
+    public abstract List<String> doGetAll(LensSessionHandle sessionid) throws LensException;
+
+    public abstract void doDelete(LensSessionHandle sessionid, String entityName, Boolean cascade) throws LensException;
+
+    public StringList getAll(LensSessionHandle sessionid) {
+      checkSessionId(sessionid);
+      List<String> allNames;
+      try {
+        allNames = doGetAll(sessionid);
+      } catch (LensException e) {
+        throw new WebApplicationException(e);
+      }
+      return new StringList(allNames);
+    }
+
+    public APIResult delete(LensSessionHandle sessionid, String entityName, Boolean cascade) {
+      log.info("Drop {} {} cascade: {}", name(), entityName, cascade);
+      checkSessionId(sessionid);
+      try {
+        doDelete(sessionid, entityName, cascade);
+      } catch (LensException e) {
+        checkTableNotFound(e, entityName);
+        log.error("Error droping {} {}", name(), entityName, e);
+        return failure(processLensException(e));
+      }
+      return success();
+    }
+
+    public APIResult delete(LensSessionHandle sessionid, String entityName) {
+      return delete(sessionid, entityName, null);
+    }
+
+    public APIResult deleteAll(LensSessionHandle sessionid, Boolean cascade) {
+      checkSessionId(sessionid);
+      List<String> entities;
+      int numDeleted = 0;
+      int numExpected = 0;
+      LensException exc = null;
+      try {
+        entities = doGetAll(sessionid);
+        numExpected = entities.size();
+        for (String entity : entities) {
+          doDelete(sessionid, entity, cascade);
+          numDeleted++;
+        }
+      } catch (LensException e) {
+        log.error("Error deleting cubes:", e);
+        exc = e;
+      }
+      return successOrPartialOrFailure(numDeleted, numExpected, processLensException(exc));
+    }
+  }
+
   /**
    * API to know if metastore service is up and running
    *
@@ -96,14 +248,7 @@ public class MetastoreResource {
   @GET
   @Path("databases")
   public StringList getAllDatabases(@QueryParam("sessionid") LensSessionHandle sessionid) {
-    checkSessionId(sessionid);
-    List<String> allNames;
-    try {
-      allNames = getSvc().getAllDatabases(sessionid);
-    } catch (LensException e) {
-      throw new WebApplicationException(e);
-    }
-    return new StringList(allNames);
+    return Entity.DATABASE.getAll(sessionid);
   }
 
   /**
@@ -143,7 +288,7 @@ public class MetastoreResource {
       return success();
     } catch (LensException e) {
       log.error("Error changing current database", e);
-      return failure(e);
+      return failure(processLensException(e));
     }
   }
 
@@ -161,15 +306,7 @@ public class MetastoreResource {
   public APIResult dropDatabase(@QueryParam("sessionid") LensSessionHandle sessionid,
     @PathParam("dbName") String dbName,
     @QueryParam("cascade") boolean cascade) {
-    checkSessionId(sessionid);
-    log.info("Drop database {} cascade?{}", dbName, cascade);
-    try {
-      getSvc().dropDatabase(sessionid, dbName, cascade);
-      return success();
-    } catch (LensException e) {
-      log.error("Error dropping {}", dbName, e);
-      return failure(e);
-    }
+    return Entity.DATABASE.delete(sessionid, dbName, cascade);
   }
 
   /**
@@ -188,13 +325,12 @@ public class MetastoreResource {
     String dbName) {
     checkSessionId(sessionid);
     log.info("Create database {} Ignore Existing? {}", dbName, ignoreIfExisting);
-
     try {
       getSvc().createDatabase(sessionid, dbName, ignoreIfExisting);
       return success();
     } catch (LensException e) {
       log.error("Error creating database {}", dbName, e);
-      return failure(e);
+      return failure(processLensException(e));
     }
   }
 
@@ -265,15 +401,16 @@ public class MetastoreResource {
     @QueryParam("type") @DefaultValue("all") String cubeTypes) {
     checkSessionId(sessionid);
     try {
-      if (cubeTypes.equals("all")) {
+      switch (cubeTypes) {
+      case "all":
         return new StringList(getSvc().getAllCubeNames(sessionid));
-      } else if (cubeTypes.equals("base")) {
+      case "base":
         return new StringList(getSvc().getAllBaseCubeNames(sessionid));
-      } else if (cubeTypes.equals("derived")) {
+      case "derived":
         return new StringList(getSvc().getAllDerivedCubeNames(sessionid));
-      } else if (cubeTypes.equals("queryable")) {
+      case "queryable":
         return new StringList(getSvc().getAllQueryableCubeNames(sessionid));
-      } else {
+      default:
         throw new BadRequestException("Invalid type " + cubeTypes + " Accepted"
           + " values are 'all' or 'base' or 'derived' or 'queryable'");
       }
@@ -294,20 +431,7 @@ public class MetastoreResource {
   @Path("cubes")
   public APIResult deleteAllCubes(@QueryParam("sessionid") LensSessionHandle sessionid) {
     checkSessionId(sessionid);
-    List<String> cubeNames = null;
-    LensException exc = null;
-    int numDeleted = 0;
-    try {
-      cubeNames = getSvc().getAllCubeNames(sessionid);
-      for (String cubeName : cubeNames) {
-        getSvc().dropCube(sessionid, cubeName);
-        numDeleted++;
-      }
-    } catch (LensException e) {
-      log.error("Error deleting cubes:", e);
-      exc = e;
-    }
-    return successOrPartialOrFailure(numDeleted, cubeNames.size(), exc);
+    return Entity.CUBE.deleteAll(sessionid, null);
   }
 
   /**
@@ -330,13 +454,13 @@ public class MetastoreResource {
         checkTableNotFound(e, ((XDerivedCube) cube).getParent());
       }
       log.error("Error creating cube {}", cube.getName(), e);
-      return failure(e);
+      return failure(processLensException(e));
     }
     return success();
   }
 
 
-  private void checkTableNotFound(LensException e, String table) {
+  private static void checkTableNotFound(LensException e, String table) {
     List<String> messages = Lists.newArrayList();
     messages.add(e.getMessage());
     if (e.getCause() instanceof HiveException) {
@@ -372,7 +496,7 @@ public class MetastoreResource {
       }
       checkTableNotFound(e, cube.getName());
       log.error("Error updating cube {}", cube.getName(), e);
-      return failure(e);
+      return failure(processLensException(e));
     }
     return success();
   }
@@ -410,15 +534,7 @@ public class MetastoreResource {
   @Path("/cubes/{cubeName}")
   public APIResult dropCube(@QueryParam("sessionid") LensSessionHandle sessionid,
     @PathParam("cubeName") String cubeName) {
-    checkSessionId(sessionid);
-    try {
-      getSvc().dropCube(sessionid, cubeName);
-    } catch (LensException e) {
-      checkTableNotFound(e, cubeName);
-      log.error("Error droping cube {}", cubeName, e);
-      return failure(e);
-    }
-    return success();
+    return Entity.CUBE.delete(sessionid, cubeName, null);
   }
 
   /**
@@ -432,12 +548,7 @@ public class MetastoreResource {
   @Path("storages")
   public StringList getAllStorages(@QueryParam("sessionid") LensSessionHandle sessionid) {
     checkSessionId(sessionid);
-    try {
-      return new StringList(getSvc().getAllStorageNames(sessionid));
-    } catch (LensException e) {
-      log.error("Error getting storages", e);
-      throw new WebApplicationException(e);
-    }
+    return Entity.STORAGE.getAll(sessionid);
   }
 
   /**
@@ -456,7 +567,7 @@ public class MetastoreResource {
       getSvc().createStorage(sessionid, storage);
     } catch (LensException e) {
       log.error("Error creating storage {}", storage.getName(), e);
-      return failure(e);
+      return failure(processLensException(e));
     }
     return success();
   }
@@ -471,22 +582,7 @@ public class MetastoreResource {
   @DELETE
   @Path("storages")
   public APIResult deleteAllStorages(@QueryParam("sessionid") LensSessionHandle sessionid) {
-    checkSessionId(sessionid);
-    List<String> storageNames = null;
-    LensException exc = null;
-    int numDeleted = 0;
-    try {
-      storageNames = getSvc().getAllStorageNames(sessionid);
-      for (String storageName : storageNames) {
-        getSvc().dropStorage(sessionid, storageName);
-        numDeleted++;
-      }
-    } catch (LensException e) {
-      log.error("Error deleting storages:", e);
-      exc = e;
-    }
-    assert (numDeleted == storageNames.size() || exc != null);
-    return successOrPartialOrFailure(numDeleted, storageNames.size(), exc);
+    return Entity.STORAGE.deleteAll(sessionid, null);
   }
 
   /**
@@ -508,7 +604,7 @@ public class MetastoreResource {
     } catch (LensException e) {
       checkTableNotFound(e, storageName);
       log.error("Error updating storage {}", storageName, e);
-      return failure(e);
+      return failure(processLensException(e));
     }
     return success();
   }
@@ -545,15 +641,7 @@ public class MetastoreResource {
   @Path("/storages/{storage}")
   public APIResult dropStorage(@QueryParam("sessionid") LensSessionHandle sessionid,
     @PathParam("storage") String storageName) {
-    checkSessionId(sessionid);
-    try {
-      getSvc().dropStorage(sessionid, storageName);
-    } catch (LensException e) {
-      checkTableNotFound(e, storageName);
-      log.error("Error dropping storage {}", storageName, e);
-      return failure(e);
-    }
-    return success();
+    return Entity.STORAGE.delete(sessionid, storageName, null);
   }
 
   /**
@@ -566,13 +654,7 @@ public class MetastoreResource {
   @GET
   @Path("dimensions")
   public StringList getAllDimensionNames(@QueryParam("sessionid") LensSessionHandle sessionid) {
-    checkSessionId(sessionid);
-    try {
-      return new StringList(getSvc().getAllDimensionNames(sessionid));
-    } catch (LensException e) {
-      log.error("Error getting dimensions", e);
-      throw new WebApplicationException(e);
-    }
+    return Entity.DIMENSION.getAll(sessionid);
   }
 
   /**
@@ -591,7 +673,7 @@ public class MetastoreResource {
       getSvc().createDimension(sessionid, dimension);
     } catch (LensException e) {
       log.error("Error creating dimension {}", dimension.getName(), e);
-      return failure(e);
+      return failure(processLensException(e));
     }
     return success();
   }
@@ -606,21 +688,7 @@ public class MetastoreResource {
   @DELETE
   @Path("dimensions")
   public APIResult deleteAllDimensions(@QueryParam("sessionid") LensSessionHandle sessionid) {
-    checkSessionId(sessionid);
-    List<String> dimNames = null;
-    int numDeleted = 0;
-    LensException exc = null;
-    try {
-      dimNames = getSvc().getAllDimensionNames(sessionid);
-      for (String dimName : dimNames) {
-        getSvc().dropStorage(sessionid, dimName);
-        numDeleted++;
-      }
-    } catch (LensException e) {
-      log.error("Error deleting dimensions:", e);
-      exc = e;
-    }
-    return successOrPartialOrFailure(numDeleted, dimNames.size(), exc);
+    return Entity.DIMENSION.deleteAll(sessionid, null);
   }
 
   /**
@@ -642,7 +710,7 @@ public class MetastoreResource {
     } catch (LensException e) {
       checkTableNotFound(e, dimName);
       log.error("Error updating dimension {}", dimName, e);
-      return failure(e);
+      return failure(processLensException(e));
     }
     return success();
   }
@@ -679,15 +747,7 @@ public class MetastoreResource {
   @Path("/dimensions/{dimName}")
   public APIResult dropDimension(@QueryParam("sessionid") LensSessionHandle sessionid,
     @PathParam("dimName") String dimName) {
-    checkSessionId(sessionid);
-    try {
-      getSvc().dropDimension(sessionid, dimName);
-    } catch (LensException e) {
-      checkTableNotFound(e, dimName);
-      log.error("Error dropping dimension {}", dimName, e);
-      return failure(e);
-    }
-    return success();
+    return Entity.DIMENSION.delete(sessionid, dimName, null);
   }
 
   /**
@@ -742,7 +802,7 @@ public class MetastoreResource {
   @Path("/facts")
   public StringList getAllFacts(@QueryParam("sessionid") LensSessionHandle sessionid) throws LensException {
     checkSessionId(sessionid);
-    return new StringList(getSvc().getAllFactNames(sessionid, null));
+    return Entity.FACT.getAll(sessionid);
   }
 
   /**
@@ -758,21 +818,7 @@ public class MetastoreResource {
   @Path("facts")
   public APIResult deleteAllFacts(@QueryParam("sessionid") LensSessionHandle sessionid,
     @DefaultValue("false") @QueryParam("cascade") boolean cascade) {
-    checkSessionId(sessionid);
-    List<String> factNames = null;
-    int numDeleted = 0;
-    LensException exc = null;
-    try {
-      factNames = getSvc().getAllCubeNames(sessionid);
-      for (String factName : factNames) {
-        getSvc().dropFactTable(sessionid, factName, cascade);
-        numDeleted++;
-      }
-    } catch (LensException e) {
-      log.error("Error deleting cubes:", e);
-      exc = e;
-    }
-    return successOrPartialOrFailure(numDeleted, factNames.size(), exc);
+    return Entity.FACT.deleteAll(sessionid, cascade);
   }
 
   /**
@@ -816,7 +862,7 @@ public class MetastoreResource {
       getSvc().createFactTable(sessionid, fact);
     } catch (LensException exc) {
       log.error("Exception creating fact:", exc);
-      return failure(exc);
+      return failure(processLensException(exc));
     }
     return success();
   }
@@ -841,7 +887,7 @@ public class MetastoreResource {
     } catch (LensException exc) {
       checkTableNotFound(exc, factName);
       log.error("Error updating fact {}", factName, exc);
-      return failure(exc);
+      return failure(processLensException(exc));
     }
     return success();
   }
@@ -861,15 +907,7 @@ public class MetastoreResource {
     @PathParam("factName") String factName,
     @DefaultValue("false") @QueryParam("cascade") boolean cascade)
     throws LensException {
-    checkSessionId(sessionid);
-    try {
-      getSvc().dropFactTable(sessionid, factName, cascade);
-    } catch (LensException exc) {
-      checkTableNotFound(exc, factName);
-      log.error("Error dropping fact {}", factName, exc);
-      return failure(exc);
-    }
-    return success();
+    return Entity.FACT.delete(sessionid, factName, cascade);
   }
 
   /**
@@ -911,7 +949,7 @@ public class MetastoreResource {
     } catch (LensException exc) {
       checkTableNotFound(exc, factName);
       log.error("Error dropping storages of fact {}", factName, exc);
-      return failure(exc);
+      return failure(processLensException(exc));
     }
     return success();
   }
@@ -936,7 +974,7 @@ public class MetastoreResource {
     } catch (LensException exc) {
       checkTableNotFound(exc, factName);
       log.error("Error adding storage to fact {}", factName, exc);
-      return failure(exc);
+      return failure(processLensException(exc));
     }
     return success();
   }
@@ -962,7 +1000,7 @@ public class MetastoreResource {
     } catch (LensException exc) {
       checkTableNotFound(exc, factName);
       log.error("Error dropping storage of fact {}", factName, exc);
-      return failure(exc);
+      return failure(processLensException(exc));
     }
     return success();
   }
@@ -1032,7 +1070,7 @@ public class MetastoreResource {
     } catch (LensException exc) {
       log.warn("Got exception while dropping partition.", exc);
       checkTableNotFound(exc, factName);
-      return partial(exc);
+      return partial(processLensException(exc));
     }
     return success();
   }
@@ -1060,7 +1098,7 @@ public class MetastoreResource {
     } catch (LensException exc) {
       checkTableNotFound(exc, factName);
       log.error("Error adding partition to storage of fact {}:{}", factName, storage, exc);
-      return failure(exc);
+      return failure(processLensException(exc));
     }
   }
 
@@ -1088,7 +1126,7 @@ public class MetastoreResource {
     } catch (LensException exc) {
       checkTableNotFound(exc, factName);
       log.error("Error adding partition to storage of fact {}:{}", factName, storage, exc);
-      return failure(exc);
+      return failure(processLensException(exc));
     }
     return success();
   }
@@ -1117,7 +1155,7 @@ public class MetastoreResource {
     } catch (LensException exc) {
       checkTableNotFound(exc, factName);
       log.error("Error adding partition to storage of fact {}:{}", factName, storage, exc);
-      return failure(exc);
+      return failure(processLensException(exc));
     }
   }
 
@@ -1145,7 +1183,7 @@ public class MetastoreResource {
     } catch (LensException exc) {
       checkTableNotFound(exc, factName);
       log.error("Error adding partition to storage of fact {}:{}", factName, storage, exc);
-      return failure(exc);
+      return failure(processLensException(exc));
     }
     return success();
   }
@@ -1174,7 +1212,7 @@ public class MetastoreResource {
     } catch (LensException e) {
       checkTableNotFound(e, factName);
       log.error("Error dropping partition to storage of fact {}:{}", factName, storage, e);
-      return failure(e);
+      return failure(processLensException(e));
     }
     return success();
   }
@@ -1188,7 +1226,7 @@ public class MetastoreResource {
   @GET
   @Path("/dimtables")
   public StringList getAllDims(@QueryParam("sessionid") LensSessionHandle sessionid) throws LensException {
-    return new StringList(getSvc().getAllDimTableNames(sessionid, null));
+    return Entity.DIMTABLE.getAll(sessionid);
   }
 
   /**
@@ -1208,7 +1246,7 @@ public class MetastoreResource {
       getSvc().createDimensionTable(sessionid, dimensionTable);
     } catch (LensException exc) {
       log.error("Error creating cube dimension table {}", dimensionTable.getTableName(), exc);
-      return failure(exc);
+      return failure(processLensException(exc));
     }
     return success();
   }
@@ -1232,7 +1270,7 @@ public class MetastoreResource {
     } catch (LensException exc) {
       checkTableNotFound(exc, dimTableName);
       log.error("Error updating cube dimension table {}", dimTableName, exc);
-      return failure(exc);
+      return failure(processLensException(exc));
     }
     return success();
   }
@@ -1251,15 +1289,7 @@ public class MetastoreResource {
   public APIResult dropDimensionTable(@QueryParam("sessionid") LensSessionHandle sessionid,
     @PathParam("dimTableName") String dimension,
     @QueryParam("cascade") boolean cascade) {
-    checkSessionId(sessionid);
-    try {
-      getSvc().dropDimensionTable(sessionid, dimension, cascade);
-    } catch (LensException e) {
-      checkTableNotFound(e, dimension);
-      log.error("Error dropping cube dimension table {}", dimension, e);
-      return failure(e);
-    }
-    return success();
+    return Entity.DIMTABLE.delete(sessionid, dimension, cascade);
   }
 
   /**
@@ -1320,7 +1350,7 @@ public class MetastoreResource {
     } catch (LensException e) {
       checkTableNotFound(e, dimTableName);
       log.error("Error creating dimension table storage {}:{}", dimTableName, storageTbl.getStorageName(), e);
-      return failure(e);
+      return failure(processLensException(e));
     }
     return success();
   }
@@ -1360,7 +1390,7 @@ public class MetastoreResource {
     } catch (LensException exc) {
       checkTableNotFound(exc, dimTableName);
       log.error("Error dropping storages of dimension table {}", dimTableName, exc);
-      return failure(exc);
+      return failure(processLensException(exc));
     }
     return success();
   }
@@ -1385,7 +1415,7 @@ public class MetastoreResource {
     } catch (LensException exc) {
       checkTableNotFound(exc, dimTableName);
       log.error("Error dropping storage of dimension table {}:{}", dimTableName, storage, exc);
-      return failure(exc);
+      return failure(processLensException(exc));
     }
     return success();
   }
@@ -1434,7 +1464,7 @@ public class MetastoreResource {
       getSvc().dropPartitionFromStorageByFilter(sessionid, dimTableName, storage, filter);
     } catch (LensException exc) {
       log.error("Error dropping partition on storage of dimension table {}:{}", dimTableName, storage, exc);
-      return partial(exc);
+      return partial(processLensException(exc));
     }
     return success();
   }
@@ -1461,7 +1491,7 @@ public class MetastoreResource {
         values);
     } catch (LensException exc) {
       log.error("Error dropping partitions on storage of dimension table {}:{}", dimTableName, storage, exc);
-      return failure(exc);
+      return failure(processLensException(exc));
     }
     return success();
   }
@@ -1489,7 +1519,7 @@ public class MetastoreResource {
         1);
     } catch (LensException exc) {
       log.error("Error adding partition to storage of dimension table {}:{}", dimTableName, storage, exc);
-      return failure(exc);
+      return failure(processLensException(exc));
     }
   }
 
@@ -1516,7 +1546,7 @@ public class MetastoreResource {
       getSvc().updatePartition(sessionid, dimTableName, storage, partition);
     } catch (LensException exc) {
       log.error("Error adding partition to storage of dimension table {}:{}", dimTableName, storage, exc);
-      return failure(exc);
+      return failure(processLensException(exc));
     }
     return success();
   }
@@ -1544,7 +1574,7 @@ public class MetastoreResource {
         partitions.getPartition().size());
     } catch (LensException exc) {
       log.error("Error adding partition to storage of dimension table {}:{}", dimTableName, storage, exc);
-      return failure(exc);
+      return failure(processLensException(exc));
     }
   }
 
@@ -1571,7 +1601,7 @@ public class MetastoreResource {
       getSvc().updatePartitions(sessionid, dimTableName, storage, partitions);
     } catch (LensException exc) {
       log.error("Error adding partition to storage of dimension table {}:{}", dimTableName, storage, exc);
-      return failure(exc);
+      return failure(processLensException(exc));
     }
     return success();
   }
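
The refactor above centralizes the repeated get-all/delete/delete-all boilerplate in an Entity enum and routes every failure through processLensException; the removed lines also show two latent bugs it fixes along the way (deleteAllFacts iterated getAllCubeNames, and deleteAllDimensions called dropStorage per dimension). A condensed sketch of the enum-as-template pattern follows, with stubbed types standing in for CubeMetastoreService, LensException and APIResult:

import java.util.Arrays;
import java.util.List;

enum EntitySketch {
  CUBE {
    List<String> doGetAll() { return Arrays.asList("sales_cube"); }
    void doDelete(String name) { /* would call getSvc().dropCube(...) */ }
  },
  STORAGE {
    List<String> doGetAll() { return Arrays.asList("c1", "c2"); }
    void doDelete(String name) { /* would call getSvc().dropStorage(...) */ }
  };

  abstract List<String> doGetAll();

  abstract void doDelete(String name);

  // Shared delete-all logic, written once instead of per endpoint.
  String deleteAll() {
    int deleted = 0;
    List<String> all = doGetAll();
    for (String name : all) {
      doDelete(name);
      deleted++;
    }
    return deleted == all.size() ? "SUCCEEDED" : "PARTIAL";
  }

  public static void main(String[] args) {
    System.out.println(CUBE.deleteAll()); // prints SUCCEEDED
  }
}

Each constant contributes only the entity-specific calls; session checks, error wrapping, and partial-result accounting stay in one place on the enum.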

http://git-wip-us.apache.org/repos/asf/lens/blob/e5691d8d/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
index e6a798a..5f9f639 100644
--- a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
@@ -477,9 +477,8 @@ public class TestMetastoreService extends LensJerseyTest {
       result = target.queryParam("sessionid", lensSessionId).request(
         mediaType).post(Entity.xml(cubeObjectFactory.createXCube(dcube)), APIResult.class);
       assertEquals(result.getStatus(), Status.FAILED);
-      assertEquals(result.getMessage(), "ERROR_IN_ENTITY_DEFINITION[Derived cube invalid: "
-        + "Measures [random_measure] and Dim Attributes "
-        + "[random_dim] were not present in parent cube testcube1]");
+      assertEquals(result.getMessage(), "Problem in submitting entity: Derived cube invalid: Measures "
+        + "[random_measure] and Dim Attributes [random_dim] were not present in parent cube testcube1");
       // create derived cube
       dcube = createDerivedCube("testderived", "testCube1", false);
       result = target.queryParam("sessionid", lensSessionId).request(
@@ -856,9 +855,8 @@ public class TestMetastoreService extends LensJerseyTest {
       result =
         target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(element), APIResult.class);
       assertEquals(result.getStatus(), Status.FAILED);
-      assertEquals(result.getMessage(), "ERROR_IN_ENTITY_DEFINITION[Derived cube invalid: "
-        + "Measures [random_measure] and Dim Attributes "
-        + "[random_dim] were not present in parent cube test_update]");
+      assertEquals(result.getMessage(), "Problem in submitting entity: Derived cube invalid: Measures "
+        + "[random_measure] and Dim Attributes [random_dim] were not present in parent cube test_update");
       dcube = createDerivedCube("test_update_derived", cubeName, false);
       // Create this cube first
       element = cubeObjectFactory.createXCube(dcube);
@@ -1960,14 +1958,15 @@ public class TestMetastoreService extends LensJerseyTest {
   private XPartition createPartition(String cubeTableName, Date partDate) {
     return createPartition(cubeTableName, partDate, "dt");
   }
-
-  private XPartition createPartition(String cubeTableName, Date partDate, final String timeDimension) {
-
+  private XTimePartSpecElement createTimePartSpecElement(Date partDate, String timeDimension) {
     XTimePartSpecElement timePart = cubeObjectFactory.createXTimePartSpecElement();
     timePart.setKey(timeDimension);
     timePart.setValue(JAXBUtils.getXMLGregorianCalendar(HOURLY.truncate(partDate)));
+    return timePart;
+  }
+  private XPartition createPartition(String cubeTableName, Date partDate, final String timeDimension) {
 
-    return createPartition(cubeTableName, Arrays.asList(timePart));
+    return createPartition(cubeTableName, Arrays.asList(createTimePartSpecElement(partDate, timeDimension)));
   }
 
   private XPartition createPartition(String cubeTableName, final List<XTimePartSpecElement> timePartSpecs) {
@@ -2116,8 +2115,18 @@ public class TestMetastoreService extends LensJerseyTest {
         .post(null);
       Assert.assertEquals(resp.getStatus(), 400);
 
-      // Add a partition
+      // Add a partition referencing a non-existent time part column
       final Date partDate = new Date();
+      XPartition xp2 = createPartition(table, partDate);
+      xp2.getTimePartitionSpec().getPartSpecElement()
+        .add(createTimePartSpecElement(partDate, "non_existant_time_part"));
+      partAddResult = target().path("metastore/facts/").path(table).path("storages/S2/partition")
+        .queryParam("sessionid", lensSessionId).request(mediaType)
+        .post(Entity.xml(cubeObjectFactory.createXPartition(xp2)), APIResult.class);
+      assertEquals(partAddResult.getStatus(), Status.FAILED);
+      assertEquals(partAddResult.getMessage(), "No timeline found for fact=testFactStoragePartitions, storage=S2, "
+        + "update period=HOURLY, partition column=non_existant_time_part.");
+      // Add a partition
       XPartition xp = createPartition(table, partDate);
       partAddResult = target().path("metastore/facts/").path(table).path("storages/S2/partition")
         .queryParam("sessionid", lensSessionId).request(mediaType)
@@ -2205,7 +2214,14 @@ public class TestMetastoreService extends LensJerseyTest {
         .post(Entity.xml(cubeObjectFactory.createXPartition(xp)));
       assertXMLError(resp);
 
-
+      // Try adding in batch, with one partition referencing a non-existent partition column.
+      partAddResult = target().path("metastore/facts/").path(table).path("storages/S2/partitions")
+        .queryParam("sessionid", lensSessionId).request(mediaType)
+        .post(Entity.xml(cubeObjectFactory.createXPartitionList(toXPartitionList(xp2))),
+          APIResult.class);
+      assertEquals(partAddResult.getStatus(), Status.FAILED);
+      assertEquals(partAddResult.getMessage(), "No timeline found for fact=testFactStoragePartitions, storage=S2, "
+        + "update period=HOURLY, partition column=non_existant_time_part.");
       // Add in batch
       partAddResult = target().path("metastore/facts/").path(table).path("storages/S2/partitions")
         .queryParam("sessionid", lensSessionId).request(mediaType)

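The two negative tests added above (single add and batch add) assert the same failure: a partition timeline is looked up per (fact, storage, update period, partition column), and an unknown column has no timeline. A minimal sketch of that lookup shape, purely illustrative; the registry class, its map layout, and the exception type are assumptions for the sketch, not Lens's actual implementation:

    import java.util.HashMap;
    import java.util.Map;

    class TimelineRegistrySketch {
      // One timeline per (fact, storage, update period, partition column) tuple,
      // the same four fields the asserted error message enumerates.
      private final Map<String, Object> timelines = new HashMap<>();

      private static String key(String fact, String storage, String period, String partCol) {
        return fact + "/" + storage + "/" + period + "/" + partCol;
      }

      Object getTimeline(String fact, String storage, String period, String partCol) {
        Object timeline = timelines.get(key(fact, storage, period, partCol));
        if (timeline == null) {
          // Mirrors the message asserted in both tests above.
          throw new IllegalStateException("No timeline found for fact=" + fact + ", storage=" + storage
            + ", update period=" + period + ", partition column=" + partCol + ".");
        }
        return timeline;
      }
    }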

[36/50] [abbrv] lens git commit: LENS-885: Cleanup of Cube test cases

Posted by sh...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
index e5dbde7..c6ce6ad 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
@@ -19,9 +19,9 @@
 
 package org.apache.lens.cube.metadata;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.metadata.MetastoreUtil.*;
 import static org.apache.lens.cube.metadata.UpdatePeriod.*;
-import static org.apache.lens.cube.parse.CubeTestSetup.DateOffsetProvider;
 import static org.apache.lens.server.api.util.LensUtil.getHashMap;
 
 import static org.testng.Assert.*;
@@ -35,7 +35,6 @@ import org.apache.lens.cube.metadata.timeline.EndsAndHolesPartitionTimeline;
 import org.apache.lens.cube.metadata.timeline.PartitionTimeline;
 import org.apache.lens.cube.metadata.timeline.StoreAllPartitionTimeline;
 import org.apache.lens.cube.metadata.timeline.TestPartitionTimelines;
-import org.apache.lens.cube.parse.TimeRange;
 import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.util.LensUtil;
 
@@ -100,11 +99,7 @@ public class TestCubeMetastoreClient {
   private static Set<ExprColumn> cubeExpressions = new HashSet<>();
   private static Set<JoinChain> joinChains = new HashSet<>();
   private static Set<ExprColumn> dimExpressions = new HashSet<>();
-  private static DateOffsetProvider dateOffsetProvider = new DateOffsetProvider(HOURLY);
 
-  public static Date getDateWithOffset(int i) {
-    return dateOffsetProvider.get(i);
-  }
 
   /**
    * Get the date partition as field schema
@@ -127,7 +122,7 @@ public class TestCubeMetastoreClient {
   private static HashMap<String, Date> getTimePartitionByOffsets(Object... args) {
     for (int i = 1; i < args.length; i += 2) {
       if (args[i] instanceof Integer) {
-        args[i] = getDateWithOffset((Integer) args[i]);
+        args[i] = getDateWithOffset(HOURLY, (Integer) args[i]);
       }
     }
     return getHashMap(args);
@@ -169,13 +164,13 @@ public class TestCubeMetastoreClient {
       new FieldSchema("msr4", "bigint", "fourth measure"), "Measure4", null, "COUNT", null));
     cubeMeasures.add(new ColumnMeasure(
       new FieldSchema("msrstarttime", "int", "measure with start time"),
-      "Measure With Starttime", null, null, null, getDateWithOffset(0), null, null, 0.0, 999999.0));
+      "Measure With Starttime", null, null, null, NOW, null, null, 0.0, 999999.0));
     cubeMeasures.add(new ColumnMeasure(
       new FieldSchema("msrendtime", "float", "measure with end time"),
-      "Measure With Endtime", null, "SUM", "RS", getDateWithOffset(0), getDateWithOffset(0), null));
+      "Measure With Endtime", null, "SUM", "RS", NOW, NOW, null));
     cubeMeasures.add(new ColumnMeasure(
       new FieldSchema("msrcost", "double", "measure with cost"), "Measure With cost",
-      null, "MAX", null, getDateWithOffset(0), getDateWithOffset(0), 100.0));
+      null, "MAX", null, NOW, NOW, 100.0));
     cubeMeasures.add(new ColumnMeasure(
       new FieldSchema("msrcost2", "bigint", "measure with cost"),
       "Measure With cost2", null, "MAX", null, null, null, 100.0, 0.0, 999999999999999999999999999.0));
@@ -234,12 +229,12 @@ public class TestCubeMetastoreClient {
 
     List<CubeDimAttribute> locationHierarchyWithStartTime = new ArrayList<>();
     locationHierarchyWithStartTime.add(new ReferencedDimAtrribute(new FieldSchema("zipcode2", "int", "zip"),
-      "Zip refer2", new TableReference("zipdim", "zipcode"), getDateWithOffset(0), getDateWithOffset(0),
+      "Zip refer2", new TableReference("zipdim", "zipcode"), NOW, NOW,
       100.0, true, 1000L));
     locationHierarchyWithStartTime.add(new ReferencedDimAtrribute(new FieldSchema("cityid2", "int", "city"),
-      "City refer2", new TableReference("citydim", "id"), getDateWithOffset(0), null, null));
+      "City refer2", new TableReference("citydim", "id"), NOW, null, null));
     locationHierarchyWithStartTime.add(new ReferencedDimAtrribute(new FieldSchema("stateid2", "int", "state"),
-      "state refer2", new TableReference("statedim", "id"), getDateWithOffset(0), null, 100.0));
+      "state refer2", new TableReference("statedim", "id"), NOW, null, 100.0));
     locationHierarchyWithStartTime.add(new ReferencedDimAtrribute(new FieldSchema("countryid2", "int", "country"),
       "Country refer2", new TableReference("countrydim", "id"), null, null, null));
     locationHierarchyWithStartTime.add(new BaseDimAttribute(new FieldSchema("regionname2", "string", "region"),
@@ -248,9 +243,10 @@ public class TestCubeMetastoreClient {
     cubeDimensions
       .add(new HierarchicalDimAttribute("location2", "localtion hierarchy2", locationHierarchyWithStartTime));
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("dim1startTime", "string", "basedim"),
-      "Dim With starttime", getDateWithOffset(0), null, 100.0));
+      "Dim With starttime", NOW, null, 100.0));
     cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("dim2start", "string", "ref dim"),
-      "Dim2 with starttime", new TableReference("testdim2", "id"), getDateWithOffset(0), getDateWithOffset(0), 100.0));
+      "Dim2 with starttime", new TableReference("testdim2", "id"),
+      NOW, NOW, 100.0));
 
     List<TableReference> multiRefs = new ArrayList<>();
     multiRefs.add(new TableReference("testdim2", "id"));
@@ -260,12 +256,12 @@ public class TestCubeMetastoreClient {
     cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("dim3", "string", "multi ref dim"), "Dim3 refer",
       multiRefs));
     cubeDimensions.add(new ReferencedDimAtrribute(new FieldSchema("dim3start", "string", "multi ref dim"),
-      "Dim3 with starttime", multiRefs, getDateWithOffset(0), null, 100.0));
+      "Dim3 with starttime", multiRefs, NOW, null, 100.0));
 
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("region", "string", "region dim"), "region", null, null,
       null, null, regions));
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("regionstart", "string", "region dim"),
-      "Region with starttime", getDateWithOffset(0), null, 100.0, null, regions));
+      "Region with starttime", NOW, null, 100.0, null, regions));
     JoinChain zipCity = new JoinChain("cityFromZip", "Zip City", "zip city desc");
     List<TableReference> chain = new ArrayList<>();
     chain.add(new TableReference(cubeName, "zipcode"));
@@ -1317,13 +1313,12 @@ public class TestCubeMetastoreClient {
     StoreAllPartitionTimeline storeAllPartitionTimeline, UpdatePeriod updatePeriod,
     int firstOffset, int latestOffset, int... holeOffsets) throws LensException {
     Date[] holeDates = new Date[holeOffsets.length];
-    for (int i = 0; i < holeOffsets.length; i++) {
-      holeDates[i] = getDateWithOffset(holeOffsets[i]);
+    for (int i = 0; i < holeOffsets.length; i++) {
+      holeDates[i] = getDateWithOffset(HOURLY, holeOffsets[i]);
     }
     assertTimeline(endsAndHolesPartitionTimeline, storeAllPartitionTimeline, updatePeriod,
-      getDateWithOffset(firstOffset), getDateWithOffset(latestOffset), holeDates);
+      getDateWithOffset(HOURLY, firstOffset), getDateWithOffset(HOURLY, latestOffset), holeDates);
   }
-
   private void assertTimeline(EndsAndHolesPartitionTimeline endsAndHolesPartitionTimeline,
     StoreAllPartitionTimeline storeAllPartitionTimeline, UpdatePeriod updatePeriod,
     Date first, Date latest, Date... holes) throws LensException {
@@ -1397,10 +1392,10 @@ public class TestCubeMetastoreClient {
     return values;
   }
 
-  private TimePartition[] toPartitionArray(UpdatePeriod updatePeriod, Date... dates) throws LensException {
-    TimePartition[] values = new TimePartition[dates.length];
-    for (int i = 0; i < dates.length; i++) {
-      values[i] = TimePartition.of(updatePeriod, dates[i]);
+  private TimePartition[] toPartitionArray(UpdatePeriod updatePeriod, int... offsets) throws LensException {
+    TimePartition[] values = new TimePartition[offsets.length];
+    for (int i = 0; i < offsets.length; i++) {
+      values[i] = TimePartition.of(updatePeriod, getDateWithOffset(updatePeriod, offsets[i]));
     }
     return values;
   }
@@ -1709,8 +1704,7 @@ public class TestCubeMetastoreClient {
     List<Partition> parts = client.getPartitionsByFilter(storageTableName, "dt='latest'");
     assertEquals(1, parts.size());
     assertEquals(TextInputFormat.class.getCanonicalName(), parts.get(0).getInputFormatClass().getCanonicalName());
-    assertEquals(parts.get(0).getParameters().get(getLatestPartTimestampKey("dt")),
-      HOURLY.format(getDateWithOffset(0)));
+    assertEquals(parts.get(0).getParameters().get(getLatestPartTimestampKey("dt")), getDateStringWithOffset(HOURLY, 0));
 
     client.dropPartition(cubeDim.getName(), c1, timeParts, null, HOURLY);
     assertFalse(client.dimPartitionExists(cubeDim.getName(), c1, timeParts));
@@ -1769,7 +1763,7 @@ public class TestCubeMetastoreClient {
     String storageTableName = getFactOrDimtableStorageTableName(dimName, c1);
     assertFalse(client.dimTableLatestPartitionExists(storageTableName));
 
-    Map<String, Date> timePartsNow = getHashMap(getDatePartitionKey(), getDateWithOffset(0));
+    Map<String, Date> timePartsNow = getHashMap(getDatePartitionKey(), NOW);
     StoragePartitionDesc sPartSpec0 = new StoragePartitionDesc(cubeDim.getName(), timePartsNow, null, HOURLY);
 
     client.addPartition(sPartSpec0, c1);
@@ -1782,7 +1776,7 @@ public class TestCubeMetastoreClient {
     Partition latestPart = parts.get(0);
     assertEquals(latestPart.getInputFormatClass(), TextInputFormat.class);
     assertFalse(latestPart.getCols().contains(newcol));
-    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")), HOURLY.format(getDateWithOffset(0)));
+    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")), getDateStringWithOffset(HOURLY, 0));
 
     // Partition with different schema
     cubeDim.alterColumn(newcol);
@@ -1805,7 +1799,7 @@ public class TestCubeMetastoreClient {
     latestPart = parts.get(0);
     assertEquals(latestPart.getInputFormatClass(), SequenceFileInputFormat.class);
     assertTrue(latestPart.getCols().contains(newcol));
-    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")), HOURLY.format(getDateWithOffset(1)));
+    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")), getDateStringWithOffset(HOURLY, 1));
 
     // add one more partition
     Map<String, Date> timeParts2 = getTimePartitionByOffsets(getDatePartitionKey(), 2);
@@ -1824,7 +1818,7 @@ public class TestCubeMetastoreClient {
     latestPart = parts.get(0);
     assertEquals(latestPart.getInputFormatClass(), TextInputFormat.class);
     assertTrue(latestPart.getCols().contains(newcol));
-    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")), HOURLY.format(getDateWithOffset(2)));
+    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")), getDateStringWithOffset(HOURLY, 2));
 
     // drop the last added partition
     client.dropPartition(cubeDim.getName(), c1, timeParts2, null, HOURLY);
@@ -1835,8 +1829,7 @@ public class TestCubeMetastoreClient {
     assertEquals(parts.size(), 1);
     latestPart = parts.get(0);
     assertEquals(latestPart.getInputFormatClass(), SequenceFileInputFormat.class);
-    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")),
-      HOURLY.format(getDateWithOffset(1)));
+    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")), getDateStringWithOffset(HOURLY, 1));
     assertEquals(client.getAllParts(storageTableName).size(), 3);
 
     // drop the first partition, leaving the middle.
@@ -1848,8 +1841,7 @@ public class TestCubeMetastoreClient {
     assertEquals(parts.size(), 1);
     latestPart = parts.get(0);
     assertEquals(latestPart.getInputFormatClass(), SequenceFileInputFormat.class);
-    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")),
-      HOURLY.format(getDateWithOffset(1)));
+    assertEquals(latestPart.getParameters().get(getLatestPartTimestampKey("dt")), getDateStringWithOffset(HOURLY, 1));
     assertEquals(client.getAllParts(storageTableName).size(), 2);
 
     client.dropPartition(cubeDim.getName(), c1, timeParts1, null, HOURLY);
@@ -1892,52 +1884,52 @@ public class TestCubeMetastoreClient {
     Map<String, Date> timeParts = new HashMap<>();
     Map<String, String> nonTimeParts = new HashMap<>();
 
-    timeParts.put(getDatePartitionKey(), getDateWithOffset(0));
+    timeParts.put(getDatePartitionKey(), NOW);
     nonTimeParts.put("region", "asia");
     StoragePartitionDesc sPartSpec = new StoragePartitionDesc(dimName, timeParts, nonTimeParts, HOURLY);
     client.addPartition(sPartSpec, c3);
-    expectedLatestValues.put("asia", getDateWithOffset(0));
+    expectedLatestValues.put("asia", NOW);
     assertLatestForRegions(storageTableName, expectedLatestValues);
 
-    timeParts.put(getDatePartitionKey(), getDateWithOffset(-1));
+    timeParts.put(getDatePartitionKey(), getDateWithOffset(HOURLY, -1));
     nonTimeParts.put("region", "africa");
     sPartSpec = new StoragePartitionDesc(dimName, timeParts, nonTimeParts, HOURLY);
     client.addPartition(sPartSpec, c3);
-    expectedLatestValues.put("asia", getDateWithOffset(0));
-    expectedLatestValues.put("africa", getDateWithOffset(-1));
+    expectedLatestValues.put("asia", NOW);
+    expectedLatestValues.put("africa", getDateWithOffset(HOURLY, -1));
     assertLatestForRegions(storageTableName, expectedLatestValues);
 
-    timeParts.put(getDatePartitionKey(), getDateWithOffset(1));
+    timeParts.put(getDatePartitionKey(), getDateWithOffset(HOURLY, 1));
     nonTimeParts.put("region", "africa");
     sPartSpec = new StoragePartitionDesc(dimName, timeParts, nonTimeParts, HOURLY);
     client.addPartition(sPartSpec, c3);
-    expectedLatestValues.put("asia", getDateWithOffset(0));
-    expectedLatestValues.put("africa", getDateWithOffset(1));
+    expectedLatestValues.put("asia", NOW);
+    expectedLatestValues.put("africa", getDateWithOffset(HOURLY, 1));
     assertLatestForRegions(storageTableName, expectedLatestValues);
 
-    timeParts.put(getDatePartitionKey(), getDateWithOffset(3));
+    timeParts.put(getDatePartitionKey(), getDateWithOffset(HOURLY, 3));
     nonTimeParts.put("region", "asia");
     sPartSpec = new StoragePartitionDesc(dimName, timeParts, nonTimeParts, HOURLY);
     client.addPartition(sPartSpec, c3);
-    expectedLatestValues.put("asia", getDateWithOffset(3));
-    expectedLatestValues.put("africa", getDateWithOffset(1));
+    expectedLatestValues.put("asia", getDateWithOffset(HOURLY, 3));
+    expectedLatestValues.put("africa", getDateWithOffset(HOURLY, 1));
     assertLatestForRegions(storageTableName, expectedLatestValues);
 
     client.dropPartition(dimName, c3, timeParts, nonTimeParts, HOURLY);
-    expectedLatestValues.put("asia", getDateWithOffset(0));
-    expectedLatestValues.put("africa", getDateWithOffset(1));
+    expectedLatestValues.put("asia", NOW);
+    expectedLatestValues.put("africa", getDateWithOffset(HOURLY, 1));
     assertLatestForRegions(storageTableName, expectedLatestValues);
 
-    timeParts.put(getDatePartitionKey(), getDateWithOffset(0));
+    timeParts.put(getDatePartitionKey(), NOW);
     client.dropPartition(dimName, c3, timeParts, nonTimeParts, HOURLY);
     expectedLatestValues.remove("asia");
     assertLatestForRegions(storageTableName, expectedLatestValues);
 
     nonTimeParts.put("region", "africa");
-    timeParts.put(getDatePartitionKey(), getDateWithOffset(-1));
+    timeParts.put(getDatePartitionKey(), getDateWithOffset(HOURLY, -1));
     assertLatestForRegions(storageTableName, expectedLatestValues);
 
-    timeParts.put(getDatePartitionKey(), getDateWithOffset(3));
+    timeParts.put(getDatePartitionKey(), getDateWithOffset(HOURLY, 3));
     nonTimeParts.remove("africa");
     assertLatestForRegions(storageTableName, expectedLatestValues);
   }
@@ -2016,8 +2008,7 @@ public class TestCubeMetastoreClient {
     String c1TableName = getFactOrDimtableStorageTableName(cubeDim.getName(), c1);
     assertEquals(client.getAllParts(c1TableName).size(), 8);
 
-    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
-      toPartitionArray(HOURLY, getDateWithOffset(0), getDateWithOffset(0), getDateWithOffset(1)));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null), toPartitionArray(HOURLY, 0, 0, 1));
 
     Map<String, Date> timeParts4 = getTimePartitionByOffsets(getDatePartitionKey(), 0, itPart.getName(), 1,
       etPart.getName(), -1);
@@ -2030,8 +2021,7 @@ public class TestCubeMetastoreClient {
     client.addPartitions(Arrays.asList(partSpec4, partSpec5), c1);
 
     assertEquals(client.getAllParts(c1TableName).size(), 10);
-    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
-      toPartitionArray(HOURLY, getDateWithOffset(1), getDateWithOffset(1), getDateWithOffset(1)));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null), toPartitionArray(HOURLY, 1, 1, 1));
     Map<String, Date> timeParts6 = getTimePartitionByOffsets(getDatePartitionKey(), -2, itPart.getName(), -1,
       etPart.getName(), -2);
     final StoragePartitionDesc partSpec6 = new StoragePartitionDesc(cubeDim.getName(), timeParts6, null, HOURLY);
@@ -2046,28 +2036,23 @@ public class TestCubeMetastoreClient {
 
     client.addPartition(partSpec7, c1);
     assertEquals(client.getAllParts(c1TableName).size(), 12);
-    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
-      toPartitionArray(HOURLY, getDateWithOffset(1), getDateWithOffset(1), getDateWithOffset(1)));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null), toPartitionArray(HOURLY, 1, 1, 1));
 
     client.dropPartition(cubeDim.getName(), c1, timeParts5, null, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 11);
-    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
-      toPartitionArray(HOURLY, getDateWithOffset(0), getDateWithOffset(1), getDateWithOffset(1)));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null), toPartitionArray(HOURLY, 0, 1, 1));
 
     client.dropPartition(cubeDim.getName(), c1, timeParts7, null, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 10);
-    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
-      toPartitionArray(HOURLY, getDateWithOffset(0), getDateWithOffset(1), getDateWithOffset(1)));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null), toPartitionArray(HOURLY, 0, 1, 1));
 
     client.dropPartition(cubeDim.getName(), c1, timeParts2, nonTimeSpec, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 9);
-    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
-      toPartitionArray(HOURLY, getDateWithOffset(0), getDateWithOffset(1), getDateWithOffset(0)));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null), toPartitionArray(HOURLY, 0, 1, 0));
 
     client.dropPartition(cubeDim.getName(), c1, timeParts4, null, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 8);
-    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
-      toPartitionArray(HOURLY, getDateWithOffset(0), getDateWithOffset(0), getDateWithOffset(0)));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null), toPartitionArray(HOURLY, 0, 0, 0));
 
     client.dropPartition(cubeDim.getName(), c1, timeParts3, nonTimeSpec, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 5);

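Most edits in this file swap the local getDateWithOffset(int) helper for the shared DateFactory, so offsets are computed against an explicit UpdatePeriod. A minimal sketch of what that factory needs to provide to satisfy these call sites, reconstructed from the DateOffsetProvider removed from CubeTestSetup below; the class body and any caching behavior are assumptions, only the method names and semantics come from the diff:

    import java.util.Calendar;
    import java.util.Date;

    import org.apache.lens.cube.metadata.UpdatePeriod;

    public final class DateFactorySketch {
      // getDateWithOffset(period, 0) must equal NOW, which is why the diff can
      // replace getDateWithOffset(0) with the NOW constant.
      public static final Date NOW = new Date();

      private DateFactorySketch() {
      }

      public static Date getDateWithOffset(UpdatePeriod period, int offset) {
        Calendar cal = Calendar.getInstance();
        cal.setTime(NOW);
        // Same arithmetic as the removed DateOffsetProvider: shift NOW by the
        // period's calendar field, e.g. HOURLY maps to Calendar.HOUR_OF_DAY.
        cal.add(period.calendarField(), offset);
        return cal.getTime();
      }

      public static String getDateStringWithOffset(UpdatePeriod period, int offset) {
        // Render with the period's own formatter, matching assertions like
        // getDateStringWithOffset(HOURLY, 1) against latest-partition timestamps.
        return period.format(getDateWithOffset(period, offset));
      }
    }
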
http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestDateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestDateUtil.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestDateUtil.java
new file mode 100644
index 0000000..7e239f3
--- /dev/null
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestDateUtil.java
@@ -0,0 +1,297 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.metadata;
+
+import static java.util.Calendar.DAY_OF_MONTH;
+import static java.util.Calendar.MONTH;
+
+import static org.apache.lens.cube.metadata.DateUtil.*;
+import static org.apache.lens.cube.metadata.UpdatePeriod.*;
+
+import static org.apache.commons.lang.time.DateUtils.addMilliseconds;
+
+import static org.testng.Assert.assertEquals;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.Set;
+
+import org.apache.lens.server.api.error.LensException;
+
+import org.apache.commons.lang.time.DateUtils;
+
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Test;
+
+import com.beust.jcommander.internal.Sets;
+import com.google.common.collect.Lists;
+import lombok.extern.slf4j.Slf4j;
+
+/**
+ * Unit tests for cube DateUtil class TestDateUtil.
+ */
+@Slf4j
+public class TestDateUtil {
+
+  public static final String[] TEST_PAIRS = {
+    "2013-Jan-01", "2013-Jan-31", "2013-Jan-01", "2013-May-31",
+    "2013-Jan-01", "2013-Dec-31", "2013-Feb-01", "2013-Apr-25",
+    "2012-Feb-01", "2013-Feb-01", "2011-Feb-01", "2013-Feb-01",
+    "2013-Jan-02", "2013-Feb-02", "2013-Jan-02", "2013-Mar-02",
+  };
+
+  public static final SimpleDateFormat DATE_FMT = new SimpleDateFormat("yyyy-MMM-dd");
+
+  private Date[] pairs;
+
+  @BeforeTest
+  public void setUp() {
+    pairs = new Date[TEST_PAIRS.length];
+    for (int i = 0; i < TEST_PAIRS.length; i++) {
+      try {
+        pairs[i] = DATE_FMT.parse(TEST_PAIRS[i]);
+      } catch (ParseException e) {
+        log.error("Parsing exception while setup.", e);
+      }
+    }
+  }
+
+
+  @Test
+  public void testMonthsBetween() throws Exception {
+    int i = 0;
+    assertEquals(getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
+      new CoveringInfo(1, true),
+      "2013-Jan-01 to 2013-Jan-31");
+
+    i += 2;
+    assertEquals(getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
+      new CoveringInfo(5, true),
+      "2013-Jan-01 to 2013-May-31");
+
+    i += 2;
+    assertEquals(getMonthlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
+      new CoveringInfo(12, true),
+      "2013-Jan-01 to 2013-Dec-31");
+
+    i += 2;
+    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(2, false),
+      "2013-Feb-01 to 2013-Apr-25");
+
+    i += 2;
+    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(12, true),
+      "2012-Feb-01 to 2013-Feb-01");
+
+    i += 2;
+    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(24, true),
+      "2011-Feb-01 to 2013-Feb-01");
+
+    i += 2;
+    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
+      "2013-Jan-02 to 2013-Feb-02");
+
+    i += 2;
+    assertEquals(getMonthlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(1, false),
+      "2013-Jan-02 to 2013-Mar-02");
+  }
+
+  @Test
+  public void testQuartersBetween() throws Exception {
+    int i = 0;
+    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
+      "2013-Jan-01 to 2013-Jan-31");
+
+    i += 2;
+    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(1, false),
+      "2013-Jan-01 to 2013-May-31");
+
+    i += 2;
+    assertEquals(getQuarterlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
+      new CoveringInfo(4, true),
+      "2013-Jan-01 to 2013-Dec-31");
+
+    i += 2;
+    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
+      "2013-Feb-01 to 2013-Apr-25");
+
+    i += 2;
+    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(3, false),
+      "2012-Feb-01 to 2013-Feb-01");
+
+    i += 2;
+    assertEquals(getQuarterlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(7, false),
+      "2011-Feb-01 to 2013-Feb-01");
+  }
+
+  @Test
+  public void testYearsBetween() throws Exception {
+    int i = 0;
+    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
+      "" + pairs[i] + "->" + pairs[i + 1]);
+
+    i += 2;
+    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
+      "" + pairs[i] + "->" + pairs[i + 1]);
+
+    i += 2;
+    assertEquals(getYearlyCoveringInfo(pairs[i], DateUtils.round(pairs[i + 1], MONTH)),
+      new CoveringInfo(1, true), ""
+        + pairs[i] + "->" + pairs[i + 1]);
+
+    i += 2;
+    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
+      "" + pairs[i] + "->" + pairs[i + 1]);
+
+    i += 2;
+    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(0, false),
+      "" + pairs[i] + "->" + pairs[i + 1]);
+
+    i += 2;
+    assertEquals(getYearlyCoveringInfo(pairs[i], pairs[i + 1]), new CoveringInfo(1, false),
+      "" + pairs[i] + "->" + pairs[i + 1]);
+  }
+
+  @Test
+  public void testWeeksBetween() throws Exception {
+    CoveringInfo weeks;
+
+    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-26"), DATE_FMT.parse("2013-Jun-2"));
+    assertEquals(weeks, new CoveringInfo(1, true), "2013-May-26 to 2013-Jun-2");
+
+    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-27"), DATE_FMT.parse("2013-Jun-3"));
+    assertEquals(weeks, new CoveringInfo(0, false), "2013-May-26 to 2013-Jun-2");
+
+    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-27"), DATE_FMT.parse("2013-Jun-9"));
+    assertEquals(weeks, new CoveringInfo(1, false), "2013-May-26 to 2013-Jun-2");
+
+    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-27"), DATE_FMT.parse("2013-Jun-1"));
+    assertEquals(weeks, new CoveringInfo(0, false), "2013-May-27 to 2013-Jun-1");
+
+    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-25"), DATE_FMT.parse("2013-Jun-2"));
+    assertEquals(weeks, new CoveringInfo(1, false), "2013-May-25 to 2013-Jun-1");
+
+    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-26"), DATE_FMT.parse("2013-Jun-9"));
+    assertEquals(weeks, new CoveringInfo(2, true), "2013-May-26 to 2013-Jun-8");
+
+    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-26"), DATE_FMT.parse("2013-Jun-10"));
+    assertEquals(weeks, new CoveringInfo(2, false), "2013-May-26 to 2013-Jun-10");
+
+    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2015-Dec-27"), DATE_FMT.parse("2016-Jan-03"));
+    assertEquals(weeks, new CoveringInfo(1, true), "2015-Dec-27 to 2016-Jan-03");
+  }
+
+  @Test
+  public void testNowWithGranularity() throws Exception {
+    String dateFmt = "yyyy/MM/dd-HH.mm.ss.SSS";
+    // Tuesday Sept 23, 2014, 12.02.05.500 pm
+    String testDateStr = "2014/09/23-12.02.05.500";
+    final SimpleDateFormat sdf = new SimpleDateFormat(dateFmt);
+    final Date testDate = sdf.parse(testDateStr);
+
+    System.out.print("@@ testDateStr=" + testDateStr + " parsed date=" + testDate);
+
+    // Tests without a diff, just resolve now with different granularity
+    assertEquals(testDateStr, sdf.format(resolveDate("now", testDate)));
+    assertEquals("2014/01/01-00.00.00.000", sdf.format(resolveDate("now.year", testDate)));
+    assertEquals("2014/09/01-00.00.00.000", sdf.format(resolveDate("now.month", testDate)));
+    // Start of week resolves to Sunday
+    assertEquals("2014/09/21-00.00.00.000", sdf.format(resolveDate("now.week", testDate)));
+    assertEquals("2014/09/23-00.00.00.000", sdf.format(resolveDate("now.day", testDate)));
+    assertEquals("2014/09/23-12.00.00.000", sdf.format(resolveDate("now.hour", testDate)));
+    assertEquals("2014/09/23-12.02.00.000", sdf.format(resolveDate("now.minute", testDate)));
+    assertEquals("2014/09/23-12.02.05.000", sdf.format(resolveDate("now.second", testDate)));
+
+    // Tests with a diff
+    assertEquals("2014/09/22-00.00.00.000", sdf.format(resolveDate("now.day -1day", testDate)));
+    assertEquals("2014/09/23-10.00.00.000", sdf.format(resolveDate("now.hour -2hour", testDate)));
+    assertEquals("2014/09/24-12.00.00.000", sdf.format(resolveDate("now.hour +24hour", testDate)));
+    assertEquals("2015/01/01-00.00.00.000", sdf.format(resolveDate("now.year +1year", testDate)));
+    assertEquals("2014/02/01-00.00.00.000", sdf.format(resolveDate("now.year +1month", testDate)));
+  }
+
+  @Test
+  public void testFloorDate() throws ParseException {
+    Date date = ABSDATE_PARSER.get().parse("2015-01-01-00:00:00,000");
+    Date curDate = date;
+    for (int i = 0; i < 284; i++) {
+      assertEquals(getFloorDate(curDate, YEARLY), date);
+      curDate = addMilliseconds(curDate, 111111111);
+    }
+    assertEquals(getFloorDate(curDate, YEARLY), DateUtils.addYears(date, 1));
+    assertEquals(getFloorDate(date, WEEKLY), ABSDATE_PARSER.get().parse("2014-12-28-00:00:00,000"));
+  }
+
+  @Test
+  public void testCeilDate() throws ParseException {
+    Date date = ABSDATE_PARSER.get().parse("2015-12-26-06:30:15,040");
+    assertEquals(getCeilDate(date, YEARLY), ABSDATE_PARSER.get().parse("2016-01-01-00:00:00,000"));
+    assertEquals(getCeilDate(date, MONTHLY), ABSDATE_PARSER.get().parse("2016-01-01-00:00:00,000"));
+    assertEquals(getCeilDate(date, DAILY), ABSDATE_PARSER.get().parse("2015-12-27-00:00:00,000"));
+    assertEquals(getCeilDate(date, HOURLY), ABSDATE_PARSER.get().parse("2015-12-26-07:00:00,000"));
+    assertEquals(getCeilDate(date, MINUTELY), ABSDATE_PARSER.get().parse("2015-12-26-06:31:00,000"));
+    assertEquals(getCeilDate(date, SECONDLY), ABSDATE_PARSER.get().parse("2015-12-26-06:30:16,000"));
+    assertEquals(getCeilDate(date, WEEKLY), ABSDATE_PARSER.get().parse("2015-12-27-00:00:00,000"));
+  }
+
+  @Test
+  public void testTimeDiff() throws LensException {
+    ArrayList<String> minusFourDays =
+      Lists.newArrayList("-4 days", "-4days", "-4day", "-4 day", "- 4days", "- 4 day");
+    ArrayList<String> plusFourDays =
+      Lists.newArrayList("+4 days", "4 days", "+4days", "4day", "4 day", "+ 4days", "+ 4 day", "+4 day");
+    Set<TimeDiff> diffs = Sets.newHashSet();
+    for (String diffStr : minusFourDays) {
+      diffs.add(TimeDiff.parseFrom(diffStr));
+    }
+    assertEquals(diffs.size(), 1);
+    TimeDiff minusFourDaysDiff = diffs.iterator().next();
+    assertEquals(minusFourDaysDiff.quantity, -4);
+    assertEquals(minusFourDaysDiff.updatePeriod, DAILY);
+
+    diffs.clear();
+    for (String diffStr : plusFourDays) {
+      diffs.add(TimeDiff.parseFrom(diffStr));
+    }
+    assertEquals(diffs.size(), 1);
+    TimeDiff plusFourDaysDiff = diffs.iterator().next();
+    assertEquals(plusFourDaysDiff.quantity, 4);
+    assertEquals(plusFourDaysDiff.updatePeriod, DAILY);
+    Date now = new Date();
+    assertEquals(minusFourDaysDiff.offsetFrom(plusFourDaysDiff.offsetFrom(now)), now);
+    assertEquals(plusFourDaysDiff.offsetFrom(minusFourDaysDiff.offsetFrom(now)), now);
+    assertEquals(minusFourDaysDiff.negativeOffsetFrom(now), plusFourDaysDiff.offsetFrom(now));
+    assertEquals(minusFourDaysDiff.offsetFrom(now), plusFourDaysDiff.negativeOffsetFrom(now));
+  }
+
+  @Test
+  public void testRelativeToAbsolute() throws LensException {
+    Date now = new Date();
+    Date nowDay = DateUtils.truncate(now, DAY_OF_MONTH);
+    Date nowDayMinus2Days = DateUtils.add(nowDay, DAY_OF_MONTH, -2);
+    assertEquals(relativeToAbsolute("now", now), DateUtil.ABSDATE_PARSER.get().format(now));
+    assertEquals(relativeToAbsolute("now.day", now), DateUtil.ABSDATE_PARSER.get().format(nowDay));
+    assertEquals(relativeToAbsolute("now.day - 2 days", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
+    assertEquals(relativeToAbsolute("now.day - 2 day", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
+    assertEquals(relativeToAbsolute("now.day - 2day", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
+    assertEquals(relativeToAbsolute("now.day -2 day", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
+    assertEquals(relativeToAbsolute("now.day -2 days", now), DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
+  }
+}

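Taken together, testNowWithGranularity and testRelativeToAbsolute pin down the relative-date grammar: an anchor ("now"), an optional granularity that truncates, and an optional signed diff applied after truncation. A quick usage recap, with expected values copied from the assertions above rather than recomputed:

    import java.text.SimpleDateFormat;
    import java.util.Date;

    import static org.apache.lens.cube.metadata.DateUtil.resolveDate;

    class ResolveDateRecap {
      public static void main(String[] args) throws Exception {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd-HH.mm.ss.SSS");
        Date testDate = sdf.parse("2014/09/23-12.02.05.500");  // Tuesday, Sept 23 2014
        // "now.<granularity>" truncates to the start of that granularity:
        sdf.format(resolveDate("now.day", testDate));   // 2014/09/23-00.00.00.000
        sdf.format(resolveDate("now.week", testDate));  // 2014/09/21-00.00.00.000 (Sunday)
        // An optional signed diff is applied after truncation:
        sdf.format(resolveDate("now.day -1day", testDate));     // 2014/09/22-00.00.00.000
        sdf.format(resolveDate("now.year +1month", testDate));  // 2014/02/01-00.00.00.000
      }
    }
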
http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 1357035..2a50d74 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -19,14 +19,14 @@
 
 package org.apache.lens.cube.parse;
 
-import static java.util.Calendar.*;
+import static java.util.Calendar.DAY_OF_MONTH;
+import static java.util.Calendar.HOUR_OF_DAY;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.metadata.UpdatePeriod.*;
 
 import static org.testng.Assert.*;
 
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
 import java.util.*;
 
 import org.apache.lens.cube.metadata.*;
@@ -39,7 +39,6 @@ import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.time.DateUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -88,10 +87,6 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 public class CubeTestSetup {
 
-  public static final String HOUR_FMT = "yyyy-MM-dd-HH";
-  public static final SimpleDateFormat HOUR_PARSER = new SimpleDateFormat(HOUR_FMT);
-  public static final String MONTH_FMT = "yyyy-MM";
-  public static final SimpleDateFormat MONTH_PARSER = new SimpleDateFormat(MONTH_FMT);
   private Set<CubeMeasure> cubeMeasures;
   private Set<CubeDimAttribute> cubeDimensions;
   public static final String TEST_CUBE_NAME = "testCube";
@@ -101,30 +96,6 @@ public class CubeTestSetup {
   public static final String DERIVED_CUBE_NAME2 = "der2";
   public static final String DERIVED_CUBE_NAME3 = "der3";
 
-  // Time Instances as Date Type
-  public static final Date NOW;
-  public static final Date LAST_HOUR;
-  public static final Date TWODAYS_BACK;
-  public static final Date ONE_DAY_BACK;
-  public static final Date TWO_MONTHS_BACK;
-  public static final Date BEFORE_4_DAYS_START;
-  public static final Date BEFORE_4_DAYS_END;
-  public static final Date THIS_YEAR_START;
-  public static final Date THIS_YEAR_END;
-  public static final Date LAST_YEAR_START;
-  public static final Date LAST_YEAR_END;
-
-  // Time Ranges
-  public static final String LAST_HOUR_TIME_RANGE;
-  public static final String TWO_DAYS_RANGE;
-  public static final String TWO_DAYS_RANGE_TTD;
-  public static final String THIS_YEAR_RANGE;
-  public static final String LAST_YEAR_RANGE;
-  public static final String TWO_MONTHS_RANGE_UPTO_MONTH;
-  public static final String TWO_MONTHS_RANGE_UPTO_HOURS;
-  public static final String TWO_DAYS_RANGE_BEFORE_4_DAYS;
-
-  private static boolean zerothHour;
   private static String c0 = "C0";
   private static String c1 = "C1";
   private static String c2 = "C2";
@@ -134,99 +105,13 @@ public class CubeTestSetup {
   private static Map<String, String> factValidityProperties = Maps.newHashMap();
   @Getter
   private static Map<String, List<UpdatePeriod>> storageToUpdatePeriodMap = new LinkedHashMap<>();
-  public static class DateOffsetProvider extends HashMap<Integer, Date> {
-    private final UpdatePeriod updatePeriod;
-    Calendar calendar = Calendar.getInstance();
-
-    public DateOffsetProvider(UpdatePeriod updatePeriod) {
-      this.updatePeriod = updatePeriod;
-    }
-    {
-      put(0, calendar.getTime());
-    }
-
-    @Override
-    public Date get(Object key) {
-      if (!containsKey(key) && key instanceof Integer) {
-        calendar.setTime(super.get(0));
-        calendar.add(updatePeriod.calendarField(), (Integer) key);
-        put((Integer) key, calendar.getTime());
-      }
-      return super.get(key);
-    }
-  }
-
   static {
-    Calendar cal = Calendar.getInstance();
-    // Keep in sync
-    NOW = cal.getTime();
-    log.debug("Test now:{}", NOW);
-
-    // Figure out if current hour is 0th hour
-    zerothHour = (cal.get(HOUR_OF_DAY) == 0);
-
-    // Figure out last hour
-    cal.add(HOUR_OF_DAY, -1);
-    LAST_HOUR = cal.getTime();
-    log.debug("LastHour:{}", LAST_HOUR);
-
-    cal.setTime(NOW);
-    cal.add(DAY_OF_MONTH, -1);
-    ONE_DAY_BACK = cal.getTime();
-    cal.add(DAY_OF_MONTH, -1);
-    TWODAYS_BACK = cal.getTime();
-    System.out.println("Test TWODAYS_BACK:" + TWODAYS_BACK);
-
-    // two months back
-    cal.setTime(NOW);
-    cal.add(MONTH, -2);
-    TWO_MONTHS_BACK = cal.getTime();
-    System.out.println("Test TWO_MONTHS_BACK:" + TWO_MONTHS_BACK);
-
-    // Before 4days
-    cal.setTime(NOW);
-    cal.add(DAY_OF_MONTH, -4);
-    BEFORE_4_DAYS_END = cal.getTime();
-    cal.add(DAY_OF_MONTH, -2);
-    BEFORE_4_DAYS_START = cal.getTime();
-
-
-    THIS_YEAR_START = DateUtils.truncate(NOW, YEARLY.calendarField());
-    THIS_YEAR_END = DateUtils.addYears(THIS_YEAR_START, 1);
-    LAST_YEAR_START = DateUtils.addYears(THIS_YEAR_START, -1);
-    LAST_YEAR_END = THIS_YEAR_START;
-    TWO_DAYS_RANGE_BEFORE_4_DAYS =
-      "time_range_in(d_time, '" + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_START) + "','"
-        + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')";
-
-
-    TWO_DAYS_RANGE = "time_range_in(d_time, '" + getDateUptoHours(TWODAYS_BACK) + "','" + getDateUptoHours(NOW) + "')";
-    TWO_DAYS_RANGE_TTD = "time_range_in(test_time_dim, '" + getDateUptoHours(TWODAYS_BACK) + "','"
-      + getDateUptoHours(NOW) + "')";
-    THIS_YEAR_RANGE =
-      "time_range_in(d_time, '" + getDateUptoHours(THIS_YEAR_START) + "','" + getDateUptoHours(THIS_YEAR_END) + "')";
-    LAST_YEAR_RANGE =
-      "time_range_in(d_time, '" + getDateUptoHours(LAST_YEAR_START) + "','" + getDateUptoHours(LAST_YEAR_END) + "')";
-    TWO_MONTHS_RANGE_UPTO_MONTH =
-      "time_range_in(d_time, '" + getDateUptoMonth(TWO_MONTHS_BACK) + "','" + getDateUptoMonth(NOW) + "')";
-    TWO_MONTHS_RANGE_UPTO_HOURS =
-      "time_range_in(d_time, '" + getDateUptoHours(TWO_MONTHS_BACK) + "','" + getDateUptoHours(NOW) + "')";
-
-    // calculate LAST_HOUR_TIME_RANGE
-    LAST_HOUR_TIME_RANGE = getTimeRangeString(getDateUptoHours(LAST_HOUR), getDateUptoHours(NOW));
     factValidityProperties.put(MetastoreConstants.FACT_RELATIVE_START_TIME, "now.year - 90 days");
   }
 
-  public static boolean isZerothHour() {
-    return zerothHour;
-  }
 
   public static String getDateUptoHours(Date dt) {
-    return HOUR_PARSER.format(dt);
-  }
-
-  public static String getDateUptoMonth(Date dt) {
-    return MONTH_PARSER.format(dt);
+    return HOURLY.format(dt);
   }
 
   interface StoragePartitionProvider {
@@ -265,23 +150,16 @@ public class CubeTestSetup {
     StringBuilder expected = new StringBuilder();
     for (Map.Entry<String, String> entry : storageTableToWhereClause.entrySet()) {
       String storageTable = entry.getKey();
-      expected.append(selExpr);
-      expected.append(storageTable);
-      expected.append(" ");
-      expected.append(cubeName);
-      expected.append(" WHERE ");
-      expected.append("(");
+      expected.append(selExpr).append(storageTable).append(" ").append(cubeName).append(" WHERE ").append("(");
       if (notLatestConditions != null) {
         for (String cond : notLatestConditions) {
           expected.append(cond).append(" AND ");
         }
       }
       if (whereExpr != null) {
-        expected.append(whereExpr);
-        expected.append(" AND ");
+        expected.append(whereExpr).append(" AND ");
       }
-      expected.append(entry.getValue());
-      expected.append(")");
+      expected.append(entry.getValue()).append(")");
       if (postWhereExpr != null) {
         expected.append(" ").append(postWhereExpr);
       }
@@ -328,27 +206,20 @@ public class CubeTestSetup {
     assertEquals(1, numTabs);
     for (Map.Entry<String, String> entry : storageTableToWhereClause.entrySet()) {
       String storageTable = entry.getKey();
-      expected.append(selExpr);
-      expected.append(storageTable);
-      expected.append(" ");
-      expected.append(cubeName);
-      expected.append(joinExpr);
-      expected.append(" WHERE ");
-      expected.append("(");
+      expected.append(selExpr).append(storageTable).append(" ").append(cubeName).append(joinExpr)
+        .append(" WHERE ").append("(");
       if (notLatestConditions != null) {
         for (String cond : notLatestConditions) {
           expected.append(cond).append(" AND ");
         }
       }
       if (whereExpr != null) {
-        expected.append(whereExpr);
-        expected.append(" AND ");
+        expected.append(whereExpr).append(" AND ");
       }
       expected.append(entry.getValue());
       if (joinWhereConds != null) {
         for (String joinEntry : joinWhereConds) {
-          expected.append(" AND ");
-          expected.append(joinEntry);
+          expected.append(" AND ").append(joinEntry);
         }
       }
       expected.append(")");
@@ -379,7 +250,7 @@ public class CubeTestSetup {
 
   public static Map<String, String> getWhereForDailyAndHourly2daysWithTimeDim(String cubeName, String timedDimension,
     Date from, Date to, String... storageTables) {
-    Map<String, String> storageTableToWhereClause = new LinkedHashMap<String, String>();
+    Map<String, String> storageTableToWhereClause = new LinkedHashMap<>();
     if (storageToUpdatePeriodMap.isEmpty()) {
       String whereClause = getWhereForDailyAndHourly2daysWithTimeDim(cubeName, timedDimension, from, to);
       storageTableToWhereClause.put(getStorageTableString(storageTables), whereClause);
@@ -398,7 +269,7 @@ public class CubeTestSetup {
   private static String getStorageTableString(String... storageTables) {
     String dbName = getDbName();
     if (!StringUtils.isBlank(dbName)) {
-      List<String> tbls = new ArrayList<String>();
+      List<String> tbls = new ArrayList<>();
       for (String tbl : storageTables) {
         tbls.add(dbName + tbl);
       }
@@ -409,10 +280,10 @@ public class CubeTestSetup {
 
   public static String getWhereForDailyAndHourly2daysWithTimeDim(String cubeName, String timedDimension, Date from,
     Date to) {
-    List<String> hourlyparts = new ArrayList<String>();
-    List<String> dailyparts = new ArrayList<String>();
+    Set<String> hourlyparts = new HashSet<>();
+    Set<String> dailyparts = new HashSet<>();
     Date dayStart;
-    if (!CubeTestSetup.isZerothHour()) {
+    if (!isZerothHour()) {
       addParts(hourlyparts, HOURLY, from, DateUtil.getCeilDate(from, DAILY));
       addParts(hourlyparts, HOURLY, DateUtil.getFloorDate(to, DAILY),
         DateUtil.getFloorDate(to, HOURLY));
@@ -421,7 +292,7 @@ public class CubeTestSetup {
       dayStart = from;
     }
     addParts(dailyparts, DAILY, dayStart, DateUtil.getFloorDate(to, DAILY));
-    List<String> parts = new ArrayList<String>();
+    List<String> parts = new ArrayList<>();
     parts.addAll(hourlyparts);
     parts.addAll(dailyparts);
     Collections.sort(parts);
@@ -434,7 +305,7 @@ public class CubeTestSetup {
     List<String> hourlyparts = new ArrayList<String>();
     List<String> dailyparts = new ArrayList<String>();
     Date dayStart;
-    if (!CubeTestSetup.isZerothHour()) {
+    if (!isZerothHour()) {
       addParts(hourlyparts, HOURLY, from, DateUtil.getCeilDate(from, DAILY));
       addParts(hourlyparts, HOURLY, DateUtil.getFloorDate(to, DAILY),
         DateUtil.getFloorDate(to, HOURLY));
@@ -458,7 +329,7 @@ public class CubeTestSetup {
     List<String> monthlyparts = new ArrayList<String>();
     Date dayStart = TWO_MONTHS_BACK;
     Date monthStart = TWO_MONTHS_BACK;
-    if (!CubeTestSetup.isZerothHour()) {
+    if (!isZerothHour()) {
       addParts(hourlyparts, HOURLY, TWO_MONTHS_BACK,
         DateUtil.getCeilDate(TWO_MONTHS_BACK, DAILY));
       addParts(hourlyparts, HOURLY, DateUtil.getFloorDate(NOW, DAILY),
@@ -510,7 +381,7 @@ public class CubeTestSetup {
 
     Date dayStart = TWO_MONTHS_BACK;
     Date monthStart = TWO_MONTHS_BACK;
-    if (!CubeTestSetup.isZerothHour()) {
+    if (!isZerothHour()) {
       addParts(hourlyparts, HOURLY, TWO_MONTHS_BACK,
         DateUtil.getCeilDate(TWO_MONTHS_BACK, DAILY));
       addParts(hourlyparts, HOURLY, DateUtil.getFloorDate(NOW, DAILY),
@@ -571,13 +442,12 @@ public class CubeTestSetup {
     return storageTableToWhereClause;
   }
 
-  public static void addParts(List<String> partitions, UpdatePeriod updatePeriod, Date from, Date to) {
-    DateFormat fmt = updatePeriod.format();
+  public static void addParts(Collection<String> partitions, UpdatePeriod updatePeriod, Date from, Date to) {
     Calendar cal = Calendar.getInstance();
     cal.setTime(from);
     Date dt = cal.getTime();
     while (dt.before(to)) {
-      String part = fmt.format(dt);
+      String part = updatePeriod.format(dt);
       cal.add(updatePeriod.calendarField(), 1);
       partitions.add(part);
       dt = cal.getTime();
@@ -1418,9 +1288,9 @@ public class CubeTestSetup {
     }
 
     // Add all hourly partitions for TWO_DAYS_RANGE_BEFORE_4_DAYS
-    cal.setTime(BEFORE_4_DAYS_START);
+    cal.setTime(BEFORE_6_DAYS);
     temp = cal.getTime();
-    while (!(temp.after(BEFORE_4_DAYS_END))) {
+    while (!(temp.after(BEFORE_4_DAYS))) {
       Map<String, Date> timeParts = new HashMap<String, Date>();
       timeParts.put("ttd", temp);
       timeParts.put("ttd2", temp);
@@ -1533,9 +1403,9 @@ public class CubeTestSetup {
     }
 
     // Add all hourly partitions for TWO_DAYS_RANGE_BEFORE_4_DAYS
-    cal.setTime(BEFORE_4_DAYS_START);
+    cal.setTime(BEFORE_6_DAYS);
     temp = cal.getTime();
-    while (!(temp.after(BEFORE_4_DAYS_END))) {
+    while (!(temp.after(BEFORE_4_DAYS))) {
       Map<String, Date> timeParts = new HashMap<String, Date>();
       timeParts.put(TestCubeMetastoreClient.getDatePartitionKey(), temp);
       StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, HOURLY);
@@ -1585,9 +1455,9 @@ public class CubeTestSetup {
     assertTimeline(client, fact.getName(), c4, HOURLY, "ttd2", ttd2StoreAll);
 
     // Add all hourly partitions for TWO_DAYS_RANGE_BEFORE_4_DAYS
-    cal.setTime(BEFORE_4_DAYS_START);
+    cal.setTime(BEFORE_6_DAYS);
     temp = cal.getTime();
-    while (!(temp.after(BEFORE_4_DAYS_END))) {
+    while (!(temp.after(BEFORE_4_DAYS))) {
       Map<String, Date> timeParts = new HashMap<String, Date>();
       timeParts.put("ttd", temp);
       timeParts.put("ttd2", temp);
@@ -2692,9 +2562,4 @@ public class CubeTestSetup {
     System.out.println("--query- " + query);
     HQLParser.printAST(HQLParser.parseHQL(query, new HiveConf()));
   }
-
-
-  private static String getTimeRangeString(final String startDate, final String endDate) {
-    return "time_range_in(d_time, '" + startDate + "','" + endDate + "')";
-  }
 }

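One structural change above is easy to miss: addParts now accepts a Collection<String>, and getWhereForDailyAndHourly2daysWithTimeDim collects into Sets, so the overlapping hourly edges and daily middle of a range cannot produce duplicate partitions. A condensed sketch of that composition, ignoring the zeroth-hour special case the real method handles:

    import java.util.*;

    import org.apache.lens.cube.metadata.DateUtil;

    import static org.apache.lens.cube.metadata.UpdatePeriod.*;
    import static org.apache.lens.cube.parse.CubeTestSetup.addParts;

    class PartsForRangeSketch {
      // Hourly partitions on the ragged edges, daily partitions for whole days,
      // de-duplicated by the Sets and sorted for stable expected-query strings.
      static List<String> partsForRange(Date from, Date to) {
        Set<String> hourly = new HashSet<>();
        Set<String> daily = new HashSet<>();
        addParts(hourly, HOURLY, from, DateUtil.getCeilDate(from, DAILY));
        addParts(hourly, HOURLY, DateUtil.getFloorDate(to, DAILY), DateUtil.getFloorDate(to, HOURLY));
        addParts(daily, DAILY, DateUtil.getCeilDate(from, DAILY), DateUtil.getFloorDate(to, DAILY));
        List<String> parts = new ArrayList<>(hourly);
        parts.addAll(daily);
        Collections.sort(parts);
        return parts;
      }
    }
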
http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
index 0fea9f1..ff7c15f 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/FieldsCannotBeQueriedTogetherTest.java
@@ -18,7 +18,7 @@
  */
 package org.apache.lens.cube.parse;
 
-import static org.apache.lens.cube.parse.CubeTestSetup.*;
+import static org.apache.lens.cube.metadata.DateFactory.*;
 
 import static org.testng.Assert.assertEquals;
 import static org.testng.Assert.fail;
@@ -301,8 +301,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
     Configuration queryConf = new Configuration(conf);
     queryConf.setBoolean(CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, true);
 
-    testFieldsCannotBeQueriedTogetherError("select msr4 from basecube where " + "time_range_in(d_time, '"
-        + getDateUptoHours(TWODAYS_BACK) + "','" + getDateUptoHours(CubeTestSetup.NOW) + "')",
+    testFieldsCannotBeQueriedTogetherError("select msr4 from basecube where " + TWO_DAYS_RANGE,
         Arrays.asList("d_time", "msr4"), queryConf);
   }
 
@@ -321,8 +320,7 @@ public class FieldsCannotBeQueriedTogetherTest extends TestQueryRewrite {
     Configuration queryConf = new Configuration(conf);
     queryConf.setBoolean(CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, false);
 
-    testFieldsCannotBeQueriedTogetherError("select msr4 from basecube where " + "time_range_in(d_time, '"
-        + getDateUptoHours(TWODAYS_BACK) + "','" + getDateUptoHours(CubeTestSetup.NOW) + "')",
+    testFieldsCannotBeQueriedTogetherError("select msr4 from basecube where " + TWO_DAYS_RANGE,
         Arrays.asList("d_time", "msr4"), queryConf);
   }
 

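The substitution above is purely mechanical: the shared TWO_DAYS_RANGE constant expands to exactly the inline expression it replaces, as it was built before the move to DateFactory (construction taken from the code removed from CubeTestSetup above):

    TWO_DAYS_RANGE = "time_range_in(d_time, '" + getDateUptoHours(TWODAYS_BACK) + "','"
      + getDateUptoHours(NOW) + "')";
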
http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
index 753ca33..a48d753 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
@@ -19,6 +19,7 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
 import static org.apache.lens.cube.parse.TestCubeRewriter.compareQueries;
 

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index ee84a4c..97c6d08 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -19,9 +19,10 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
+import static org.apache.lens.cube.metadata.DateUtil.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.MISSING_PARTITIONS;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
-import static org.apache.lens.cube.parse.DateUtil.*;
 import static org.apache.lens.cube.parse.TestCubeRewriter.compareQueries;
 
 import static org.apache.hadoop.hive.ql.parse.HiveParser.KW_AND;
@@ -33,6 +34,7 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.cube.metadata.TimeRange;
 import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.server.api.error.LensException;
@@ -47,7 +49,6 @@ import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
 import com.google.common.base.Splitter;
-
 import lombok.Getter;
 
 public class TestBaseCubeQueries extends TestQueryRewrite {

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
index 9a2493c..eeba861 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
@@ -19,12 +19,12 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.NOW;
+import static org.apache.lens.cube.metadata.DateFactory.TWODAYS_BACK;
+import static org.apache.lens.cube.metadata.UpdatePeriod.DAILY;
+
 import java.text.DateFormat;
-import java.util.ArrayList;
 import java.util.Date;
-import java.util.List;
-
-import org.apache.lens.cube.metadata.UpdatePeriod;
 
 import org.testng.Assert;
 
@@ -50,9 +50,9 @@ public class TestBetweenTimeRangeWriter extends TestTimeRangeWriter {
     String expected = null;
     if (format == null) {
       expected =
-        getBetweenClause("test", "dt", CubeTestSetup.TWODAYS_BACK, CubeTestSetup.NOW, UpdatePeriod.DAILY.format());
+        getBetweenClause("test", "dt", TWODAYS_BACK, NOW, DAILY.format());
     } else {
-      expected = getBetweenClause("test", "dt", CubeTestSetup.TWODAYS_BACK, CubeTestSetup.NOW, format);
+      expected = getBetweenClause("test", "dt", TWODAYS_BACK, NOW, format);
     }
     Assert.assertEquals(expected, whereClause);
   }
@@ -62,17 +62,4 @@ public class TestBetweenTimeRangeWriter extends TestTimeRangeWriter {
     String last = format.format(end);
     return " (" + alias + "." + colName + " BETWEEN '" + first + "' AND '" + last + "') ";
   }
-
-  @Override
-  public void validateSingle(String whereClause, DateFormat format) {
-    List<String> parts = new ArrayList<String>();
-    if (format == null) {
-      parts.add(UpdatePeriod.DAILY.format(CubeTestSetup.ONE_DAY_BACK));
-    } else {
-      parts.add(format.format(CubeTestSetup.ONE_DAY_BACK));
-    }
-
-    System.out.println("Expected :" + StorageUtil.getWherePartClause("dt", "test", parts));
-    Assert.assertEquals(whereClause, StorageUtil.getWherePartClause("dt", "test", parts));
-  }
 }

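For reference, the getBetweenClause helper retained above yields a closed (inclusive) range with surrounding spaces. A usage fragment, assuming the test's static imports; only the HOURLY pattern ("yyyy-MM-dd-HH") is confirmed by the removed HOUR_FMT constant, so the DAILY rendering and the sample dates below are assumptions:

    // e.g. with DAILY formatting dates as yyyy-MM-dd (assumed):
    String clause = getBetweenClause("test", "dt", TWODAYS_BACK, NOW, DAILY.format());
    // clause: " (test.dt BETWEEN '2015-12-28' AND '2015-12-30') "  (dates illustrative)
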

[50/50] [abbrv] lens git commit: Merged master into LENS-581

Posted by sh...@apache.org.
Merged master into LENS-581


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/25a17dfc
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/25a17dfc
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/25a17dfc

Branch: refs/heads/LENS-581
Commit: 25a17dfc9eed289fbedb5b7b7c66a77e471cbd4c
Parents: 5052e2a 4d7c8e4
Author: Sharad Agarwal <sh...@apache.org>
Authored: Wed Dec 30 12:36:48 2015 +0530
Committer: Sharad Agarwal <sh...@apache.org>
Committed: Wed Dec 30 12:36:48 2015 +0530

----------------------------------------------------------------------
 .gitignore                                      |    6 +
 .reviewboardrc                                  |   17 +
 DISCLAIMER                                      |    6 -
 NOTICE                                          |    2 +-
 README.md                                       |   11 +-
 bin-dist-files/LICENSE                          |    8 +
 bin-dist-files/NOTICE                           |    5 +-
 bin-dist-files/README                           |    6 +-
 checkstyle/pom.xml                              |    2 +-
 lens-api/pom.xml                                |    6 +-
 .../java/org/apache/lens/api/APIResult.java     |   16 +-
 .../main/java/org/apache/lens/api/LensConf.java |   13 +
 .../org/apache/lens/api/LensSessionHandle.java  |    4 +-
 .../lens/api/error/LensCommonErrorCode.java     |   12 +-
 .../apache/lens/api/jaxb/LensJAXBContext.java   |   91 +
 .../lens/api/jaxb/LensJAXBContextResolver.java  |   74 +
 .../api/jaxb/LensJAXBValidationException.java   |   31 +
 .../lens/api/query/InMemoryQueryResult.java     |   24 +-
 .../lens/api/query/LensPreparedQuery.java       |    8 +-
 .../org/apache/lens/api/query/LensQuery.java    |   12 +-
 .../lens/api/query/PersistentQueryResult.java   |   28 +-
 .../org/apache/lens/api/query/QueryPlan.java    |   24 -
 .../org/apache/lens/api/query/QueryResult.java  |    4 +-
 .../org/apache/lens/api/query/QueryStatus.java  |   39 +-
 .../lens/api/query/SchedulerJobHandle.java      |   82 +
 .../apache/lens/api/query/SchedulerJobInfo.java |   68 +
 .../api/query/SchedulerJobInstanceHandle.java   |   86 +
 .../api/query/SchedulerJobInstanceInfo.java     |   84 +
 .../lens/api/query/save/ListResponse.java       |   51 +
 .../apache/lens/api/query/save/Parameter.java   |   80 +
 .../api/query/save/ParameterCollectionType.java |   40 +
 .../lens/api/query/save/ParameterDataType.java  |   48 +
 .../api/query/save/ParameterParserResponse.java |   50 +
 .../query/save/ResourceModifiedResponse.java    |   70 +
 .../apache/lens/api/query/save/SavedQuery.java  |   70 +
 .../apache/lens/api/result/LensAPIResult.java   |   11 +-
 .../org/apache/lens/api/result/LensErrorTO.java |    8 +-
 .../api/result/LensJAXBContextResolver.java     |   77 -
 .../apache/lens/api/result/PrettyPrintable.java |    2 +-
 .../org/apache/lens/api/util/CommonUtils.java   |   81 +
 .../org/apache/lens/api/util/PathValidator.java |  139 ++
 lens-api/src/main/resources/cube-0.1.xsd        |   66 +-
 lens-api/src/main/resources/lens-errors.conf    |  316 ++-
 .../src/main/resources/scheduler-job-0.1.xsd    |  283 +++
 .../org/apache/lens/api/TestPathValidator.java  |  137 ++
 .../apache/lens/api/util/CommonUtilsTest.java   |   54 +
 .../apache/lens/doc/TestGenerateConfigDoc.java  |   21 +
 lens-cli/pom.xml                                |    2 +-
 .../lens/cli/commands/BaseLensCommand.java      |   47 +-
 .../commands/ConceptualTableCrudCommand.java    |   31 +
 .../lens/cli/commands/LensCRUDCommand.java      |   11 +-
 .../cli/commands/LensConnectionCommands.java    |   77 +-
 .../lens/cli/commands/LensCubeCommands.java     |   11 +-
 .../lens/cli/commands/LensDatabaseCommands.java |    3 +-
 .../cli/commands/LensDimensionCommands.java     |   12 +-
 .../commands/LensDimensionTableCommands.java    |   16 +-
 .../lens/cli/commands/LensFactCommands.java     |   16 +-
 .../cli/commands/LensLogResourceCommands.java   |   78 +
 .../cli/commands/LensNativeTableCommands.java   |    2 +-
 .../lens/cli/commands/LensQueryCommands.java    |  175 +-
 .../lens/cli/commands/LensStorageCommands.java  |    9 +-
 .../cli/commands/LogicalTableCrudCommand.java   |  112 +-
 .../cli/commands/PhysicalTableCrudCommand.java  |  129 --
 .../META-INF/spring/spring-shell-plugin.xml     |    3 +
 .../apache/lens/cli/ExecuteQueryCommandIT.java  |    4 +-
 .../lens/cli/TestLensConnectionCliCommands.java |   12 +-
 .../apache/lens/cli/TestLensCubeCommands.java   |   65 +-
 .../lens/cli/TestLensDatabaseCommands.java      |    3 +-
 .../lens/cli/TestLensDimensionCommands.java     |   10 +-
 .../cli/TestLensDimensionTableCommands.java     |   25 +-
 .../apache/lens/cli/TestLensFactCommands.java   |   20 +-
 .../TestLensFactCommandsWithMissingWeight.java  |    6 +-
 .../lens/cli/TestLensLogResourceCommands.java   |   97 +
 .../lens/cli/TestLensNativeTableCommands.java   |    6 +-
 .../apache/lens/cli/TestLensQueryCommands.java  |  195 +-
 .../lens/cli/TestLensStorageCommands.java       |    8 +-
 .../lens/cli/doc/TestGenerateCLIUserDoc.java    |    3 +-
 lens-cli/src/test/resources/cli-intro.apt       |   23 +-
 .../resources/dim-local-storage-element.xml     |    6 +-
 lens-cli/src/test/resources/dim1-local-part.xml |    7 +-
 .../src/test/resources/dim1-local-parts.xml     |    3 +-
 lens-cli/src/test/resources/dim_table.xml       |   15 +-
 lens-cli/src/test/resources/dim_table2.xml      |   13 +-
 .../drivers/hive/hive1/hivedriver-site.xml      |   80 +
 .../resources/fact-local-storage-element.xml    |    4 +-
 .../src/test/resources/fact1-local-part.xml     |    5 +-
 .../src/test/resources/fact1-local-parts.xml    |    3 +-
 lens-cli/src/test/resources/fact1.xml           |   17 +-
 .../src/test/resources/fact_without_weight.xml  |   57 +-
 lens-cli/src/test/resources/sample-cube.xml     |   33 +
 lens-cli/src/test/resources/test-detail.xml     |   32 +
 lens-cli/src/test/resources/test-dimension.xml  |   12 +
 lens-client/pom.xml                             |   24 +-
 .../java/org/apache/lens/client/LensClient.java |   37 +-
 .../apache/lens/client/LensClientConfig.java    |   14 +
 .../org/apache/lens/client/LensConnection.java  |   36 +-
 .../lens/client/LensConnectionParams.java       |   29 +
 .../apache/lens/client/LensMetadataClient.java  |   21 +-
 .../org/apache/lens/client/LensStatement.java   |  111 +-
 .../org/apache/lens/client/RequestFilter.java   |   36 +
 .../src/main/resources/lens-client-default.xml  |   15 +
 .../apache/lens/client/RequestTestFilter.java   |   37 +
 .../org/apache/lens/client/TestLensClient.java  |    1 +
 .../drivers/hive/hive1/hivedriver-site.xml      |   80 +
 .../src/test/resources/lens-client-site.xml     |   33 +
 lens-cube/pom.xml                               |    6 +-
 .../ColUnAvailableInTimeRangeException.java     |    6 +-
 .../FieldsCannotBeQueriedTogetherException.java |    6 +-
 .../lens/cube/error/LensCubeErrorCode.java      |   56 +-
 .../error/NoCandidateDimAvailableException.java |   47 +
 .../NoCandidateFactAvailableException.java      |   48 +
 .../lens/cube/metadata/AbstractBaseTable.java   |    8 +-
 .../lens/cube/metadata/BaseDimAttribute.java    |   43 +-
 .../org/apache/lens/cube/metadata/Cube.java     |   10 +-
 .../apache/lens/cube/metadata/CubeColumn.java   |    1 -
 .../lens/cube/metadata/CubeFactTable.java       |   23 +-
 .../lens/cube/metadata/CubeMetastoreClient.java |  321 ++-
 .../org/apache/lens/cube/metadata/DateUtil.java |  396 ++++
 .../apache/lens/cube/metadata/DerivedCube.java  |   70 +-
 .../apache/lens/cube/metadata/ExprColumn.java   |   60 +-
 .../lens/cube/metadata/FactPartition.java       |   16 +-
 .../lens/cube/metadata/InlineDimAttribute.java  |   97 -
 .../apache/lens/cube/metadata/JoinChain.java    |    8 +-
 .../lens/cube/metadata/MetastoreConstants.java  |    1 +
 .../lens/cube/metadata/MetastoreUtil.java       |  163 +-
 .../cube/metadata/ReferencedDimAtrribute.java   |   72 +-
 .../apache/lens/cube/metadata/SchemaGraph.java  |   45 +-
 .../org/apache/lens/cube/metadata/Storage.java  |    2 +-
 .../cube/metadata/StoragePartitionDesc.java     |    2 +-
 .../lens/cube/metadata/StorageTableDesc.java    |   21 +
 .../lens/cube/metadata/TableReference.java      |   73 +-
 .../lens/cube/metadata/TimePartition.java       |   15 +-
 .../lens/cube/metadata/TimePartitionRange.java  |    6 +-
 .../apache/lens/cube/metadata/TimeRange.java    |  219 ++
 .../apache/lens/cube/metadata/UpdatePeriod.java |  179 +-
 .../timeline/EndsAndHolesPartitionTimeline.java |    2 +-
 .../timeline/RangesPartitionTimeline.java       |    4 +-
 .../timeline/StoreAllPartitionTimeline.java     |    5 +-
 .../cube/parse/AbridgedTimeRangeWriter.java     |   13 +-
 .../lens/cube/parse/AggregateResolver.java      |   30 +-
 .../apache/lens/cube/parse/AliasReplacer.java   |   27 +-
 .../apache/lens/cube/parse/AutoJoinContext.java |  760 +++++++
 .../lens/cube/parse/BetweenTimeRangeWriter.java |   13 +-
 .../apache/lens/cube/parse/CandidateFact.java   |  157 +-
 .../cube/parse/CandidateTablePruneCause.java    |    4 +
 .../lens/cube/parse/CandidateTableResolver.java |   61 +-
 .../lens/cube/parse/CheckColumnMapping.java     |    5 +-
 .../apache/lens/cube/parse/CheckTableNames.java |    5 +-
 .../apache/lens/cube/parse/ColumnResolver.java  |   20 +-
 .../apache/lens/cube/parse/ContextRewriter.java |    6 +-
 .../lens/cube/parse/CubeQueryConfUtil.java      |    6 +
 .../lens/cube/parse/CubeQueryContext.java       |  178 +-
 .../lens/cube/parse/CubeQueryRewriter.java      |   32 +-
 .../lens/cube/parse/CubeSemanticAnalyzer.java   |   41 +-
 .../org/apache/lens/cube/parse/DateUtil.java    |  495 -----
 .../apache/lens/cube/parse/DefaultQueryAST.java |   74 +
 .../cube/parse/DenormalizationResolver.java     |   84 +-
 .../apache/lens/cube/parse/DimHQLContext.java   |   18 +-
 .../lens/cube/parse/DimOnlyHQLContext.java      |   24 +-
 .../lens/cube/parse/ExpressionResolver.java     |  118 +-
 .../apache/lens/cube/parse/FactHQLContext.java  |   66 -
 .../apache/lens/cube/parse/FieldValidator.java  |   17 +-
 .../apache/lens/cube/parse/GroupbyResolver.java |   25 +-
 .../lens/cube/parse/HQLContextInterface.java    |    7 +-
 .../org/apache/lens/cube/parse/HQLParser.java   |   42 +-
 .../org/apache/lens/cube/parse/JoinClause.java  |  144 ++
 .../apache/lens/cube/parse/JoinResolver.java    |  942 +--------
 .../org/apache/lens/cube/parse/JoinTree.java    |  164 ++
 .../lens/cube/parse/LeastPartitionResolver.java |    4 +-
 .../cube/parse/LightestDimensionResolver.java   |    4 +-
 .../lens/cube/parse/LightestFactResolver.java   |    4 +-
 .../cube/parse/MaxCoveringFactResolver.java     |    3 +-
 .../lens/cube/parse/MultiFactHQLContext.java    |   60 +-
 .../PartitionRangesForPartitionColumns.java     |    6 +-
 .../org/apache/lens/cube/parse/PruneCauses.java |   12 +
 .../org/apache/lens/cube/parse/QueryAST.java    |   86 +
 .../lens/cube/parse/SimpleHQLContext.java       |   65 +-
 .../lens/cube/parse/SingleFactHQLContext.java   |  101 -
 .../parse/SingleFactMultiStorageHQLContext.java |  250 ++-
 .../SingleFactSingleStorageHQLContext.java      |   85 +
 .../lens/cube/parse/StorageTableResolver.java   |  203 +-
 .../org/apache/lens/cube/parse/TimeRange.java   |  219 --
 .../apache/lens/cube/parse/TimeRangeWriter.java |    5 +-
 .../lens/cube/parse/TimerangeResolver.java      |   40 +-
 .../apache/lens/cube/parse/UnionHQLContext.java |   63 +-
 .../apache/lens/cube/parse/ValidationRule.java  |    5 +-
 .../FactPartitionBasedQueryCostCalculator.java  |    8 +-
 .../src/main/resources/olap-query-conf.xml      |   31 +
 .../lens/cube/metadata/CubeFactTableTest.java   |  104 +
 .../apache/lens/cube/metadata/DateFactory.java  |  207 ++
 .../cube/metadata/TestCubeMetastoreClient.java  | 1975 ++++++++----------
 .../apache/lens/cube/metadata/TestDateUtil.java |  297 +++
 .../lens/cube/metadata/TestExprColumn.java      |   20 +-
 .../lens/cube/metadata/TestFactPartition.java   |    4 +-
 .../lens/cube/metadata/TestTimePartition.java   |   64 +-
 .../lens/cube/metadata/UpdatePeriodTest.java    |   60 +
 .../timeline/TestPartitionTimelines.java        |  143 +-
 .../apache/lens/cube/parse/CubeTestSetup.java   |  718 ++++---
 .../FieldsCannotBeQueriedTogetherTest.java      |   55 +-
 .../lens/cube/parse/TestAggregateResolver.java  |   97 +-
 .../lens/cube/parse/TestBaseCubeQueries.java    |  245 ++-
 .../cube/parse/TestBetweenTimeRangeWriter.java  |   25 +-
 .../lens/cube/parse/TestCubeRewriter.java       | 1132 +++++-----
 .../apache/lens/cube/parse/TestDateUtil.java    |  295 ---
 .../cube/parse/TestDenormalizationResolver.java |  199 +-
 .../lens/cube/parse/TestExpressionContext.java  |    4 +-
 .../lens/cube/parse/TestExpressionResolver.java |   78 +-
 .../apache/lens/cube/parse/TestHQLParser.java   |   26 +-
 .../lens/cube/parse/TestJoinResolver.java       |  228 +-
 .../lens/cube/parse/TestORTimeRangeWriter.java  |   40 +-
 .../org/apache/lens/cube/parse/TestQuery.java   |  255 +++
 .../lens/cube/parse/TestQueryMetrics.java       |    2 +-
 .../lens/cube/parse/TestQueryRewrite.java       |   39 +-
 .../lens/cube/parse/TestRewriterPlan.java       |    6 +-
 .../apache/lens/cube/parse/TestStorageUtil.java |  115 +-
 .../lens/cube/parse/TestTimeRangeExtractor.java |   47 +-
 .../lens/cube/parse/TestTimeRangeResolver.java  |   32 +-
 .../lens/cube/parse/TestTimeRangeWriter.java    |   66 +-
 .../parse/TestTimeRangeWriterWithQuery.java     |  218 +-
 .../lens/driver/cube/TestMinCostSelector.java   |    8 +-
 lens-cube/src/test/resources/hive-site.xml      |    4 +-
 lens-cube/src/test/resources/log4j.properties   |   36 -
 lens-cube/src/test/resources/logback.xml        |   36 +
 lens-dist/pom.xml                               |   16 +-
 .../src/deb/control/client-control/control      |    8 +-
 .../src/deb/control/server-control/control      |    8 +-
 lens-dist/src/main/assembly/bin-dist.xml        |    7 +-
 lens-docker/lens-test/Dockerfile                |    2 +-
 lens-docker/lens-test/lens-bootstrap.sh         |    2 +-
 lens-driver-es/pom.xml                          |   78 +
 .../lens/driver/es/ASTTraverserForES.java       |  365 ++++
 .../org/apache/lens/driver/es/ESDriver.java     |  392 ++++
 .../apache/lens/driver/es/ESDriverConfig.java   |   88 +
 .../java/org/apache/lens/driver/es/ESQuery.java |   55 +
 .../apache/lens/driver/es/client/ESClient.java  |  169 ++
 .../lens/driver/es/client/ESResultSet.java      |   76 +
 .../driver/es/client/jest/JestClientImpl.java   |   94 +
 .../client/jest/JestResultSetTransformer.java   |  264 +++
 .../driver/es/exceptions/ESClientException.java |   63 +
 .../es/exceptions/InvalidQueryException.java    |   67 +
 .../lens/driver/es/grammar/Aggregations.java    |   64 +
 .../driver/es/grammar/LogicalOperators.java     |   82 +
 .../lens/driver/es/grammar/Predicates.java      |  150 ++
 .../es/translator/ASTCriteriaVisitor.java       |   41 +
 .../lens/driver/es/translator/ASTVisitor.java   |   49 +
 .../es/translator/CriteriaVisitorFactory.java   |   26 +
 .../lens/driver/es/translator/ESVisitor.java    |  178 ++
 .../es/translator/impl/ESAggregateVisitor.java  |  105 +
 .../es/translator/impl/ESCriteriaVisitor.java   |   71 +
 .../impl/ESCriteriaVisitorFactory.java          |   29 +
 .../es/translator/impl/ESTermVisitor.java       |   94 +
 .../src/main/resources/esdriver-default.xml     |   62 +
 .../org/apache/lens/driver/es/ESDriverTest.java |   43 +
 .../org/apache/lens/driver/es/MockClientES.java |  145 ++
 .../lens/driver/es/QueryTranslationTest.java    |  136 ++
 .../driver/es/ResultSetTransformationTest.java  |  573 +++++
 .../lens/driver/es/ScrollingQueryTest.java      |   93 +
 .../src/test/resources/invalid-queries.data     |   37 +
 .../src/test/resources/valid-queries.data       |  101 +
 lens-driver-hive/pom.xml                        |    2 +-
 .../org/apache/lens/driver/hive/HiveDriver.java |  232 +-
 .../lens/driver/hive/HiveInMemoryResultSet.java |    4 +-
 .../driver/hive/HivePersistentResultSet.java    |    9 +-
 .../lens/driver/hive/LensHiveErrorCode.java     |   36 +
 .../apache/lens/driver/hive/TestHiveDriver.java |  240 ++-
 .../lens/driver/hive/TestRemoteHiveDriver.java  |   20 +-
 .../drivers/hive/hive1/hivedriver-site.xml      |   49 +
 .../src/test/resources/hive-site.xml            |    2 +-
 .../src/test/resources/hivedriver-site.xml      |   49 -
 .../src/test/resources/log4j.properties         |   25 -
 lens-driver-hive/src/test/resources/logback.xml |   32 +
 .../src/test/resources/priority_tests.data      |    1 +
 lens-driver-jdbc/pom.xml                        |   26 +-
 .../lens/driver/jdbc/ColumnarSQLRewriter.java   |  392 +++-
 .../jdbc/DataSourceConnectionProvider.java      |   88 +-
 .../org/apache/lens/driver/jdbc/JDBCDriver.java |  103 +-
 .../driver/jdbc/JDBCDriverConfConstants.java    |   57 +-
 .../apache/lens/driver/jdbc/JDBCResultSet.java  |    4 +-
 .../src/main/resources/jdbcdriver-default.xml   |   19 +
 .../driver/jdbc/TestColumnarSQLRewriter.java    |  803 +++++--
 .../jdbc/TestDataSourceConnectionProvider.java  |    7 +-
 .../apache/lens/driver/jdbc/TestJDBCFinal.java  |   19 +-
 .../apache/lens/driver/jdbc/TestJdbcDriver.java |   45 +-
 .../drivers/jdbc/jdbc1/jdbcdriver-site.xml      |   70 +
 .../src/test/resources/hive-site.xml            |    2 +-
 .../src/test/resources/jdbcdriver-site.xml      |   57 -
 lens-examples/pom.xml                           |    2 +-
 .../apache/lens/examples/SampleMetastore.java   |   11 +-
 .../org/apache/lens/examples/SampleQueries.java |   52 +-
 lens-examples/src/main/resources/customer.xml   |    5 +-
 .../src/main/resources/example-job.xml          |   55 +
 lens-examples/src/main/resources/sales-cube.xml |   18 +-
 lens-ml-dist/pom.xml                            |    2 +-
 lens-ml-lib/pom.xml                             |    6 +-
 .../apache/lens/client/LensMLJerseyClient.java  |    5 +-
 .../org/apache/lens/ml/impl/LensMLImpl.java     |    2 +-
 .../org/apache/lens/ml/impl/ModelLoader.java    |   13 +-
 .../lens/ml/server/MLServiceResource.java       |    2 +-
 .../drivers/hive/hive1/hivedriver-site.xml      |   80 +
 lens-ml-lib/src/test/resources/lens-site.xml    |  272 +--
 lens-query-lib/pom.xml                          |    2 +-
 .../lens/lib/query/AbstractFileFormatter.java   |   32 +-
 .../lens/lib/query/FilePersistentFormatter.java |   12 +-
 .../lens/lib/query/HadoopFileFormatter.java     |   14 +-
 .../lens/lib/query/LensFileOutputFormat.java    |   21 +
 .../lens/lib/query/WrappedFileFormatter.java    |   27 +-
 .../apache/lens/lib/query/ZipFileFormatter.java |   13 +-
 .../lib/query/MockLensResultSetMetadata.java    |   65 +
 .../lib/query/TestAbstractFileFormatter.java    |  155 +-
 .../lib/query/TestFilePersistentFormatter.java  |   80 +-
 .../src/test/resources/log4j.properties         |   25 -
 lens-query-lib/src/test/resources/logback.xml   |   32 +
 lens-regression/pom.xml                         |   43 +-
 .../src/additional/java/SampleUdf.java          |   36 +
 .../core/constants/QueryInventory.java          |   57 +
 .../core/helpers/LensServerHelper.java          |    5 -
 .../core/helpers/MetastoreHelper.java           |  218 +-
 .../regression/core/helpers/QueryHelper.java    |   81 +-
 .../core/helpers/ServiceManagerHelper.java      |    8 +-
 .../regression/core/helpers/SessionHelper.java  |    4 -
 .../core/testHelper/BaseTestClass.java          |   41 +
 .../apache/lens/regression/util/AssertUtil.java |   62 +-
 .../apache/lens/regression/util/HadoopUtil.java |   61 +
 .../org/apache/lens/regression/util/Util.java   |  236 ++-
 lens-server-api/pom.xml                         |   18 +-
 .../lens/server/api/LensConfConstants.java      |   99 +-
 .../apache/lens/server/api/LensErrorInfo.java   |   34 +
 .../server/api/driver/AbstractLensDriver.java   |   78 +
 .../lens/server/api/driver/DriverQueryPlan.java |    2 +-
 .../server/api/driver/DriverQueryStatus.java    |    7 +-
 .../server/api/driver/InMemoryResultSet.java    |   24 +-
 .../lens/server/api/driver/LensDriver.java      |   26 +-
 .../lens/server/api/driver/LensResultSet.java   |   11 +-
 .../api/driver/LensResultSetMetadata.java       |   49 +
 .../server/api/driver/PersistentResultSet.java  |   26 +-
 .../lens/server/api/error/LensException.java    |  114 +-
 .../api/error/LensMultiCauseException.java      |   20 +-
 .../server/api/events/AsyncEventListener.java   |   50 +-
 .../server/api/query/AbstractQueryContext.java  |   11 +-
 .../api/query/DriverSelectorQueryContext.java   |    4 +-
 .../server/api/query/FinishedLensQuery.java     |   50 +-
 .../server/api/query/PreparedQueryContext.java  |    7 +-
 .../lens/server/api/query/QueryCancelled.java   |   10 +-
 .../lens/server/api/query/QueryClosed.java      |   10 +-
 .../lens/server/api/query/QueryContext.java     |   68 +-
 .../lens/server/api/query/QueryEnded.java       |    8 +-
 .../server/api/query/QueryExecutionService.java |    5 +-
 .../lens/server/api/query/QueryFailed.java      |   10 +-
 .../server/api/query/QueryOutputFormatter.java  |   13 +-
 .../lens/server/api/query/QuerySuccess.java     |   10 +-
 .../query/collect/ImmutableQueryCollection.java |    6 +
 .../MaxConcurrentDriverQueriesConstraint.java   |   54 +-
 ...oncurrentDriverQueriesConstraintFactory.java |   49 +-
 .../server/api/query/save/SavedQueryHelper.java |   93 +
 .../api/query/save/SavedQueryService.java       |   94 +
 .../exception/MissingParameterException.java    |   46 +
 .../exception/ParameterCollectionException.java |   53 +
 .../save/exception/ParameterValueException.java |   51 +
 .../save/exception/PrivilegeException.java      |   51 +
 .../save/exception/SavedQueryNotFound.java      |   45 +
 .../save/exception/ValueEncodeException.java    |   49 +
 .../param/ParameterCollectionTypeEncoder.java   |   70 +
 .../save/param/ParameterDataTypeEncoder.java    |   91 +
 .../api/query/save/param/ParameterParser.java   |  135 ++
 .../api/query/save/param/ParameterResolver.java |  126 ++
 .../api/scheduler/QuerySchedulerService.java    |   26 -
 .../server/api/scheduler/SchedulerJobStats.java |   29 +
 .../server/api/scheduler/SchedulerService.java  |  238 +++
 .../apache/lens/server/api/util/LensUtil.java   |   10 +
 .../lens/server/api/LensServerAPITestUtil.java  |   47 +
 .../lens/server/api/driver/MockDriver.java      |   34 +-
 .../lens/server/api/query/MockQueryContext.java |   29 +-
 .../api/query/TestAbstractQueryContext.java     |  118 +-
 ...axConcurrentDriverQueriesConstraintTest.java |  181 +-
 .../api/query/cost/MockQueryCostCalculator.java |   31 +
 .../priority/MockQueryPriorityDecider.java      |   30 +
 .../api/query/save/TestParameterParser.java     |   79 +
 .../api/query/save/TestParameterResolution.java |  176 ++
 lens-server/enunciate.xml                       |    3 +-
 lens-server/pom.xml                             |   30 +-
 .../org/apache/lens/server/BaseLensService.java |   36 +-
 .../apache/lens/server/EventServiceImpl.java    |    9 +-
 .../org/apache/lens/server/LensApplication.java |    2 +-
 .../lens/server/LensApplicationListener.java    |    2 +-
 .../apache/lens/server/LensRequestListener.java |    6 +-
 .../java/org/apache/lens/server/LensServer.java |   13 +-
 .../org/apache/lens/server/LensServerConf.java  |   23 +-
 .../org/apache/lens/server/LensServices.java    |    9 +-
 .../lens/server/LensServletContextListener.java |   26 -
 .../LensJAXBValidationExceptionMapper.java      |   53 +
 .../lens/server/error/LensServerErrorCode.java  |   18 +-
 .../UnSupportedQuerySubmitOpException.java      |    6 +-
 .../metastore/CubeMetastoreServiceImpl.java     |   79 +-
 .../apache/lens/server/metastore/JAXBUtils.java |  119 +-
 .../lens/server/metastore/MetastoreApp.java     |    6 +
 .../server/metastore/MetastoreResource.java     |  399 ++--
 .../lens/server/metrics/MetricsServiceImpl.java |   10 +-
 .../lens/server/query/LensPersistentResult.java |   82 +-
 .../apache/lens/server/query/LensServerDAO.java |   56 +-
 .../query/QueryContextPriorityComparator.java   |   48 +
 .../lens/server/query/QueryEndNotifier.java     |  145 +-
 .../server/query/QueryExecutionServiceImpl.java |  759 ++++---
 .../QueryExecutionStatisticsGenerator.java      |   11 +-
 .../lens/server/query/QueryResultPurger.java    |  177 ++
 .../lens/server/query/QueryServiceResource.java |   50 +-
 .../lens/server/query/ResultFormatter.java      |   24 +-
 .../DefaultEstimatedQueryCollection.java        |    9 +-
 .../query/collect/DefaultQueryCollection.java   |   38 +-
 .../IntersectingWaitingQueriesSelector.java     |  107 -
 .../ThreadSafeEstimatedQueryCollection.java     |   22 +-
 .../collect/ThreadSafeQueryCollection.java      |    5 +
 .../collect/UnioningWaitingQueriesSelector.java |   93 +
 .../lens/server/query/save/SavedQueryApp.java   |   48 +
 .../lens/server/query/save/SavedQueryDao.java   |  483 +++++
 .../server/query/save/SavedQueryResource.java   |  307 +++
 .../query/save/SavedQueryServiceImpl.java       |  141 ++
 .../lens/server/quota/QuotaServiceImpl.java     |    2 +-
 .../apache/lens/server/rewrite/RewriteUtil.java |   28 +-
 .../scheduler/QuerySchedulerServiceImpl.java    |   55 -
 .../server/scheduler/SchedulerServiceImpl.java  |  200 ++
 .../server/session/DatabaseResourceService.java |   49 +-
 .../lens/server/session/HiveSessionService.java |    5 +-
 .../lens/server/session/LensSessionImpl.java    |   38 +-
 .../lens/server/session/SessionResource.java    |    2 +-
 .../lens/server/stats/StatisticsService.java    |    7 +-
 .../stats/event/LoggableLensStatistics.java     |    2 +-
 .../stats/store/log/LogStatisticsStore.java     |    2 +-
 .../store/log/StatisticsLogFileScannerTask.java |   13 +-
 .../stats/store/log/StatisticsLogLayout.java    |   18 +-
 .../log/StatisticsLogPartitionHandler.java      |    2 +-
 .../store/log/StatisticsLogRollupHandler.java   |    2 +-
 .../lens/server/ui/MetastoreUIResource.java     |    2 +-
 .../lens/server/ui/QueryServiceUIResource.java  |    9 +-
 .../lens/server/ui/SessionUIResource.java       |    2 +-
 .../LDAPBackedDatabaseUserConfigLoader.java     |    4 +-
 .../server/util/FairPriorityBlockingQueue.java  |   12 +-
 .../apache/lens/server/util/ScannedPaths.java   |  136 +-
 .../apache/lens/server/util/UtilityMethods.java |   20 +-
 .../src/main/resources/lensserver-default.xml   |   66 +-
 .../src/main/webapp/WEB-INF/log4j.properties    |   28 -
 lens-server/src/main/webapp/WEB-INF/web.xml     |    6 -
 .../server/LensAllApplicationJerseyTest.java    |    8 +-
 .../org/apache/lens/server/LensJerseyTest.java  |   32 +-
 .../apache/lens/server/LensServerTestUtil.java  |  298 +++
 .../org/apache/lens/server/LensTestUtil.java    |  298 ---
 .../apache/lens/server/TestLensApplication.java |    2 +-
 .../org/apache/lens/server/TestServerMode.java  |    4 +-
 .../apache/lens/server/TestServerRestart.java   |  118 +-
 .../apache/lens/server/TestServiceProvider.java |    4 +-
 .../common/ErrorResponseExpectedData.java       |    2 +-
 .../lens/server/common/FailingQueryDriver.java  |   45 +
 .../lens/server/common/RestAPITestUtil.java     |  127 +-
 .../server/healthcheck/TestHealthChecks.java    |   11 +-
 .../server/metastore/TestMetastoreService.java  |  596 ++++--
 .../metrics/TestResourceMethodMetrics.java      |   13 +-
 .../server/query/QueryAPIErrorResponseTest.java |   64 +-
 .../QueryContextPriorityComparatorTest.java     |   99 +
 .../lens/server/query/TestEventService.java     |   58 +-
 .../apache/lens/server/query/TestLensDAO.java   |   13 +-
 .../lens/server/query/TestQueryConstraints.java |  235 +++
 .../server/query/TestQueryEndEmailNotifier.java |  227 +-
 .../server/query/TestQueryResultPurger.java     |  100 +
 .../lens/server/query/TestQueryService.java     |  541 ++---
 .../lens/server/query/TestResultFormatting.java |   42 +-
 .../collect/DefaultQueryCollectionTest.java     |   34 +-
 .../IntersectingWaitingQueriesSelectorTest.java |  185 --
 .../server/query/collect/QueryCollectUtil.java  |   39 +-
 .../UnioningWaitingQueriesSelectorTest.java     |  135 ++
 .../ThreadSafeEstimatedQueryCollectionTest.java |   80 +
 .../query/save/TestSavedQueryService.java       |  274 +++
 .../lens/server/rewrite/TestRewriting.java      |   44 +-
 .../session/TestDatabaseResourceService.java    |    4 +-
 .../server/session/TestSessionClassLoaders.java |    4 +-
 .../lens/server/session/TestSessionExpiry.java  |    4 +-
 .../server/session/TestSessionResource.java     |   13 +-
 .../server/stats/TestLogStatisticsStore.java    |   28 +-
 .../stats/TestStatisticsLogFileScannerTask.java |   16 +-
 .../TestStatisticsLogPartitionHandler.java      |   15 +-
 .../lens/server/util/TestScannedPaths.java      |  314 ++-
 .../drivers/hive/hive1/hivedriver-site.xml      |   85 +
 .../drivers/hive/hive2/hivedriver-site.xml      |   85 +
 .../drivers/jdbc/jdbc1/jdbcdriver-site.xml      |   58 +
 .../mock/fail1/failing-query-driver-site.xml    |   32 +
 .../mockHive/mockHive1/hivedriver-site.xml      |   95 +
 .../mockHive/mockHive2/hivedriver-site.xml      |   95 +
 lens-server/src/test/resources/hive-site.xml    |    2 +-
 .../src/test/resources/hivedriver-site.xml      |   80 -
 .../src/test/resources/jdbcdriver-site.xml      |   55 -
 lens-server/src/test/resources/lens-site.xml    |   38 +-
 lens-server/src/test/resources/log4j.properties |   50 -
 lens-server/src/test/resources/logback.xml      |   68 +
 lens-ship-jars/LICENSE                          |  201 ++
 lens-ship-jars/pom.xml                          |   87 +
 lens-ship-jars/src/site/apt/index.apt           |   20 +
 lens-storage-db/pom.xml                         |    2 +-
 .../src/test/resources/hive-site.xml            |    2 +-
 lens-ui/LICENSE                                 |  201 ++
 lens-ui/README.markdown                         |   85 +
 lens-ui/app/actions/AdhocQueryActions.js        |  366 ++++
 lens-ui/app/actions/LoginActions.js             |   51 +
 lens-ui/app/adapters/AdhocQueryAdapter.js       |  219 ++
 lens-ui/app/adapters/AuthenticationAdapter.js   |   55 +
 lens-ui/app/adapters/BaseAdapter.js             |   93 +
 lens-ui/app/adapters/XMLAdapter.js              |   37 +
 lens-ui/app/app.js                              |   62 +
 lens-ui/app/components/AboutComponent.js        |   33 +
 lens-ui/app/components/AdhocQueryComponent.js   |   46 +
 lens-ui/app/components/AppComponent.js          |   39 +
 lens-ui/app/components/CubeSchemaComponent.js   |  196 ++
 lens-ui/app/components/CubeTreeComponent.js     |  179 ++
 lens-ui/app/components/DatabaseComponent.js     |  127 ++
 lens-ui/app/components/HeaderComponent.js       |   87 +
 lens-ui/app/components/LoaderComponent.js       |   34 +
 lens-ui/app/components/LoginComponent.js        |  109 +
 lens-ui/app/components/LogoutComponent.js       |   41 +
 lens-ui/app/components/QueryBoxComponent.js     |  527 +++++
 .../components/QueryDetailResultComponent.js    |  189 ++
 .../app/components/QueryOperationsComponent.js  |   92 +
 .../app/components/QueryParamRowComponent.js    |  163 ++
 lens-ui/app/components/QueryParamsComponent.js  |  154 ++
 lens-ui/app/components/QueryPreviewComponent.js |  175 ++
 lens-ui/app/components/QueryResultsComponent.js |  120 ++
 .../RequireAuthenticationComponent.js           |   37 +
 lens-ui/app/components/SavedQueriesComponent.js |  180 ++
 .../components/SavedQueryPreviewComponent.js    |  142 ++
 lens-ui/app/components/SidebarComponent.js      |   38 +
 lens-ui/app/components/TableSchemaComponent.js  |  135 ++
 lens-ui/app/components/TableTreeComponent.js    |  236 +++
 lens-ui/app/constants/AdhocQueryConstants.js    |   58 +
 lens-ui/app/constants/AppConstants.js           |   28 +
 lens-ui/app/dispatcher/AppDispatcher.js         |   14 +
 lens-ui/app/stores/AdhocQueryStore.js           |  135 ++
 lens-ui/app/stores/CubeStore.js                 |   83 +
 lens-ui/app/stores/DatabaseStore.js             |   62 +
 lens-ui/app/stores/SavedQueryStore.js           |   99 +
 lens-ui/app/stores/TableStore.js                |  106 +
 lens-ui/app/stores/UserStore.js                 |  122 ++
 lens-ui/app/styles/css/global.css               |   40 +
 lens-ui/app/styles/css/login.css                |   56 +
 lens-ui/app/styles/css/query-component.css      |   33 +
 lens-ui/app/styles/css/tree.css                 |   50 +
 lens-ui/app/styles/less/globals.less            |   22 +
 lens-ui/app/utils/ErrorParser.js                |   53 +
 lens-ui/config.json                             |    4 +
 lens-ui/index.html                              |  101 +
 lens-ui/package.json                            |   56 +
 lens-ui/pom.xml                                 |   51 +
 lens-ui/server.js                               |   80 +
 lens-ui/webpack.config.js                       |   57 +
 pom.xml                                         |  606 +++++-
 src/site/apt/admin/config-server.apt            |   52 +-
 src/site/apt/admin/config.apt                   |  132 +-
 src/site/apt/admin/esdriver-config.apt          |   41 +
 src/site/apt/admin/jdbcdriver-config.apt        |   54 +-
 src/site/apt/admin/monitoring.apt               |   32 +-
 src/site/apt/admin/server-components.apt        |   74 +-
 src/site/apt/developer/commit.apt               |   58 +-
 src/site/apt/developer/contribute.apt           |  130 +-
 src/site/apt/index.apt                          |   14 +-
 src/site/apt/lenshome/install-and-run.apt       |   31 +-
 .../apt/lenshome/pseudo-distributed-setup.apt   |    2 +-
 src/site/apt/lenshome/quick-start.apt           |    6 +-
 src/site/apt/releases/compatibility.apt         |    4 +-
 src/site/apt/releases/download.apt              |   10 +-
 src/site/apt/releases/release-history.apt       |   54 +-
 src/site/apt/user/cli.apt                       |   68 +-
 src/site/apt/user/client-config.apt             |   10 +-
 src/site/apt/user/index.apt                     |  117 +-
 src/site/apt/user/olap-cube.apt                 |  407 +++-
 src/site/apt/user/olap-query-conf.apt           |   50 +-
 .../resources/images/apache-incubator-logo.png  |  Bin 4234 -> 0 bytes
 src/site/site.xml                               |   10 +-
 tools/conf-pseudo-distr/client/log4j.properties |   38 -
 tools/conf-pseudo-distr/client/logback.xml      |   56 +
 .../drivers/hive/hive1/hivedriver-site.xml      |   57 +
 .../drivers/jdbc/jdbc1/jdbcdriver-site.xml      |   50 +
 .../server/hivedriver-site.xml                  |   57 -
 .../server/jdbcdriver-site.xml                  |   50 -
 tools/conf-pseudo-distr/server/lens-site.xml    |    2 +-
 tools/conf-pseudo-distr/server/log4j.properties |   85 -
 tools/conf-pseudo-distr/server/logback.xml      |  136 ++
 tools/conf/client/log4j.properties              |   38 -
 tools/conf/client/logback.xml                   |   56 +
 .../drivers/hive/hive1/hivedriver-site.xml      |   41 +
 .../drivers/jdbc/jdbc1/jdbcdriver-site.xml      |   50 +
 tools/conf/server/hivedriver-site.xml           |   41 -
 tools/conf/server/jdbcdriver-site.xml           |   50 -
 tools/conf/server/lens-site.xml                 |    2 +-
 tools/conf/server/log4j.properties              |   85 -
 tools/conf/server/logback.xml                   |  136 ++
 tools/scripts/generate-site-public.sh           |    2 +-
 591 files changed, 34788 insertions(+), 12083 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/25a17dfc/lens-ml-lib/pom.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lens/blob/25a17dfc/lens-ml-lib/src/main/java/org/apache/lens/client/LensMLJerseyClient.java
----------------------------------------------------------------------
diff --cc lens-ml-lib/src/main/java/org/apache/lens/client/LensMLJerseyClient.java
index 20024a1,2ccdf2a..f693cb7
--- a/lens-ml-lib/src/main/java/org/apache/lens/client/LensMLJerseyClient.java
+++ b/lens-ml-lib/src/main/java/org/apache/lens/client/LensMLJerseyClient.java
@@@ -21,23 -21,24 +21,20 @@@ package org.apache.lens.client
  import java.util.List;
  import java.util.Map;
  
 -import javax.ws.rs.NotFoundException;
  import javax.ws.rs.client.Client;
- import javax.ws.rs.client.ClientBuilder;
  import javax.ws.rs.client.Entity;
  import javax.ws.rs.client.WebTarget;
 -import javax.ws.rs.core.Form;
  import javax.ws.rs.core.MediaType;
  
 +import org.apache.lens.api.APIResult;
  import org.apache.lens.api.LensSessionHandle;
  import org.apache.lens.api.StringList;
 -import org.apache.lens.ml.api.ModelMetadata;
 -import org.apache.lens.ml.api.TestReport;
 +import org.apache.lens.ml.api.*;
 +import org.apache.lens.server.api.error.LensException;
  
 -import org.apache.hadoop.conf.Configuration;
 -
 -import org.glassfish.jersey.media.multipart.FormDataBodyPart;
 -import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
 -import org.glassfish.jersey.media.multipart.FormDataMultiPart;
 +import org.apache.commons.logging.Log;
 +import org.apache.commons.logging.LogFactory;
  
- import org.glassfish.jersey.media.multipart.MultiPartFeature;
- 
  import lombok.extern.slf4j.Slf4j;
  
  /*
@@@ -93,16 -82,8 +90,16 @@@ public class LensMLJerseyClient 
      this.sessionHandle = sessionHandle;
    }
  
 +  public void close() {
 +    try {
 +      connection.close();
 +    } catch (Exception exc) {
 +      LOG.error("Error closing connection", exc);
 +    }
 +  }
 +
    protected WebTarget getMLWebTarget() {
-     Client client = ClientBuilder.newBuilder().register(MultiPartFeature.class).build();
+     Client client = connection.buildClient();
      LensConnectionParams connParams = connection.getLensConnectionParams();
      String baseURI = connParams.getBaseConnectionUrl();
      String mlURI = connParams.getConf().get(LENS_ML_RESOURCE_PATH, DEFAULT_ML_RESOURCE_PATH);
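
The remainder of getMLWebTarget() is not visible in this hunk; a plausible completion, assuming the usual JAX-RS composition of base URI and resource path, would be:

  protected WebTarget getMLWebTarget() {
    Client client = connection.buildClient(); // shared client from LensConnection, as in the hunk above
    LensConnectionParams connParams = connection.getLensConnectionParams();
    String baseURI = connParams.getBaseConnectionUrl();
    String mlURI = connParams.getConf().get(LENS_ML_RESOURCE_PATH, DEFAULT_ML_RESOURCE_PATH);
    return client.target(baseURI).path(mlURI); // assumption: plain path join, no extra segments
  }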

http://git-wip-us.apache.org/repos/asf/lens/blob/25a17dfc/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/LensMLImpl.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lens/blob/25a17dfc/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/ModelLoader.java
----------------------------------------------------------------------
diff --cc lens-ml-lib/src/main/java/org/apache/lens/ml/impl/ModelLoader.java
index dc23807,8a69545..fa80a56
--- a/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/ModelLoader.java
+++ b/lens-ml-lib/src/main/java/org/apache/lens/ml/impl/ModelLoader.java
@@@ -36,48 -34,39 +36,47 @@@ import org.apache.hadoop.hive.conf.Hive
  
  import com.google.common.cache.Cache;
  import com.google.common.cache.CacheBuilder;
 -import lombok.extern.slf4j.Slf4j;
  
 -/**
 - * Load ML models from a FS location.
 - */
 -@Slf4j
 -public final class ModelLoader {
 -  private ModelLoader() {
 -  }
 -
 -  /** The Constant MODEL_PATH_BASE_DIR. */
 +public class ModelLoader {
 +  /**
 +   * The Constant MODEL_PATH_BASE_DIR.
 +   */
    public static final String MODEL_PATH_BASE_DIR = "lens.ml.model.basedir";
  
 -  /** The Constant MODEL_PATH_BASE_DIR_DEFAULT. */
 +  /**
 +   * The Constant MODEL_PATH_BASE_DIR_DEFAULT.
 +   */
    public static final String MODEL_PATH_BASE_DIR_DEFAULT = "file:///tmp";
 -
 -  /** The Constant TEST_REPORT_BASE_DIR. */
 +  /**
 +   * The Constant LOG.
 +   */
 +  public static final Log LOG = LogFactory.getLog(ModelLoader.class);
 +  /**
 +   * The Constant TEST_REPORT_BASE_DIR.
 +   */
    public static final String TEST_REPORT_BASE_DIR = "lens.ml.test.basedir";
-   /**
-    * The Constant TEST_REPORT_BASE_DIR_DEFAULT.
-    */
-   public static final String TEST_REPORT_BASE_DIR_DEFAULT = "file:///tmp/ml_reports";
-   /**
-    * The Constant MODEL_CACHE_SIZE.
-    */
+ 
+   /** The Constant TEST_REPORT_BASE_DIR_DEFAULT. */
+   public static final String TEST_REPORT_BASE_DIR_DEFAULT = MODEL_PATH_BASE_DIR_DEFAULT + "/ml_reports";
+ 
+   // Model cache settings
+   /** The Constant MODEL_CACHE_SIZE. */
    public static final long MODEL_CACHE_SIZE = 10;
  
 -  /** The Constant MODEL_CACHE_TIMEOUT. */
 +  // Model cache settings
 +  /**
 +   * The Constant MODEL_CACHE_TIMEOUT.
 +   */
    public static final long MODEL_CACHE_TIMEOUT = 3600000L; // one hour
 -
 -  /** The model cache. */
 -  private static Cache<Path, MLModel> modelCache = CacheBuilder.newBuilder().maximumSize(MODEL_CACHE_SIZE)
 +  /**
 +   * The model cache.
 +   */
 +  private static Cache<Path, TrainedModel> modelCache = CacheBuilder.newBuilder().maximumSize(MODEL_CACHE_SIZE)
      .expireAfterAccess(MODEL_CACHE_TIMEOUT, TimeUnit.MILLISECONDS).build();
  
 +  private ModelLoader() {
 +  }
 +
    /**
     * Gets the model location.
     *
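
A hedged sketch of how the Guava cache declared above is typically consulted, via Cache.get(key, valueLoader); getModel and loadFromPath are placeholders here, not the actual ModelLoader methods:

  import java.util.concurrent.Callable;
  import java.util.concurrent.ExecutionException;

  static TrainedModel getModel(final Path modelPath) throws ExecutionException {
    // Returns the cached model, loading and caching it on a miss.
    return modelCache.get(modelPath, new Callable<TrainedModel>() {
      @Override
      public TrainedModel call() throws Exception {
        return loadFromPath(modelPath); // placeholder for the real deserialization step
      }
    });
  }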

http://git-wip-us.apache.org/repos/asf/lens/blob/25a17dfc/lens-ml-lib/src/main/java/org/apache/lens/ml/server/MLServiceResource.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lens/blob/25a17dfc/lens-ml-lib/src/test/resources/lens-site.xml
----------------------------------------------------------------------
diff --cc lens-ml-lib/src/test/resources/lens-site.xml
index 2d32d20,9be7850..854f861
--- a/lens-ml-lib/src/test/resources/lens-site.xml
+++ b/lens-ml-lib/src/test/resources/lens-site.xml
@@@ -23,141 -23,140 +23,141 @@@
  <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
  
  <configuration>
-     <property>
-         <name>lens.server.drivers</name>
-         <value>org.apache.lens.driver.hive.HiveDriver</value>
-     </property>
- 
-     <property>
-         <name>test.lens.site.key</name>
-         <value>gsvalue</value>
-     </property>
- 
-     <property>
-         <name>lens.server.enable.console.metrics</name>
-         <value>false</value>
-         <description>Enable metrics to be reported on console</description>
-     </property>
- 
-     <property>
-         <name>lens.server.persist.location</name>
-         <value>target/persist-dir</value>
-     </property>
- 
-     <property>
-         <name>lens.query.result.parent.dir</name>
-         <value>target/lens-results</value>
-     </property>
- 
-     <property>
-         <name>hive.server2.query.log.dir</name>
-         <value>target/query-logs</value>
-     </property>
- 
-     <property>
-         <name>hive.server2.authentication</name>
-         <value>CUSTOM</value>
-     </property>
- 
-     <property>
-         <name>hive.server2.custom.authentication.class</name>
-         <value>org.apache.lens.server.auth.FooBarAuthenticationProvider</value>
-     </property>
- 
- 
-     <property>
-         <name>lens.query.enable.mail.notify</name>
-         <value>false</value>
-         <description>When a query ends, whether to notify the submitter by mail or not.</description>
-     </property>
- 
-     <property>
-         <name>lens.server.mail.from.address</name>
-         <value>blah@company.com</value>
-         <description>The from field in the notifier mail to the submitter.</description>
-     </property>
- 
-     <property>
-         <name>lens.server.mail.host</name>
-         <value>localhost</value>
-         <description>SMTP Host for sending mail</description>
-     </property>
- 
-     <property>
-         <name>lens.server.mail.port</name>
-         <value>25000</value>
-         <description>SMTP Port</description>
-     </property>
- 
-     <property>
-         <name>lens.server.ws.resourcenames</name>
-         <value>session,metastore,query,quota,scheduler,index,test,ml</value>
-         <description>These JAX-RS resources would be started in the specified order when lens-server starts up
-         </description>
-     </property>
- 
-     <property>
-         <name>lens.server.test.ws.resource.impl</name>
-         <value>org.apache.lens.server.TestResource</value>
-         <description>Implementation class for Test Resource</description>
-     </property>
- 
-     <property>
-         <name>lens.server.ml.ws.resource.impl</name>
-         <value>org.apache.lens.ml.server.MLServiceResource</value>
-         <description>Implementation class for ML Service Resource</description>
-     </property>
- 
-     <property>
-         <name>lens.server.user.resolver.type</name>
-         <value>FIXED</value>
-     </property>
-     <property>
-         <name>lens.server.user.resolver.fixed.value</name>
-         <value>testlensuser</value>
-     </property>
-     <property>
-         <name>lens.server.domain</name>
-         <value>localhost</value>
-     </property>
- 
-     <property>
-         <!-- Immediately insert all finished queries to DB -->
-         <name>lens.server.max.finished.queries</name>
-         <value>1</value>
-     </property>
- 
-     <property>
-         <name>lens.server.db.jdbc.url</name>
-         <value>jdbc:hsqldb:target/queries.db</value>
-     </property>
- 
-     <property>
-         <name>lens.server.servicenames</name>
-         <value>session,query,metastore,scheduler,quota,ml,mocknonlens</value>
-     </property>
- 
-     <property>
-         <name>lens.server.ml.service.impl</name>
-         <value>org.apache.lens.ml.server.MLServiceImpl</value>
-         <description>Implementation class for ML service</description>
-     </property>
- 
-     <property>
-         <name>lens.ml.drivers</name>
-         <value>org.apache.lens.ml.algo.spark.SparkMLDriver</value>
-     </property>
- 
-     <property>
-         <name>lens.ml.sparkdriver.spark.master</name>
-         <value>local</value>
-     </property>
- 
- 
-     <property>
-         <name>lens.server.mocknonlens.service.impl</name>
-         <value>org.apache.lens.server.MockNonLensService</value>
-         <description>Implementation class for session service</description>
-     </property>
++
+   <property>
+     <name>lens.server.drivers</name>
+     <value>hive:org.apache.lens.driver.hive.HiveDriver</value>
+   </property>
+ 
+   <property>
+     <name>test.lens.site.key</name>
+     <value>gsvalue</value>
+   </property>
+ 
+   <property>
+     <name>lens.server.enable.console.metrics</name>
+     <value>false</value>
+     <description>Enable metrics to be reported on console</description>
+   </property>
+ 
+   <property>
+     <name>lens.server.persist.location</name>
+     <value>target/persist-dir</value>
+   </property>
+ 
+   <property>
+     <name>lens.query.result.parent.dir</name>
+     <value>target/lens-results</value>
+   </property>
+ 
+   <property>
+     <name>hive.server2.query.log.dir</name>
+     <value>target/query-logs</value>
+   </property>
+ 
+   <property>
+     <name>hive.server2.authentication</name>
+     <value>CUSTOM</value>
+   </property>
+ 
+   <property>
+     <name>hive.server2.custom.authentication.class</name>
+     <value>org.apache.lens.server.auth.FooBarAuthenticationProvider</value>
+   </property>
+ 
+ 
+   <property>
+     <name>lens.query.enable.mail.notify</name>
+     <value>false</value>
+     <description>When a query ends, whether to notify the submitter by mail or not.</description>
+   </property>
+ 
+   <property>
+     <name>lens.server.mail.from.address</name>
+     <value>blah@company.com</value>
+     <description>The from field in the notifier mail to the submitter.</description>
+   </property>
+ 
+   <property>
+     <name>lens.server.mail.host</name>
+     <value>localhost</value>
+     <description>SMTP Host for sending mail</description>
+   </property>
+ 
+   <property>
+     <name>lens.server.mail.port</name>
+     <value>25000</value>
+     <description>SMTP Port</description>
+   </property>
+ 
+   <property>
+     <name>lens.server.ws.resourcenames</name>
+     <value>session,metastore,query,quota,scheduler,index,test,ml</value>
+     <description>These JAX-RS resources would be started in the specified order when lens-server starts up</description>
+   </property>
+ 
+   <property>
+     <name>lens.server.test.ws.resource.impl</name>
+     <value>org.apache.lens.server.TestResource</value>
+     <description>Implementation class for Test Resource</description>
+   </property>
+ 
+   <property>
+     <name>lens.server.ml.ws.resource.impl</name>
+     <value>org.apache.lens.ml.server.MLServiceResource</value>
+     <description>Implementation class for ML Service Resource</description>
+   </property>
+ 
+   <property>
+     <name>lens.server.user.resolver.type</name>
+     <value>FIXED</value>
+   </property>
+   <property>
+     <name>lens.server.user.resolver.fixed.value</name>
+     <value>testlensuser</value>
+   </property>
+   <property>
+     <name>lens.server.domain</name>
+     <value>localhost</value>
+   </property>
+ 
+   <property>
+     <!-- Immediately insert all finished queries to DB -->
+     <name>lens.server.max.finished.queries</name>
+     <value>1</value>
+   </property>
+ 
+   <property>
+     <name>lens.server.db.jdbc.url</name>
+     <value>jdbc:hsqldb:target/queries.db</value>
+   </property>
+ 
+   <property>
+     <name>lens.server.servicenames</name>
+     <value>session,query,metastore,scheduler,quota,ml,mocknonlens</value>
+   </property>
+ 
+   <property>
+     <name>lens.server.ml.service.impl</name>
+     <value>org.apache.lens.ml.server.MLServiceImpl</value>
+     <description>Implementation class for ML service</description>
+   </property>
+ 
+   <property>
+     <name>lens.ml.drivers</name>
+     <value>org.apache.lens.ml.algo.spark.SparkMLDriver</value>
+   </property>
+ 
+   <property>
+     <name>lens.ml.sparkdriver.spark.master</name>
+     <value>local</value>
+   </property>
+ 
+ 
+   <property>
+     <name>lens.server.mocknonlens.service.impl</name>
+     <value>org.apache.lens.server.MockNonLensService</value>
+     <description>Implementation class for session service</description>
+   </property>
  
  </configuration>
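
Note the changed value format of lens.server.drivers in the merged file: each entry is now a type:class pair (hive:org.apache.lens.driver.hive.HiveDriver rather than the bare class name), matching the per-driver configuration layout (for example lens-server/src/test/resources/drivers/hive/hive1/hivedriver-site.xml) introduced on master.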


[33/50] [abbrv] lens git commit: LENS-879 : Adds Base framework for scheduler

Posted by sh...@apache.org.
LENS-879 : Adds Base framework for scheduler


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/7e9e47ea
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/7e9e47ea
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/7e9e47ea

Branch: refs/heads/LENS-581
Commit: 7e9e47ea66109470327607fafb26045080fc969d
Parents: 0e4c18c
Author: Ajay Yadava <aj...@apache.org>
Authored: Thu Dec 10 16:28:40 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Thu Dec 10 16:28:40 2015 +0530

----------------------------------------------------------------------
 .../lens/api/query/SchedulerJobHandle.java      |  82 ++++++
 .../apache/lens/api/query/SchedulerJobInfo.java |  68 +++++
 .../api/query/SchedulerJobInstanceHandle.java   |  86 ++++++
 .../api/query/SchedulerJobInstanceInfo.java     |  84 ++++++
 .../src/main/resources/scheduler-job-0.1.xsd    | 283 +++++++++++++++++++
 .../src/main/resources/example-job.xml          |  55 ++++
 .../api/scheduler/QuerySchedulerService.java    |  26 --
 .../server/api/scheduler/SchedulerJobStats.java |  29 ++
 .../server/api/scheduler/SchedulerService.java  | 238 ++++++++++++++++
 .../lens/server/metrics/MetricsServiceImpl.java |   4 +-
 .../scheduler/QuerySchedulerServiceImpl.java    |  55 ----
 .../server/scheduler/SchedulerServiceImpl.java  | 200 +++++++++++++
 .../src/main/resources/lensserver-default.xml   |   2 +-
 .../server/healthcheck/TestHealthChecks.java    |   4 +-
 pom.xml                                         |   2 +-
 src/site/apt/admin/config.apt                   |   2 +-
 16 files changed, 1132 insertions(+), 88 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/7e9e47ea/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobHandle.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobHandle.java b/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobHandle.java
new file mode 100644
index 0000000..aa4dc13
--- /dev/null
+++ b/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobHandle.java
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lens.api.query;
+
+import java.io.Serializable;
+import java.util.UUID;
+
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.commons.lang.StringUtils;
+
+import lombok.AccessLevel;
+import lombok.AllArgsConstructor;
+import lombok.EqualsAndHashCode;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+
+/**
+ * Handle for <code>SchedulerJob</code>.
+ */
+@XmlRootElement
+@AllArgsConstructor
+@NoArgsConstructor(access = AccessLevel.PROTECTED)
+@EqualsAndHashCode(callSuper = false)
+public class SchedulerJobHandle implements Serializable {
+
+  /**
+   * The Constant serialVersionUID.
+   */
+  private static final long serialVersionUID = 1L;
+
+  /**
+   * The handle id.
+   */
+  @XmlElement
+  @Getter
+  private UUID handleId;
+
+  /**
+   * From string.
+   *
+   * @param handle the handle string for the scheduler job
+   * @return the handle for the scheduler job
+   */
+  public static SchedulerJobHandle fromString(String handle) {
+    return new SchedulerJobHandle(UUID.fromString(handle));
+  }
+
+  public String getHandleIdString() {
+    if (handleId == null) {
+      return StringUtils.EMPTY;
+    }
+    return handleId.toString();
+  }
+
+  /**
+   * String representation of the SchedulerJobHandle.
+   * @return string representation of the handleId
+   */
+  @Override
+  public String toString() {
+    return getHandleIdString();
+  }
+
+}
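
A short usage sketch for the handle, using only the API above; the UUID literal is arbitrary:

  // Round-trip a handle through its string form.
  SchedulerJobHandle handle = SchedulerJobHandle.fromString("0dd85c6d-0cb1-4a4c-9a52-ed0b9a7e4bb4");
  String id = handle.getHandleIdString();                   // same UUID back as a string
  assert handle.equals(SchedulerJobHandle.fromString(id));  // value equality via @EqualsAndHashCode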

http://git-wip-us.apache.org/repos/asf/lens/blob/7e9e47ea/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobInfo.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobInfo.java b/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobInfo.java
new file mode 100644
index 0000000..1ae7894
--- /dev/null
+++ b/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobInfo.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.api.query;
+
+import org.apache.lens.api.scheduler.XJob;
+
+import lombok.Data;
+/**
+ * POJO to represent the <code>job</code> table in the database.
+ */
+@Data
+public class SchedulerJobInfo {
+
+  /**
+   * ID of the job.
+   * Each job has a unique id which can be used to query it.
+   *
+   * @param id new value for ID.
+   * @return ID for the current job.
+   */
+  private SchedulerJobHandle id;
+
+  /**
+   * Definition of the job scheduled.
+   */
+  private XJob job;
+
+  /**
+   * @param userName userName to be set.
+   * @return name of the user who scheduled this job.
+   */
+  private String userName;
+
+  /**
+   * @param status status of this job.
+   * @return current status of this job
+   */
+  private String status;
+
+  /**
+   * @param createdOn time to be set as createdOn.
+   * @return time when this job was submitted.
+   */
+  private long createdOn;
+
+  /**
+   * @param modifiedOn time to be set as modifiedOn time for this job.
+   * @return last modified time for this job
+   */
+  private long modifiedOn;
+
+}
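
Since the class is annotated with lombok @Data, getters, setters, equals/hashCode and toString are generated for the fields above. A hedged population sketch (jobHandle and xJob are assumed to be in scope; the status value is illustrative, as status is a plain String in this version):

  SchedulerJobInfo info = new SchedulerJobInfo();
  info.setId(jobHandle);                         // a SchedulerJobHandle
  info.setJob(xJob);                             // XJob built from the scheduler-job-0.1.xsd bindings
  info.setUserName("test_user");
  info.setStatus("SUBMITTED");                   // illustrative value
  info.setCreatedOn(System.currentTimeMillis());
  info.setModifiedOn(info.getCreatedOn());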

http://git-wip-us.apache.org/repos/asf/lens/blob/7e9e47ea/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobInstanceHandle.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobInstanceHandle.java b/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobInstanceHandle.java
new file mode 100644
index 0000000..c124a38
--- /dev/null
+++ b/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobInstanceHandle.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lens.api.query;
+
+import java.io.Serializable;
+import java.util.UUID;
+
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.commons.lang.StringUtils;
+
+import lombok.AccessLevel;
+import lombok.AllArgsConstructor;
+import lombok.EqualsAndHashCode;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+
+/**
+ * Handle for <code>SchedulerJobInstance</code>
+ */
+@XmlRootElement
+@AllArgsConstructor
+@NoArgsConstructor(access = AccessLevel.PROTECTED)
+@EqualsAndHashCode(callSuper = false)
+public class SchedulerJobInstanceHandle implements Serializable {
+
+  /**
+   * The Constant serialVersionUID.
+   */
+  private static final long serialVersionUID = 1L;
+
+  /**
+   * The handle id.
+   */
+  @XmlElement
+  @Getter
+  private UUID handleId;
+
+  /**
+   * From string.
+   *
+   * @param handle the handle
+   * @return the <code>SchedulerJobInstance</code>'s handle
+   */
+  public static SchedulerJobInstanceHandle fromString(String handle) {
+    return new SchedulerJobInstanceHandle(UUID.fromString(handle));
+  }
+
+  /**
+   * Returns handle id as a string.
+   * @return handleId as a string.
+   */
+  public String getHandleIdString() {
+    if (handleId == null) {
+      return StringUtils.EMPTY;
+    }
+    return handleId.toString();
+  }
+
+  /**
+   * String representation of the SchedulerJobInstanceHandle.
+   * @return the handleID as a string
+   */
+  @Override
+  public String toString() {
+    return getHandleIdString();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/7e9e47ea/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobInstanceInfo.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobInstanceInfo.java b/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobInstanceInfo.java
new file mode 100644
index 0000000..9146cf0
--- /dev/null
+++ b/lens-api/src/main/java/org/apache/lens/api/query/SchedulerJobInstanceInfo.java
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.api.query;
+
+import org.apache.lens.api.LensSessionHandle;
+
+import lombok.Data;
+/**
+ * POJO for an instance of SchedulerJob.
+ */
+@Data
+public class SchedulerJobInstanceInfo {
+
+  /**
+   * @param id new id for the instance of scheduler job.
+   * @return unique id for this instance of scheduler job.
+   */
+  private SchedulerJobInstanceHandle id;
+
+  /**
+   * @param jobId new id for the scheduler job.
+   * @return id for the scheduler job to which this instance belongs.
+   */
+  private SchedulerJobHandle jobId;
+
+  /**
+   * @param sessionHandle new session handle.
+   * @return session handle for this instance.
+   */
+  private LensSessionHandle sessionHandle;
+
+  /**
+   * @param startTime start time to be set for the instance.
+   * @return actual start time of this instance.
+   */
+  private long startTime;
+
+  /**
+   * @param endTime end time to be set for the instance.
+   * @return actual finish time of this instance.
+   */
+  private long endTime;
+
+  /**
+   * @param resultPath result path to be set.
+   * @return result path of this instance.
+   */
+  private String resultPath;
+
+  /**
+   * @param query query to be set
+   * @return query of this instance.
+   */
+  private String query;
+
+  /**
+   * @param status status to be set.
+   * @return status of this instance.
+   */
+  private String status;
+
+  /**
+   * @param createdOn time to be set as created_on time for the instance.
+   * @return created_on time of this instance.
+   */
+  private long createdOn;
+
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/7e9e47ea/lens-api/src/main/resources/scheduler-job-0.1.xsd
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/scheduler-job-0.1.xsd b/lens-api/src/main/resources/scheduler-job-0.1.xsd
new file mode 100644
index 0000000..4e6c68b
--- /dev/null
+++ b/lens-api/src/main/resources/scheduler-job-0.1.xsd
@@ -0,0 +1,283 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
+           attributeFormDefault="unqualified" elementFormDefault="qualified"
+           targetNamespace="uri:lens:job:0.1" xmlns="uri:lens:job:0.1"
+           xmlns:jaxb="http://java.sun.com/xml/ns/jaxb" jaxb:version="2.1">
+
+    <xs:annotation>
+        <xs:appinfo>
+            <jaxb:schemaBindings>
+                <jaxb:package name="org.apache.lens.api.scheduler" />
+            </jaxb:schemaBindings>
+        </xs:appinfo>
+    </xs:annotation>
+
+    <xs:element name="job" type="x_job" />
+
+    <xs:complexType name="x_job">
+        <xs:annotation>
+            <xs:documentation>
+                XJob represents a job that can be scheduled on lens.
+            </xs:documentation>
+        </xs:annotation>
+        <xs:sequence>
+            <xs:element type="non_empty_string" name="name">
+                <xs:annotation>
+                    <xs:documentation>
+                        Name of the scheduled job. It need not be unique.
+                    </xs:documentation>
+                </xs:annotation>
+            </xs:element>
+
+            <xs:element type="x_execution" name="execution">
+                <xs:annotation>
+                    <xs:documentation>
+                        Contains the information for the executable and session.
+                    </xs:documentation>
+                </xs:annotation>
+            </xs:element>
+
+            <xs:element type="x_trigger" name="trigger">
+                <xs:annotation>
+                    <xs:documentation>
+                        Trigger is the gating condition for an instance of the job to be launched.
+                        Currently only time-based triggers are supported.
+                    </xs:documentation>
+                </xs:annotation>
+            </xs:element>
+
+            <xs:element type="map_type" name="job_conf" minOccurs="0" maxOccurs="unbounded" >
+                <xs:annotation>
+                    <xs:documentation>
+                        Configuration for the job e.g. number of retries etc.
+                    </xs:documentation>
+                </xs:annotation>
+            </xs:element>
+
+            <xs:element type="xs:dateTime" name="start_time">
+                <xs:annotation>
+                    <xs:documentation>
+                        Start time of this job's schedule  e.g. 2013-11-30T00:00:00
+                    </xs:documentation>
+                </xs:annotation>
+            </xs:element>
+
+            <xs:element type="xs:dateTime" name="end_time">
+                <xs:annotation>
+                    <xs:documentation>
+                        End time of this job's schedule e.g. 2013-11-30T00:00:00
+                    </xs:documentation>
+                </xs:annotation>
+            </xs:element>
+        </xs:sequence>
+    </xs:complexType>
+
+    <xs:complexType name="x_execution">
+        <xs:annotation>
+            <xs:documentation>
+                Task which has to be scheduled. Currently only query is supported.
+            </xs:documentation>
+        </xs:annotation>
+        <xs:sequence>
+            <!-- Session related information for the job like db, conf, resource_path etc. -->
+            <xs:element type="x_session_type" name="session">
+            </xs:element>
+            <xs:choice>
+                <!-- Executable for the job e.g. query.
+                See x_job_query documentation for detailed documentation on query -->
+                <xs:element name="query" type="x_job_query" />
+            </xs:choice>
+        </xs:sequence>
+    </xs:complexType>
+
+    <xs:complexType name="x_session_type">
+        <xs:sequence>
+            <xs:element name="db" type="xs:string">
+                <xs:annotation>
+                    <xs:documentation>
+                        Database name to be used in session e.g. myDB
+                    </xs:documentation>
+                </xs:annotation>
+            </xs:element>
+            <xs:element name="conf" type="map_type" minOccurs="0" maxOccurs="unbounded">
+                <xs:annotation>
+                    <xs:documentation>
+                        Configuration for session.
+                    </xs:documentation>
+                </xs:annotation>
+            </xs:element>
+            <xs:element type="xs:string" name="resource_path" minOccurs="0" maxOccurs="unbounded">
+                <xs:annotation>
+                    <xs:documentation>
+                        Path for resources like jars etc. e.g. /path/to/my/jar
+                    </xs:documentation>
+                </xs:annotation>
+            </xs:element>
+        </xs:sequence>
+    </xs:complexType>
+
+    <xs:complexType name="x_job_query">
+        <xs:annotation>
+            <xs:documentation>
+                A query which is executed in each run of the job.
+            </xs:documentation>
+        </xs:annotation>
+        <xs:sequence>
+            <xs:element name="query" type="non_empty_string">
+                <xs:annotation>
+                    <xs:documentation>
+                        A query which is executed in each run of the job.
+                    </xs:documentation>
+                </xs:annotation>
+            </xs:element>
+            <xs:element name="conf" type="map_type" minOccurs="0" maxOccurs="unbounded">
+                <xs:annotation>
+                    <xs:documentation>
+                        Configuration for the query which is executed
+                    </xs:documentation>
+                </xs:annotation>
+            </xs:element>
+
+        </xs:sequence>
+    </xs:complexType>
+
+    <xs:complexType name="x_trigger">
+        <xs:annotation>
+            <xs:documentation>
+                Trigger is the gating condition for an instance of the job to be launched.
+                Currently only time-based triggers are supported.
+            </xs:documentation>
+        </xs:annotation>
+        <xs:choice>
+            <!-- Frequency for the job.
+            Frequency of schedule. Frequency can either be
+                1) an enum from (DAILY, WEEKLY, MONTHLY, QUARTERLY, YEARLY)
+                2) a cron_expression
+            -->
+            <xs:element name="frequency" type="x_frequency" />
+        </xs:choice>
+    </xs:complexType>
+
+    <xs:complexType name="map_type">
+        <xs:annotation>
+            <xs:documentation>
+                A type to represent key-value pairs.
+            </xs:documentation>
+        </xs:annotation>
+        <xs:sequence>
+            <xs:element name="key" type="non_empty_string" />
+            <xs:element name="value" type="xs:string" />
+        </xs:sequence>
+    </xs:complexType>
+
+    <xs:complexType name="x_frequency">
+        <xs:annotation>
+            <xs:documentation>
+                Frequency of schedule. Frequency can either be
+                1) an enum from (DAILY, WEEKLY, MONTHLY, QUARTERLY, YEARLY)
+                2) a cron_expression
+            </xs:documentation>
+        </xs:annotation>
+        <xs:choice>
+            <xs:element name="enum" type="x_frequency_enum" />
+            <xs:element name="cron_expression" type="x_cron_expression" />
+        </xs:choice>
+        <xs:attribute type="xs:string" name="timezone" />
+    </xs:complexType>
+
+    <xs:simpleType name="x_frequency_enum">
+        <xs:annotation>
+            <xs:documentation>
+                Valid enum expressions which can be used as frequency.
+            </xs:documentation>
+        </xs:annotation>
+        <xs:restriction base="xs:string">
+            <xs:enumeration value="DAILY" />
+            <xs:enumeration value="WEEKLY" />
+            <xs:enumeration value="MONTHLY" />
+            <xs:enumeration value="QUARTERLY" />
+            <xs:enumeration value="YEARLY" />
+        </xs:restriction>
+    </xs:simpleType>
+
+    <xs:simpleType name="x_cron_expression">
+        <xs:annotation>
+            <xs:documentation>
+                Source: https://quartz-scheduler.org/xml/job_scheduling_data_1_8.xsd
+                To examine you can break it up visually like as below.
+                SECONDS: (
+                ((([0-9]|[0-5][0-9])(-([0-9]|[0-5][0-9]))?,)*([0-9]|[0-5][0-9])(-([0-9]|[0-5][0-9]))?)
+                | (([\*]|[0-9]|[0-5][0-9])/([0-9]|[0-5][0-9])) | ([\?])
+                | ([\*]) ) [\s] MINUTES: (
+                ((([0-9]|[0-5][0-9])(-([0-9]|[0-5][0-9]))?,)*([0-9]|[0-5][0-9])(-([0-9]|[0-5][0-9]))?)
+                | (([\*]|[0-9]|[0-5][0-9])/([0-9]|[0-5][0-9])) | ([\?])
+                | ([\*]) ) [\s] HOURS: (
+                ((([0-9]|[0-1][0-9]|[2][0-3])(-([0-9]|[0-1][0-9]|[2][0-3]))?,)*([0-9]|[0-1][0-9]|[2][0-3])(-([0-9]|[0-1][0-9]|[2][0-3]))?)
+                |
+                (([\*]|[0-9]|[0-1][0-9]|[2][0-3])/([0-9]|[0-1][0-9]|[2][0-3]))
+                | ([\?]) | ([\*]) ) [\s] DAY OF MONTH: (
+                ((([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1])(-([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1]))?,)*([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1])(-([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1]))?(C)?)
+                |
+                (([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1])/([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1])(C)?)
+                | (L(-[0-9])?) | (L(-[1-2][0-9])?) | (L(-[3][0-1])?) |
+                (LW) | ([1-9]W) | ([1-3][0-9]W) | ([\?]) | ([\*]) )[\s]
+                MONTH: (
+                ((([1-9]|0[1-9]|1[0-2])(-([1-9]|0[1-9]|1[0-2]))?,)*([1-9]|0[1-9]|1[0-2])(-([1-9]|0[1-9]|1[0-2]))?)
+                | (([1-9]|0[1-9]|1[0-2])/([1-9]|0[1-9]|1[0-2])) |
+                (((JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC)(-(JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC))?,)*(JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC)(-(JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC))?)
+                |
+                ((JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC)/(JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC))
+                | ([\?]) | ([\*]) )[\s] DAY OF WEEK: (
+                (([1-7](-([1-7]))?,)*([1-7])(-([1-7]))?) |
+                ([1-7]/([1-7])) |
+                (((MON|TUE|WED|THU|FRI|SAT|SUN)(-(MON|TUE|WED|THU|FRI|SAT|SUN))?,)*(MON|TUE|WED|THU|FRI|SAT|SUN)(-(MON|TUE|WED|THU|FRI|SAT|SUN))?(C)?)
+                |
+                ((MON|TUE|WED|THU|FRI|SAT|SUN)/(MON|TUE|WED|THU|FRI|SAT|SUN)(C)?)
+                | (([1-7]|(MON|TUE|WED|THU|FRI|SAT|SUN))(L|LW)?) |
+                (([1-7]|MON|TUE|WED|THU|FRI|SAT|SUN)#([1-7])?) | ([\?])
+                | ([\*]) ) YEAR (OPTIONAL): ( [\s]? ([\*])? |
+                ((19[7-9][0-9])|(20[0-9][0-9]))? |
+                (((19[7-9][0-9])|(20[0-9][0-9]))/((19[7-9][0-9])|(20[0-9][0-9])))?
+                |
+                ((((19[7-9][0-9])|(20[0-9][0-9]))(-((19[7-9][0-9])|(20[0-9][0-9])))?,)*((19[7-9][0-9])|(20[0-9][0-9]))(-((19[7-9][0-9])|(20[0-9][0-9])))?)?
+                )
+            </xs:documentation>
+        </xs:annotation>
+        <xs:restriction base="xs:string">
+            <xs:pattern
+                    value="(((([0-9]|[0-5][0-9])(-([0-9]|[0-5][0-9]))?,)*([0-9]|[0-5][0-9])(-([0-9]|[0-5][0-9]))?)|(([\*]|[0-9]|[0-5][0-9])/([0-9]|[0-5][0-9]))|([\?])|([\*]))[\s](((([0-9]|[0-5][0-9])(-([0-9]|[0-5][0-9]))?,)*([0-9]|[0-5][0-9])(-([0-9]|[0-5][0-9]))?)|(([\*]|[0-9]|[0-5][0-9])/([0-9]|[0-5][0-9]))|([\?])|([\*]))[\s](((([0-9]|[0-1][0-9]|[2][0-3])(-([0-9]|[0-1][0-9]|[2][0-3]))?,)*([0-9]|[0-1][0-9]|[2][0-3])(-([0-9]|[0-1][0-9]|[2][0-3]))?)|(([\*]|[0-9]|[0-1][0-9]|[2][0-3])/([0-9]|[0-1][0-9]|[2][0-3]))|([\?])|([\*]))[\s](((([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1])(-([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1]))?,)*([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1])(-([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1]))?(C)?)|(([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1])/([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1])(C)?)|(L(-[0-9])?)|(L(-[1-2][0-9])?)|(L(-[3][0-1])?)|(LW)|([1-9]W)|([1-3][0-9]W)|([\?])|([\*]))[\s](((([1-9]|0[1-9]|1[0-2])(-([1-9]|0[1-9]|1[0-2]))?,)*([1-9]|0[1-9]|1[0-2])(-([1-9]|0[1-9]|1[0-2]))?)|(([1-9]|0[1-9]|1[0-2])/([1-9]|0[1-9]|1[0-2]))|(((JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC)(-(JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC))?,)*(JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC)(-(JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC))?)|((JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC)/(JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC))|([\?])|([\*]))[\s]((([1-7](-([1-7]))?,)*([1-7])(-([1-7]))?)|([1-7]/([1-7]))|(((MON|TUE|WED|THU|FRI|SAT|SUN)(-(MON|TUE|WED|THU|FRI|SAT|SUN))?,)*(MON|TUE|WED|THU|FRI|SAT|SUN)(-(MON|TUE|WED|THU|FRI|SAT|SUN))?(C)?)|((MON|TUE|WED|THU|FRI|SAT|SUN)/(MON|TUE|WED|THU|FRI|SAT|SUN)(C)?)|(([1-7]|(MON|TUE|WED|THU|FRI|SAT|SUN))?(L|LW)?)|(([1-7]|MON|TUE|WED|THU|FRI|SAT|SUN)#([1-7])?)|([\?])|([\*]))([\s]?(([\*])?|(19[7-9][0-9])|(20[0-9][0-9]))?| (((19[7-9][0-9])|(20[0-9][0-9]))/((19[7-9][0-9])|(20[0-9][0-9])))?| ((((19[7-9][0-9])|(20[0-9][0-9]))(-((19[7-9][0-9])|(20[0-9][0-9])))?,)*((19[7-9][0-9])|(20[0-9][0-9]))(-((19[7-9][0-9])|(20[0-9][0-9])))?)?)" />
+        </xs:restriction>
+    </xs:simpleType>
+
+    <xs:simpleType name="non_empty_string">
+        <xs:annotation>
+            <xs:documentation>
+                A type to represent non-empty strings.
+            </xs:documentation>
+        </xs:annotation>
+        <xs:restriction base="xs:string">
+            <xs:minLength value="1"/>
+        </xs:restriction>
+    </xs:simpleType>
+
+</xs:schema>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lens/blob/7e9e47ea/lens-examples/src/main/resources/example-job.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/example-job.xml b/lens-examples/src/main/resources/example-job.xml
new file mode 100644
index 0000000..93f36d4
--- /dev/null
+++ b/lens-examples/src/main/resources/example-job.xml
@@ -0,0 +1,55 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements. See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership. The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied. See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+-->
+<job xmlns="uri:lens:job:0.1">
+    <name>Sample Scheduler job</name>
+    <execution>
+        <resource_path>/path/to/a/resource/</resource_path>
+        <session>
+            <db>myDb</db>
+            <conf>
+                <key>userName</key>
+                <value>ajayyadava</value>
+            </conf>
+        </session>
+        <query>
+            <query>cube select measure2 from sample_cube where time_range_in(dt, '2014-06-24-23', '2014-06-25-00')</query>
+            <conf>
+                <key>queryKey1</key>
+                <value>queryValue1</value>
+            </conf>
+        </query>
+    </execution>
+    <trigger>
+        <frequency timezone="UTC">
+            <!--You can also specify a frequency enum here instead of cron expression e.g.
+            <enum>DAILY</enum>
+            -->
+            <cron_expression>0 15 10 * * ? 2014</cron_expression> <!-- Fire at 10:15am every day during the year 2014-->
+        </frequency>
+    </trigger>
+    <job_conf>
+        <key>key1</key>
+        <value>value1</value>
+    </job_conf>
+    <start_time>2013-11-30T00:00:00</start_time>
+    <end_time>2015-12-30T00:00:00</end_time>
+</job>
\ No newline at end of file

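The job XML above conforms to the scheduler-job-0.1.xsd schema shown earlier, which binds its types to the org.apache.lens.api.scheduler package, so a file like this can be loaded with plain JAXB. A minimal sketch (not part of the patch, assuming the generated XJob class and a local copy of example-job.xml):

    import java.io.File;

    import javax.xml.bind.JAXBContext;
    import javax.xml.bind.JAXBElement;
    import javax.xml.transform.stream.StreamSource;

    import org.apache.lens.api.scheduler.XJob;

    public class LoadExampleJob {
      public static void main(String[] args) throws Exception {
        JAXBContext ctx = JAXBContext.newInstance(XJob.class);
        // unmarshal(Source, Class) always hands back a JAXBElement, whether or not
        // the generated XJob carries @XmlRootElement.
        JAXBElement<XJob> root = ctx.createUnmarshaller()
            .unmarshal(new StreamSource(new File("example-job.xml")), XJob.class);
        XJob job = root.getValue();
        // getName()/getStartTime() are the accessors XJC generates for the
        // name and start_time elements.
        System.out.println(job.getName() + " starts at " + job.getStartTime());
      }
    }
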
http://git-wip-us.apache.org/repos/asf/lens/blob/7e9e47ea/lens-server-api/src/main/java/org/apache/lens/server/api/scheduler/QuerySchedulerService.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/scheduler/QuerySchedulerService.java b/lens-server-api/src/main/java/org/apache/lens/server/api/scheduler/QuerySchedulerService.java
deleted file mode 100644
index 9f48d27..0000000
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/scheduler/QuerySchedulerService.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.server.api.scheduler;
-
-/**
- * The Interface QuerySchedulerService.
- */
-public interface QuerySchedulerService {
-
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/7e9e47ea/lens-server-api/src/main/java/org/apache/lens/server/api/scheduler/SchedulerJobStats.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/scheduler/SchedulerJobStats.java b/lens-server-api/src/main/java/org/apache/lens/server/api/scheduler/SchedulerJobStats.java
new file mode 100644
index 0000000..4952057
--- /dev/null
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/scheduler/SchedulerJobStats.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lens.server.api.scheduler;
+
+import lombok.Data;
+
+/**
+ * Stats for a scheduler's job.
+ */
+@Data
+public class SchedulerJobStats {
+
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/7e9e47ea/lens-server-api/src/main/java/org/apache/lens/server/api/scheduler/SchedulerService.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/scheduler/SchedulerService.java b/lens-server-api/src/main/java/org/apache/lens/server/api/scheduler/SchedulerService.java
new file mode 100644
index 0000000..b6ec093
--- /dev/null
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/scheduler/SchedulerService.java
@@ -0,0 +1,238 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.server.api.scheduler;
+
+import java.util.Collection;
+import java.util.Date;
+import java.util.List;
+
+import org.apache.lens.api.LensSessionHandle;
+import org.apache.lens.api.query.SchedulerJobHandle;
+import org.apache.lens.api.query.SchedulerJobInfo;
+import org.apache.lens.api.query.SchedulerJobInstanceHandle;
+import org.apache.lens.api.query.SchedulerJobInstanceInfo;
+import org.apache.lens.api.scheduler.XJob;
+import org.apache.lens.server.api.error.LensException;
+
+
+/**
+ * Scheduler interface.
+ */
+public interface SchedulerService {
+
+  /**
+   * Submit a job.
+   *
+   * @param sessionHandle handle for this session.
+   * @param job           job to be submitted.
+   * @return unique id for the submitted job.
+   * @throws LensException the lens exception
+   */
+  SchedulerJobHandle submitJob(LensSessionHandle sessionHandle, XJob job) throws LensException;
+
+  /**
+   * Schedule a job.
+   *
+   * @param sessionHandle handle for the current session.
+   * @param jobHandle     handle for the job to be scheduled.
+   * @throws LensException the lens exception
+   */
+  void scheduleJob(LensSessionHandle sessionHandle, SchedulerJobHandle jobHandle) throws LensException;
+
+
+  /**
+   * Submit a job and also schedule it.
+   *
+   * @param sessionHandle handle for the session.
+   * @param job           job definition.
+   * @return unique id of the job which is submitted and scheduled.
+   * @throws LensException the lens exception
+   */
+  SchedulerJobHandle submitAndScheduleJob(LensSessionHandle sessionHandle, XJob job) throws LensException;
+
+  /**
+   * Returns the definition of a job.
+   *
+   * @param sessionHandle handle for the session.
+   * @param jobHandle     handle for the job
+   * @return job definition
+   * @throws LensException the lens exception
+   */
+  XJob getJobDefinition(LensSessionHandle sessionHandle, SchedulerJobHandle jobHandle) throws LensException;
+
+
+  /**
+   * Returns the details of a job. Details may contain extra system information like id for the job.
+   *
+   * @param sessionHandle handle for the session.
+   * @param jobHandle     handle for the job
+   * @return job details for the job
+   * @throws LensException the lens exception
+   */
+  SchedulerJobInfo getJobDetails(LensSessionHandle sessionHandle,
+                                 SchedulerJobHandle jobHandle) throws LensException;
+
+  /**
+   * Update a job with a new definition.
+   *
+   * Updates apply only to newer instances; running instances continue with the old definition.
+   *
+   * @param sessionHandle    handle for the session.
+   * @param jobHandle        handle for the job to be updated.
+   * @param newJobDefinition the new job definition.
+   * @return true if the update was successful, false otherwise.
+   * @throws LensException the lens exception
+   */
+  boolean updateJob(LensSessionHandle sessionHandle,
+                    SchedulerJobHandle jobHandle, XJob newJobDefinition) throws LensException;
+
+
+  /**
+   * End a job by specifying an expiry time.
+   *
+   * @param sessionHandle handle for the current session.
+   * @param jobHandle     handle for the job
+   * @param expiryTime    time after which the job shouldn't execute.
+   * @throws LensException the lens exception
+   */
+  void expireJob(LensSessionHandle sessionHandle, SchedulerJobHandle jobHandle,
+                 Date expiryTime) throws LensException;
+
+
+  /**
+   * Suspend a job.
+   *
+   * If the job is not in scheduled state, it will return true.
+   * Once a job is suspended, no further instances of that job will run.
+   * Any running instances of that job will continue normally.
+   *
+   * @param sessionHandle handle for the current session.
+   * @param jobHandle     handle for the job
+   * @return true if the job was suspended successfully, false otherwise.
+   * @throws LensException the lens exception
+   */
+  boolean suspendJob(LensSessionHandle sessionHandle, SchedulerJobHandle jobHandle) throws LensException;
+
+
+  /**
+   * Resume a job from a given time.
+   *
+   * @param sessionHandle handle for the session.
+   * @param jobHandle     handle for the job
+   * @param effectiveTime time from which to resume the instances.
+   * @return true if the job was resumed successfully, false otherwise.
+   * @throws LensException the lens exception
+   */
+  boolean resumeJob(LensSessionHandle sessionHandle, SchedulerJobHandle jobHandle,
+                    Date effectiveTime) throws LensException;
+
+  /**
+   * Delete a job.
+   *
+   * @param sessionHandle handle for the session.
+   * @param jobHandle     handle for the job
+   * @return true if the job was deleted successfully.
+   * @throws LensException the lens exception
+   */
+  boolean deleteJob(LensSessionHandle sessionHandle, SchedulerJobHandle jobHandle) throws LensException;
+
+
+  /**
+   * Returns stats for all jobs matching the given filters.
+   *
+   * @param sessionHandle handle for the current session.
+   * @param state         filter on status; if specified, only jobs in that state are returned,
+   *                      if null, no entries are filtered out on the basis of state.
+   * @param user          filter on the submitting user; if specified, only jobs submitted by the given user
+   *                      are returned, otherwise no entries are filtered out on the basis of user name.
+   * @param jobName       filter on job name; if specified, only jobs with the given name are considered,
+   *                      otherwise no jobs are filtered out on the basis of name.
+   * @param startTime     if specified, only instances with scheduleTime after this time are considered.
+   * @param endTime       if specified, only instances with scheduleTime before this time are considered.
+   * @return a collection of stats per job.
+   * @throws LensException
+   */
+  Collection<SchedulerJobStats> getAllJobStats(LensSessionHandle sessionHandle,
+                                      String state, String user,
+                                      String jobName, long startTime, long endTime) throws LensException;
+
+  /**
+   * Returns stats for a job.
+   *
+   * @param sessionHandle handle for session.
+   * @param handle        handle for the job
+   * @param state         filter on status; if specified, only instances in that state are considered,
+   *                      if null, no entries are filtered out on the basis of state.
+   * @param startTime     if specified, only instances with scheduleTime after this time are considered.
+   * @param endTime       if specified, only instances with scheduleTime before this time are considered.
+   * @return stats for the job.
+   * @throws LensException the lens exception
+   */
+  SchedulerJobStats getJobStats(LensSessionHandle sessionHandle, SchedulerJobHandle handle,
+                       String state, long startTime, long endTime) throws LensException;
+
+
+  /**
+   * Returns handles for last <code>numResults</code> instances for the job.
+   *
+   * @param sessionHandle handle for the session.
+   * @param jobHandle     handle for the job
+   * @param numResults    number of results to be returned; defaults to 100.
+   * @return list of instance ids for the job
+   * @throws LensException the lens exception
+   */
+  List<String> getJobInstances(LensSessionHandle sessionHandle,
+                               SchedulerJobHandle jobHandle, Long numResults) throws LensException;
+
+  /**
+   * Kills a running job instance.
+   *
+   * If the job instance is already completed or not in running state, this will be a no-op and will return false.
+   *
+   * @param sessionHandle  handle for the session.
+   * @param instanceHandle handle for the instance
+   * @return true if the instance was killed successfully, false otherwise.
+   * @throws LensException the lens exception
+   */
+  boolean killInstance(LensSessionHandle sessionHandle,
+                        SchedulerJobInstanceHandle instanceHandle) throws LensException;
+
+  /**
+   * Reruns a failed/killed/completed job instance.
+   *
+   * If the instance is not in a terminal state, then this operation will be a no-op and will return false.
+   *
+   * @param sessionHandle  handle for the session.
+   * @param instanceHandle handle for the instance
+   * @return true if the instance was rerun successfully, false otherwise.
+   * @throws LensException the lens exception
+   */
+  boolean rerunInstance(LensSessionHandle sessionHandle,
+                        SchedulerJobInstanceHandle instanceHandle) throws LensException;
+
+  /**
+   * Instance details for an instance.
+   *
+   * @param sessionHandle  handle for the session.
+   * @param instanceHandle handle for the instance.
+   * @return details for the instance.
+   * @throws LensException the lens exception
+   */
+  SchedulerJobInstanceInfo getInstanceDetails(LensSessionHandle sessionHandle,
+                                              SchedulerJobInstanceHandle instanceHandle) throws LensException;
+
+
+}

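A sketch of how a caller might drive this interface once a concrete implementation is wired in (the service, session and job are assumed to come from elsewhere; not part of the patch):

    import java.util.Date;
    import java.util.List;

    import org.apache.lens.api.LensSessionHandle;
    import org.apache.lens.api.query.SchedulerJobHandle;
    import org.apache.lens.api.scheduler.XJob;
    import org.apache.lens.server.api.error.LensException;
    import org.apache.lens.server.api.scheduler.SchedulerService;

    class SchedulerClientSketch {
      static void runOnce(SchedulerService service, LensSessionHandle session, XJob job)
          throws LensException {
        // Submit and schedule in a single call.
        SchedulerJobHandle jobHandle = service.submitAndScheduleJob(session, job);
        // Handles for the last 10 instances of the job.
        List<String> instances = service.getJobInstances(session, jobHandle, 10L);
        // Stop further instances from being launched after now.
        service.expireJob(session, jobHandle, new Date());
      }
    }
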
http://git-wip-us.apache.org/repos/asf/lens/blob/7e9e47ea/lens-server/src/main/java/org/apache/lens/server/metrics/MetricsServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metrics/MetricsServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/metrics/MetricsServiceImpl.java
index 3389aba..6852265 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metrics/MetricsServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metrics/MetricsServiceImpl.java
@@ -48,7 +48,7 @@ import org.apache.lens.server.api.session.SessionService;
 import org.apache.lens.server.healthcheck.LensServiceHealthCheck;
 import org.apache.lens.server.query.QueryExecutionServiceImpl;
 import org.apache.lens.server.quota.QuotaServiceImpl;
-import org.apache.lens.server.scheduler.QuerySchedulerServiceImpl;
+import org.apache.lens.server.scheduler.SchedulerServiceImpl;
 import org.apache.lens.server.session.DatabaseResourceService;
 import org.apache.lens.server.session.HiveSessionService;
 
@@ -263,7 +263,7 @@ public class MetricsServiceImpl extends AbstractService implements MetricsServic
     healthCheck.register(CubeMetastoreService.NAME, new LensServiceHealthCheck(CubeMetastoreService.NAME));
     healthCheck.register(HiveSessionService.NAME, new LensServiceHealthCheck(HiveSessionService.NAME));
     healthCheck.register(QueryExecutionServiceImpl.NAME, new LensServiceHealthCheck(QueryExecutionServiceImpl.NAME));
-    healthCheck.register(QuerySchedulerServiceImpl.NAME, new LensServiceHealthCheck(QuerySchedulerServiceImpl.NAME));
+    healthCheck.register(SchedulerServiceImpl.NAME, new LensServiceHealthCheck(SchedulerServiceImpl.NAME));
     healthCheck.register(QuotaServiceImpl.NAME, new LensServiceHealthCheck(QuotaServiceImpl.NAME));
     healthCheck.register(MetricsServiceImpl.NAME, new LensServiceHealthCheck(MetricsServiceImpl.NAME));
     healthCheck.register(EventServiceImpl.NAME, new LensServiceHealthCheck(EventServiceImpl.NAME));

http://git-wip-us.apache.org/repos/asf/lens/blob/7e9e47ea/lens-server/src/main/java/org/apache/lens/server/scheduler/QuerySchedulerServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/scheduler/QuerySchedulerServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/scheduler/QuerySchedulerServiceImpl.java
deleted file mode 100644
index 24949bd..0000000
--- a/lens-server/src/main/java/org/apache/lens/server/scheduler/QuerySchedulerServiceImpl.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.server.scheduler;
-
-import org.apache.lens.server.BaseLensService;
-import org.apache.lens.server.api.health.HealthStatus;
-import org.apache.lens.server.api.scheduler.QuerySchedulerService;
-
-import org.apache.hive.service.cli.CLIService;
-
-/**
- * The Class QuerySchedulerServiceImpl.
- */
-public class QuerySchedulerServiceImpl extends BaseLensService implements QuerySchedulerService {
-
-  /**
-   * The constant name for scheduler service.
-   */
-  public static final String NAME = "scheduler";
-
-  /**
-   * Instantiates a new query scheduler service impl.
-   *
-   * @param cliService the cli service
-   */
-  public QuerySchedulerServiceImpl(CLIService cliService) {
-    super(NAME, cliService);
-  }
-
-  /**
-   * {@inheritDoc}
-   */
-  @Override
-  public HealthStatus getHealthStatus() {
-    return this.getServiceState().equals(STATE.STARTED)
-        ? new HealthStatus(true, "Query scheduler service is healthy.")
-        : new HealthStatus(false, "Query scheduler service is down.");
-  }
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/7e9e47ea/lens-server/src/main/java/org/apache/lens/server/scheduler/SchedulerServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/scheduler/SchedulerServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/scheduler/SchedulerServiceImpl.java
new file mode 100644
index 0000000..ce98601
--- /dev/null
+++ b/lens-server/src/main/java/org/apache/lens/server/scheduler/SchedulerServiceImpl.java
@@ -0,0 +1,200 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.server.scheduler;
+
+import java.util.Collection;
+import java.util.Date;
+import java.util.List;
+
+import org.apache.lens.api.LensSessionHandle;
+import org.apache.lens.api.query.SchedulerJobHandle;
+import org.apache.lens.api.query.SchedulerJobInfo;
+import org.apache.lens.api.query.SchedulerJobInstanceHandle;
+import org.apache.lens.api.query.SchedulerJobInstanceInfo;
+import org.apache.lens.api.scheduler.XJob;
+import org.apache.lens.server.BaseLensService;
+import org.apache.lens.server.api.error.LensException;
+import org.apache.lens.server.api.health.HealthStatus;
+import org.apache.lens.server.api.scheduler.SchedulerJobStats;
+import org.apache.lens.server.api.scheduler.SchedulerService;
+
+import org.apache.hive.service.cli.CLIService;
+/**
+ * The Class SchedulerServiceImpl.
+ */
+public class SchedulerServiceImpl extends BaseLensService implements SchedulerService {
+
+  /**
+   * The constant name for scheduler service.
+   */
+  public static final String NAME = "scheduler";
+
+  /**
+   * Instantiates a new scheduler service.
+   *
+   * @param cliService the cli service
+   */
+  public SchedulerServiceImpl(CLIService cliService) {
+    super(NAME, cliService);
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public HealthStatus getHealthStatus() {
+    return this.getServiceState().equals(STATE.STARTED)
+        ? new HealthStatus(true, "Scheduler service is healthy.")
+        : new HealthStatus(false, "Scheduler service is down.");
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public SchedulerJobHandle submitJob(LensSessionHandle sessionHandle, XJob job) throws LensException {
+    return null;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public void scheduleJob(LensSessionHandle sessionHandle,
+                                        SchedulerJobHandle jobHandle) throws LensException {
+  }
+
+  @Override
+  public SchedulerJobHandle submitAndScheduleJob(LensSessionHandle sessionHandle, XJob job) throws LensException {
+    return null;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public XJob getJobDefinition(LensSessionHandle sessionHandle, SchedulerJobHandle jobHandle) throws LensException {
+    return null;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public SchedulerJobInfo getJobDetails(LensSessionHandle sessionHandle,
+                                        SchedulerJobHandle jobHandle) throws LensException {
+    return null;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public boolean updateJob(LensSessionHandle sessionHandle, SchedulerJobHandle jobHandle,
+                           XJob newJobDefinition) throws LensException {
+    return false;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public void expireJob(LensSessionHandle sessionHandle, SchedulerJobHandle jobHandle,
+                        Date expiryTime) throws LensException {
+
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public boolean suspendJob(LensSessionHandle sessionHandle, SchedulerJobHandle jobHandle) throws LensException {
+    return false;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public boolean resumeJob(LensSessionHandle sessionHandle, SchedulerJobHandle jobHandle,
+                           Date effectiveTime) throws LensException {
+    return false;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public boolean deleteJob(LensSessionHandle sessionHandle, SchedulerJobHandle jobHandle) throws LensException {
+    return false;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public Collection<SchedulerJobStats> getAllJobStats(LensSessionHandle sessionHandle, String state, String user,
+                                             String jobName, long startTime, long endTime) throws LensException {
+    return null;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public SchedulerJobStats getJobStats(LensSessionHandle sessionHandle, SchedulerJobHandle handle, String state,
+                              long startTime, long endTime) throws LensException {
+    return null;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public boolean rerunInstance(LensSessionHandle sessionHandle,
+                               SchedulerJobInstanceHandle instanceHandle) throws LensException {
+    return false;
+  }
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public List<String> getJobInstances(LensSessionHandle sessionHandle,
+                                      SchedulerJobHandle jobHandle, Long numResults) throws LensException {
+    return null;
+  }
+
+  @Override
+  public boolean killInstance(LensSessionHandle sessionHandle,
+                              SchedulerJobInstanceHandle instanceHandle) throws LensException {
+    return false;
+  }
+
+
+  /**
+   * {@inheritDoc}
+   */
+  @Override
+  public SchedulerJobInstanceInfo getInstanceDetails(LensSessionHandle sessionHandle,
+                                                     SchedulerJobInstanceHandle instanceHandle) throws LensException {
+    return null;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/7e9e47ea/lens-server/src/main/resources/lensserver-default.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/main/resources/lensserver-default.xml b/lens-server/src/main/resources/lensserver-default.xml
index 39b72dc..cac641a 100644
--- a/lens-server/src/main/resources/lensserver-default.xml
+++ b/lens-server/src/main/resources/lensserver-default.xml
@@ -91,7 +91,7 @@
 
   <property>
     <name>lens.server.scheduler.service.impl</name>
-    <value>org.apache.lens.server.scheduler.QuerySchedulerServiceImpl</value>
+    <value>org.apache.lens.server.scheduler.SchedulerServiceImpl</value>
     <description>Implementation class for query scheduler service</description>
   </property>
 

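Deployments overriding this property (assuming the usual lens-site.xml override mechanism) would carry the new class name:

    <property>
      <name>lens.server.scheduler.service.impl</name>
      <value>org.apache.lens.server.scheduler.SchedulerServiceImpl</value>
    </property>
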
http://git-wip-us.apache.org/repos/asf/lens/blob/7e9e47ea/lens-server/src/test/java/org/apache/lens/server/healthcheck/TestHealthChecks.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/healthcheck/TestHealthChecks.java b/lens-server/src/test/java/org/apache/lens/server/healthcheck/TestHealthChecks.java
index e11413e..8e22c7a 100644
--- a/lens-server/src/test/java/org/apache/lens/server/healthcheck/TestHealthChecks.java
+++ b/lens-server/src/test/java/org/apache/lens/server/healthcheck/TestHealthChecks.java
@@ -32,7 +32,7 @@ import org.apache.lens.server.metastore.CubeMetastoreServiceImpl;
 import org.apache.lens.server.metastore.MetastoreApp;
 import org.apache.lens.server.metrics.MetricsServiceImpl;
 import org.apache.lens.server.quota.QuotaServiceImpl;
-import org.apache.lens.server.scheduler.QuerySchedulerServiceImpl;
+import org.apache.lens.server.scheduler.SchedulerServiceImpl;
 import org.apache.lens.server.session.HiveSessionService;
 
 import org.glassfish.jersey.client.ClientConfig;
@@ -93,7 +93,7 @@ public class TestHealthChecks extends LensJerseyTest {
 
   @Test
   public void testQuerySchedulerServiceHealth() throws Exception {
-    checkHealth(QuerySchedulerServiceImpl.NAME);
+    checkHealth(SchedulerServiceImpl.NAME);
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/lens/blob/7e9e47ea/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 732b3bb..746204e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -58,7 +58,7 @@
     <commons.collections.version>3.2.1</commons.collections.version>
     <joda.time.version>2.0</joda.time.version>
     <guava.version>13.0.1</guava.version>
-    <lombok.version>1.12.4</lombok.version>
+    <lombok.version>1.16.6</lombok.version>
     <lombok.maven.plugin.version>1.16.4.1</lombok.maven.plugin.version>
     <typesafe.config.version>1.2.1</typesafe.config.version>
     <logback.version>1.1.3</logback.version>

http://git-wip-us.apache.org/repos/asf/lens/blob/7e9e47ea/src/site/apt/admin/config.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/admin/config.apt b/src/site/apt/admin/config.apt
index bcf4b7d..54f827e 100644
--- a/src/site/apt/admin/config.apt
+++ b/src/site/apt/admin/config.apt
@@ -165,7 +165,7 @@ Lens server configuration
 *--+--+---+--+
 |68|lens.server.savedquery.ws.resource.impl|org.apache.lens.server.query.save.SavedQueryResource|Implementation class for Saved query Resource|
 *--+--+---+--+
-|69|lens.server.scheduler.service.impl|org.apache.lens.server.scheduler.QuerySchedulerServiceImpl|Implementation class for query scheduler service|
+|69|lens.server.scheduler.service.impl|org.apache.lens.server.scheduler.SchedulerServiceImpl|Implementation class for query scheduler service|
 *--+--+---+--+
 |70|lens.server.scheduler.ws.resource.impl|org.apache.lens.server.scheduler.ScheduleResource|Implementation class for query scheduler resource|
 *--+--+---+--+


[29/50] [abbrv] lens git commit: LENS-840 : LDAP config loader should be more forgiving for attribute absence

Posted by sh...@apache.org.
LENS-840 : LDAP config loader should be more forgiving for attribute absence


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/b3b7e5f4
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/b3b7e5f4
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/b3b7e5f4

Branch: refs/heads/LENS-581
Commit: b3b7e5f4d087be5377e0f043e00b57dfb6e4e124
Parents: 114dab3
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Mon Nov 30 11:37:14 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Mon Nov 30 11:37:14 2015 +0530

----------------------------------------------------------------------
 .../lens/server/user/LDAPBackedDatabaseUserConfigLoader.java     | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/b3b7e5f4/lens-server/src/main/java/org/apache/lens/server/user/LDAPBackedDatabaseUserConfigLoader.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/user/LDAPBackedDatabaseUserConfigLoader.java b/lens-server/src/main/java/org/apache/lens/server/user/LDAPBackedDatabaseUserConfigLoader.java
index 82b76c9..a647282 100644
--- a/lens-server/src/main/java/org/apache/lens/server/user/LDAPBackedDatabaseUserConfigLoader.java
+++ b/lens-server/src/main/java/org/apache/lens/server/user/LDAPBackedDatabaseUserConfigLoader.java
@@ -29,6 +29,7 @@ import java.util.concurrent.TimeUnit;
 import javax.naming.Context;
 import javax.naming.NamingEnumeration;
 import javax.naming.NamingException;
+import javax.naming.directory.Attribute;
 import javax.naming.directory.SearchControls;
 import javax.naming.directory.SearchResult;
 import javax.naming.ldap.InitialLdapContext;
@@ -155,7 +156,8 @@ public class LDAPBackedDatabaseUserConfigLoader extends DatabaseUserConfigLoader
     String[] attributes = new String[ldapFields.length];
     SearchResult sr = findAccountByAccountName(user);
     for (int i = 0; i < attributes.length; i++) {
-      attributes[i] = sr.getAttributes().get(ldapFields[i]).get().toString();
+      Attribute attr = sr.getAttributes().get(ldapFields[i]);
+      attributes[i] = (attr == null ? null : attr.get().toString());
     }
     return attributes;
   }


[30/50] [abbrv] lens git commit: LENS-891 : Fix Weekly covering info for last Sunday of year and first Sunday of next year

Posted by sh...@apache.org.
LENS-891 : Fix Weekly covering info for last Sunday of year and first Sunday of next year


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/b63e06c0
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/b63e06c0
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/b63e06c0

Branch: refs/heads/LENS-581
Commit: b63e06c0cdec67ffb6dc5f14d89f7d10091ef2d5
Parents: b3b7e5f
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Thu Dec 10 11:06:52 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Thu Dec 10 11:06:52 2015 +0530

----------------------------------------------------------------------
 .../src/main/java/org/apache/lens/cube/parse/DateUtil.java  | 9 +--------
 .../test/java/org/apache/lens/cube/parse/TestDateUtil.java  | 4 ++++
 2 files changed, 5 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/b63e06c0/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
index 5e17eac..cd05c68 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
@@ -343,20 +343,13 @@ public final class DateUtil {
 
     Calendar cal = Calendar.getInstance();
     cal.setTime(from);
-    int fromWeek = cal.get(Calendar.WEEK_OF_YEAR);
     int fromDay = cal.get(Calendar.DAY_OF_WEEK);
-    int fromYear = cal.get(YEAR);
-
-    cal.clear();
-    cal.set(YEAR, fromYear);
-    cal.set(Calendar.WEEK_OF_YEAR, fromWeek);
     cal.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY);
-    int maxDayInWeek = cal.getActualMaximum(Calendar.DAY_OF_WEEK);
     Date fromWeekStartDate = cal.getTime();
     boolean coverable = dayDiff % 7 == 0;
     if (fromWeekStartDate.before(from)) {
       // Count from the start of next week
-      dayDiff -= (maxDayInWeek - (fromDay - Calendar.SUNDAY));
+      dayDiff -= (cal.getActualMaximum(Calendar.DAY_OF_WEEK) - (fromDay - Calendar.SUNDAY));
       coverable = false;
     }
 

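The root cause: the old code re-derived the week start by setting WEEK_OF_YEAR together with the from date's year, which misplaces the week when the from date falls in week 1 of the following year (e.g. the last Sunday of December). The fix sets DAY_OF_WEEK directly on the calendar still holding the from date. A standalone sketch of the fixed computation (not part of the patch, assuming a locale whose week starts on Sunday):

    import java.text.SimpleDateFormat;
    import java.util.Calendar;
    import java.util.Locale;

    public class WeekStartSketch {
      public static void main(String[] args) throws Exception {
        Calendar cal = Calendar.getInstance();
        cal.setTime(new SimpleDateFormat("yyyy-MMM-dd", Locale.ENGLISH).parse("2015-Dec-27"));
        // 2015-Dec-27 is a Sunday; anchoring DAY_OF_WEEK on the same calendar keeps the
        // week start at the from date, even across the year boundary.
        cal.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY);
        System.out.println(cal.getTime()); // Sun Dec 27 ... 2015
      }
    }
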
http://git-wip-us.apache.org/repos/asf/lens/blob/b63e06c0/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
index ab88fbe..ff9a96d 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDateUtil.java
@@ -76,6 +76,7 @@ public class TestDateUtil {
     }
   }
 
+
   @Test
   public void testMonthsBetween() throws Exception {
     int i = 0;
@@ -194,6 +195,9 @@ public class TestDateUtil {
 
     weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2013-May-26"), DATE_FMT.parse("2013-Jun-10"));
     assertEquals(weeks, new CoveringInfo(2, false), "2013-May-26 to 2013-Jun-10");
+
+    weeks = getWeeklyCoveringInfo(DATE_FMT.parse("2015-Dec-27"), DATE_FMT.parse("2016-Jan-03"));
+    assertEquals(weeks, new CoveringInfo(1, true), "2015-Dec-27 to 2016-Jan-03");
   }
 
   @Test


[34/50] [abbrv] lens git commit: LENS-885: Cleanup of Cube test cases

Posted by sh...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
index d16ea4c..36c1dba 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
@@ -19,6 +19,7 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
 
 import java.util.*;
@@ -55,18 +56,16 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
   @Test
   public void testDenormsAsDirectFields() throws ParseException, LensException, HiveException {
     // denorm fields directly available
-    String twoDaysITRange =
-      "time_range_in(it, '" + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','"
-        + CubeTestSetup.getDateUptoHours(NOW) + "')";
-    String hqlQuery = rewrite("select dim2big1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange, conf);
+    String hqlQuery = rewrite("select dim2big1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT,
+      conf);
     String expecteddim2big1 =
       getExpectedQuery(cubeName, "select testcube.dim2big1," + " max(testcube.msr3), sum(testcube.msr2) FROM ", null,
         " group by testcube.dim2big1", getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary4"),
         null);
     TestCubeRewriter.compareQueries(hqlQuery, expecteddim2big1);
     // with another table
-    hqlQuery = rewrite("select dim2big1, citydim.name, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange,
-      conf);
+    hqlQuery = rewrite("select dim2big1, citydim.name, max(msr3)," + " msr2 from testCube" + " where "
+      + TWO_DAYS_RANGE_IT, conf);
     String expecteddim2big1WithAnotherTable = getExpectedQuery(cubeName,
       "select testcube.dim2big1, citydim.name, max(testcube.msr3), sum(testcube.msr2) FROM ", " JOIN "
         + getDbName() + "c1_citytable citydim " + "on testcube.cityid = citydim.id and citydim.dt = 'latest' ", null,
@@ -75,7 +74,7 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
       null);
     TestCubeRewriter.compareQueries(hqlQuery, expecteddim2big1WithAnotherTable);
 
-    hqlQuery = rewrite("select dim2big2, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange, conf);
+    hqlQuery = rewrite("select dim2big2, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT, conf);
     String expecteddim2big2 =
       getExpectedQuery(cubeName, "select testcube.dim2big2, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
         " group by testcube.dim2big2", getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary4"),
@@ -84,8 +83,8 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
 
     Configuration conf2 = new Configuration(conf);
     conf2.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
-    hqlQuery =
-      rewrite("select testdim3.name, dim2big1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange, conf2);
+    hqlQuery = rewrite("select testdim3.name, dim2big1, max(msr3)," + " msr2 from testCube" + " where "
+      + TWO_DAYS_RANGE_IT, conf2);
     String expected =
       getExpectedQuery(cubeName,
         "select testdim3.name, testcube.dim2big1, max(testcube.msr3), sum(testcube.msr2) FROM ", " JOIN "
@@ -96,9 +95,9 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
         null);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
-    hqlQuery = rewrite("select dim2big1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange, conf2);
+    hqlQuery = rewrite("select dim2big1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT, conf2);
     TestCubeRewriter.compareQueries(hqlQuery, expecteddim2big1);
-    hqlQuery = rewrite("select dim2big2, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange, conf2);
+    hqlQuery = rewrite("select dim2big2, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE_IT, conf2);
     TestCubeRewriter.compareQueries(hqlQuery, expecteddim2big2);
   }
 
@@ -190,11 +189,8 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
 
   @Test
   public void testCubeQueryWithExpressionHavingDenormColumnComingAsDirectColumn() throws Exception {
-    String twoDaysITRange =
-      "time_range_in(it, '" + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','"
-        + CubeTestSetup.getDateUptoHours(NOW) + "')";
-    String hqlQuery = rewrite("select substrdim2big1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange,
-      conf);
+    String hqlQuery = rewrite("select substrdim2big1, max(msr3)," + " msr2 from testCube" + " where "
+      + TWO_DAYS_RANGE_IT, conf);
     String expecteddim2big1 =
       getExpectedQuery(cubeName, "select substr(testcube.dim2big1, 5), max(testcube.msr3), sum(testcube.msr2) FROM ",
         null, " group by substr(testcube.dim2big1, 5)",

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java
index 0d1f9fe..f48e1b7 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionContext.java
@@ -19,11 +19,11 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.TWO_DAYS_RANGE;
+
 import java.util.ArrayList;
 import java.util.List;
 
-import static org.apache.lens.cube.parse.CubeTestSetup.TWO_DAYS_RANGE;
-
 import org.apache.lens.cube.parse.ExpressionResolver.ExprSpecContext;
 
 import org.apache.hadoop.conf.Configuration;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
index 1e21fb0..e77f919 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
@@ -19,6 +19,7 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
index ea561b6..d9e442d 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
@@ -19,6 +19,7 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
 
 import static org.testng.Assert.*;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestORTimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestORTimeRangeWriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestORTimeRangeWriter.java
index 4a23818..b98fdfb 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestORTimeRangeWriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestORTimeRangeWriter.java
@@ -19,6 +19,8 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
+
 import java.text.DateFormat;
 import java.util.ArrayList;
 import java.util.List;
@@ -43,13 +45,13 @@ public class TestORTimeRangeWriter extends TestTimeRangeWriter {
   public void validateDisjoint(String whereClause, DateFormat format) {
     List<String> parts = new ArrayList<String>();
     if (format == null) {
-      parts.add(UpdatePeriod.MONTHLY.format(CubeTestSetup.TWO_MONTHS_BACK));
-      parts.add(UpdatePeriod.DAILY.format(CubeTestSetup.TWODAYS_BACK));
-      parts.add(UpdatePeriod.HOURLY.format(CubeTestSetup.NOW));
+      parts.add(UpdatePeriod.MONTHLY.format(TWO_MONTHS_BACK));
+      parts.add(UpdatePeriod.DAILY.format(TWODAYS_BACK));
+      parts.add(UpdatePeriod.HOURLY.format(NOW));
     } else {
-      parts.add(format.format(CubeTestSetup.TWO_MONTHS_BACK));
-      parts.add(format.format(CubeTestSetup.TWODAYS_BACK));
-      parts.add(format.format(CubeTestSetup.NOW));
+      parts.add(format.format(TWO_MONTHS_BACK));
+      parts.add(format.format(TWODAYS_BACK));
+      parts.add(format.format(NOW));
     }
 
     System.out.println("Expected :" + StorageUtil.getWherePartClause("dt", "test", parts));
@@ -60,30 +62,16 @@ public class TestORTimeRangeWriter extends TestTimeRangeWriter {
   public void validateConsecutive(String whereClause, DateFormat format) {
     List<String> parts = new ArrayList<String>();
     if (format == null) {
-      parts.add(UpdatePeriod.DAILY.format(CubeTestSetup.ONE_DAY_BACK));
-      parts.add(UpdatePeriod.DAILY.format(CubeTestSetup.TWODAYS_BACK));
-      parts.add(UpdatePeriod.DAILY.format(CubeTestSetup.NOW));
-    } else {
-      parts.add(format.format(CubeTestSetup.ONE_DAY_BACK));
-      parts.add(format.format(CubeTestSetup.TWODAYS_BACK));
-      parts.add(format.format(CubeTestSetup.NOW));
-    }
-
-    System.out.println("Expected :" + StorageUtil.getWherePartClause("dt", "test", parts));
-    Assert.assertEquals(whereClause, StorageUtil.getWherePartClause("dt", "test", parts));
-  }
-
-  @Override
-  public void validateSingle(String whereClause, DateFormat format) {
-    List<String> parts = new ArrayList<String>();
-    if (format == null) {
-      parts.add(UpdatePeriod.DAILY.format(CubeTestSetup.ONE_DAY_BACK));
+      parts.add(getDateStringWithOffset(UpdatePeriod.DAILY, -1));
+      parts.add(getDateStringWithOffset(UpdatePeriod.DAILY, -2));
+      parts.add(getDateStringWithOffset(UpdatePeriod.DAILY, 0));
     } else {
-      parts.add(format.format(CubeTestSetup.ONE_DAY_BACK));
+      parts.add(format.format(getDateWithOffset(UpdatePeriod.DAILY, -1)));
+      parts.add(format.format(getDateWithOffset(UpdatePeriod.DAILY, -2)));
+      parts.add(format.format(getDateWithOffset(UpdatePeriod.DAILY, 0)));
     }
 
     System.out.println("Expected :" + StorageUtil.getWherePartClause("dt", "test", parts));
     Assert.assertEquals(whereClause, StorageUtil.getWherePartClause("dt", "test", parts));
   }
-
 }
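
This commit consistently swaps fixed CubeTestSetup date constants (NOW, ONE_DAY_BACK, TWODAYS_BACK, ...) for offset-based DateFactory helpers. A plausible shape of those helpers, sketched from how the tests call them (an illustration of the assumed contract, not the actual DateFactory source):

import java.util.Calendar;
import java.util.Date;

import org.apache.lens.cube.metadata.UpdatePeriod;

class DateFactorySketch {
  static final Date NOW = new Date(); // assumption: the real DateFactory pins one fixed instant

  // Sketch: dates as offsets from the fixed NOW, in units of the update period.
  static Date getDateWithOffset(UpdatePeriod period, int offset) {
    Calendar cal = Calendar.getInstance();
    cal.setTime(NOW);
    cal.add(period.calendarField(), offset); // e.g. DAILY maps to a day-based calendar field
    return cal.getTime();
  }

  // Sketch: the string form applies the period's own format, as in
  // UpdatePeriod.DAILY.format(date) seen elsewhere in this diff.
  static String getDateStringWithOffset(UpdatePeriod period, int offset) {
    return period.format(getDateWithOffset(period, offset));
  }
}

So getDateWithOffset(DAILY, -2) stands in for the old TWODAYS_BACK, and getDateWithOffset(HOURLY, 0) for NOW.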

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
index 255aade..571f7de 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
@@ -19,7 +19,7 @@
 
 package org.apache.lens.cube.parse;
 
-import static org.apache.lens.cube.parse.CubeTestSetup.TWO_DAYS_RANGE;
+import static org.apache.lens.cube.metadata.DateFactory.TWO_DAYS_RANGE;
 
 import java.util.Arrays;
 

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
index 4d3a3dc..5a072e4 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestRewriterPlan.java
@@ -19,7 +19,7 @@
 
 package org.apache.lens.cube.parse;
 
-import static org.apache.lens.cube.parse.CubeTestSetup.TWO_DAYS_RANGE;
+import static org.apache.lens.cube.metadata.DateFactory.TWO_DAYS_RANGE;
 
 import java.util.Arrays;
 import java.util.Collections;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestStorageUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestStorageUtil.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestStorageUtil.java
index 73c3338..0069609 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestStorageUtil.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestStorageUtil.java
@@ -19,6 +19,8 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
+
 import java.util.*;
 
 import org.apache.lens.cube.metadata.FactPartition;
@@ -64,9 +66,9 @@ public class TestStorageUtil {
     Configuration conf = new Configuration();
     // {s1,s2,s3}, {s3}, {s3} -> {s3}
     List<FactPartition> answeringParts = new ArrayList<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s123));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s3));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.HOURLY, null, null, s3));
+    answeringParts.add(new FactPartition("dt", TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s123));
+    answeringParts.add(new FactPartition("dt", TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s3));
+    answeringParts.add(new FactPartition("dt", NOW, UpdatePeriod.HOURLY, null, null, s3));
     Map<String, Set<FactPartition>> result = new HashMap<String, Set<FactPartition>>();
     StorageUtil.getMinimalAnsweringTables(answeringParts, result);
     System.out.println("results:" + result);
@@ -74,15 +76,15 @@ public class TestStorageUtil {
     Assert.assertEquals("S3", result.keySet().iterator().next());
     Set<FactPartition> coveredParts = result.get("S3");
     Assert.assertEquals(3, coveredParts.size());
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWO_MONTHS_BACK));
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.NOW));
+    Assert.assertTrue(contains(coveredParts, TWO_MONTHS_BACK));
+    Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
+    Assert.assertTrue(contains(coveredParts, NOW));
 
     // {s1,s2,s3}, {s4}, {s5} - > {s1,s4,s5} or {s2,s4,s5} or {s3,s4,s5}
     answeringParts = new ArrayList<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s123));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s4));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.HOURLY, null, null, s5));
+    answeringParts.add(new FactPartition("dt", TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s123));
+    answeringParts.add(new FactPartition("dt", TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s4));
+    answeringParts.add(new FactPartition("dt", NOW, UpdatePeriod.HOURLY, null, null, s5));
     result = new HashMap<String, Set<FactPartition>>();
     StorageUtil.getMinimalAnsweringTables(answeringParts, result);
     System.out.println("results:" + result);
@@ -93,10 +95,10 @@ public class TestStorageUtil {
       || result.keySet().contains("S3"));
     coveredParts = result.get("S4");
     Assert.assertEquals(1, coveredParts.size());
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
+    Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
     coveredParts = result.get("S5");
     Assert.assertEquals(1, coveredParts.size());
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.NOW));
+    Assert.assertTrue(contains(coveredParts, NOW));
     coveredParts = result.get("S1");
     if (coveredParts == null) {
       coveredParts = result.get("S2");
@@ -105,13 +107,13 @@ public class TestStorageUtil {
       coveredParts = result.get("S3");
     }
     Assert.assertEquals(1, coveredParts.size());
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWO_MONTHS_BACK));
+    Assert.assertTrue(contains(coveredParts, TWO_MONTHS_BACK));
 
     // {s1}, {s2}, {s3} -> {s1,s2,s3}
     answeringParts = new ArrayList<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s1));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s2));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.HOURLY, null, null, s3));
+    answeringParts.add(new FactPartition("dt", TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s1));
+    answeringParts.add(new FactPartition("dt", TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s2));
+    answeringParts.add(new FactPartition("dt", NOW, UpdatePeriod.HOURLY, null, null, s3));
     result = new HashMap<String, Set<FactPartition>>();
     StorageUtil.getMinimalAnsweringTables(answeringParts, result);
     System.out.println("results:" + result);
@@ -121,19 +123,19 @@ public class TestStorageUtil {
     Assert.assertTrue(result.keySet().contains("S3"));
     coveredParts = result.get("S1");
     Assert.assertEquals(1, coveredParts.size());
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWO_MONTHS_BACK));
+    Assert.assertTrue(contains(coveredParts, TWO_MONTHS_BACK));
     coveredParts = result.get("S2");
     Assert.assertEquals(1, coveredParts.size());
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
+    Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
     coveredParts = result.get("S3");
     Assert.assertEquals(1, coveredParts.size());
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.NOW));
+    Assert.assertTrue(contains(coveredParts, NOW));
 
     // {s1, s2}, {s2, s3}, {s4} -> {s2,s4}
     answeringParts = new ArrayList<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s12));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s23));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.HOURLY, null, null, s4));
+    answeringParts.add(new FactPartition("dt", TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s12));
+    answeringParts.add(new FactPartition("dt", TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s23));
+    answeringParts.add(new FactPartition("dt", NOW, UpdatePeriod.HOURLY, null, null, s4));
     result = new HashMap<String, Set<FactPartition>>();
     StorageUtil.getMinimalAnsweringTables(answeringParts, result);
     System.out.println("results:" + result);
@@ -142,17 +144,17 @@ public class TestStorageUtil {
     Assert.assertTrue(result.keySet().contains("S4"));
     coveredParts = result.get("S2");
     Assert.assertEquals(2, coveredParts.size());
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWO_MONTHS_BACK));
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
+    Assert.assertTrue(contains(coveredParts, TWO_MONTHS_BACK));
+    Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
     coveredParts = result.get("S4");
     Assert.assertEquals(1, coveredParts.size());
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.NOW));
+    Assert.assertTrue(contains(coveredParts, NOW));
 
     // {s1, s2}, {s2, s4}, {s4} -> {s1,s4} or {s2,s4}
     answeringParts = new ArrayList<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s12));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s24));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.HOURLY, null, null, s4));
+    answeringParts.add(new FactPartition("dt", TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s12));
+    answeringParts.add(new FactPartition("dt", TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s24));
+    answeringParts.add(new FactPartition("dt", NOW, UpdatePeriod.HOURLY, null, null, s4));
     result = new HashMap<String, Set<FactPartition>>();
     StorageUtil.getMinimalAnsweringTables(answeringParts, result);
     System.out.println("results:" + result);
@@ -163,32 +165,32 @@ public class TestStorageUtil {
     if (coveredParts == null) {
       coveredParts = result.get("S2");
       Assert.assertTrue(coveredParts.size() >= 1);
-      Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWO_MONTHS_BACK));
+      Assert.assertTrue(contains(coveredParts, TWO_MONTHS_BACK));
       if (coveredParts.size() == 2) {
-        Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
+        Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
         Assert.assertEquals(1, result.get("S4").size());
       }
       coveredParts = result.get("S4");
       Assert.assertTrue(coveredParts.size() >= 1);
-      Assert.assertTrue(contains(coveredParts, CubeTestSetup.NOW));
+      Assert.assertTrue(contains(coveredParts, NOW));
       if (coveredParts.size() == 2) {
-        Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
+        Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
         Assert.assertEquals(1, result.get("S2").size());
       }
     } else {
       Assert.assertEquals(1, coveredParts.size());
-      Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWO_MONTHS_BACK));
+      Assert.assertTrue(contains(coveredParts, TWO_MONTHS_BACK));
       coveredParts = result.get("S4");
       Assert.assertTrue(coveredParts.size() >= 1);
-      Assert.assertTrue(contains(coveredParts, CubeTestSetup.NOW));
-      Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
+      Assert.assertTrue(contains(coveredParts, NOW));
+      Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
     }
 
     // {s1, s2}, {s2, s3}, {s3,s4} -> {s2,s3} or {s1, s3} or {s2, s4}
     answeringParts = new ArrayList<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s12));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s23));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.HOURLY, null, null, s34));
+    answeringParts.add(new FactPartition("dt", TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s12));
+    answeringParts.add(new FactPartition("dt", TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s23));
+    answeringParts.add(new FactPartition("dt", NOW, UpdatePeriod.HOURLY, null, null, s34));
     result = new HashMap<String, Set<FactPartition>>();
     StorageUtil.getMinimalAnsweringTables(answeringParts, result);
     System.out.println("results:" + result);
@@ -204,24 +206,24 @@ public class TestStorageUtil {
     //Assert.assertTrue(result.keySet().contains("S3"));
     /* coveredParts = result.get("S2");
     Assert.assertTrue(coveredParts.size() >= 1);
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWO_MONTHS_BACK));
+    Assert.assertTrue(contains(coveredParts, TWO_MONTHS_BACK));
     if (coveredParts.size() == 2) {
-      Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
+      Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
       Assert.assertEquals(1, result.get("S3").size());
     }
     coveredParts = result.get("S3");
     Assert.assertTrue(coveredParts.size() >= 1);
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.NOW));
+    Assert.assertTrue(contains(coveredParts, NOW));
     if (coveredParts.size() == 2) {
-      Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
+      Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
       Assert.assertEquals(1, result.get("S2").size());
     }*/
 
     // {s1, s2}, {s2}, {s1} -> {s1,s2}
     answeringParts = new ArrayList<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s12));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s2));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.HOURLY, null, null, s1));
+    answeringParts.add(new FactPartition("dt", TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null, s12));
+    answeringParts.add(new FactPartition("dt", TWODAYS_BACK, UpdatePeriod.DAILY, null, null, s2));
+    answeringParts.add(new FactPartition("dt", NOW, UpdatePeriod.HOURLY, null, null, s1));
     result = new HashMap<String, Set<FactPartition>>();
     StorageUtil.getMinimalAnsweringTables(answeringParts, result);
     System.out.println("results:" + result);
@@ -230,16 +232,16 @@ public class TestStorageUtil {
     Assert.assertTrue(result.keySet().contains("S2"));
     coveredParts = result.get("S2");
     Assert.assertTrue(coveredParts.size() >= 1);
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWODAYS_BACK));
+    Assert.assertTrue(contains(coveredParts, TWODAYS_BACK));
     if (coveredParts.size() == 2) {
-      Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWO_MONTHS_BACK));
+      Assert.assertTrue(contains(coveredParts, TWO_MONTHS_BACK));
       Assert.assertEquals(1, result.get("S1").size());
     }
     coveredParts = result.get("S1");
     Assert.assertTrue(coveredParts.size() >= 1);
-    Assert.assertTrue(contains(coveredParts, CubeTestSetup.NOW));
+    Assert.assertTrue(contains(coveredParts, NOW));
     if (coveredParts.size() == 2) {
-      Assert.assertTrue(contains(coveredParts, CubeTestSetup.TWO_MONTHS_BACK));
+      Assert.assertTrue(contains(coveredParts, TWO_MONTHS_BACK));
       Assert.assertEquals(1, result.get("S2").size());
     }
   }
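
The expectations in this test read as a set-cover problem: each partition lists the storages that can answer it, and getMinimalAnsweringTables picks a small set of storages covering all partitions (ties allowed, hence the "or" alternatives in the comments). One plausible greedy strategy consistent with those expectations, given here as a hedged sketch and not the actual StorageUtil implementation:

import java.util.*;

class MinimalStorageSketch {
  // Repeatedly pick the storage that answers the most still-uncovered partitions.
  static Map<String, Set<String>> pickMinimalStorages(Map<String, Set<String>> partToStorages) {
    Map<String, Set<String>> chosen = new LinkedHashMap<>();
    Set<String> uncovered = new LinkedHashSet<>(partToStorages.keySet());
    while (!uncovered.isEmpty()) {
      // Count, per storage, how many uncovered partitions it could answer.
      Map<String, Integer> counts = new HashMap<>();
      String best = null;
      int bestCount = 0;
      for (String part : uncovered) {
        for (String storage : partToStorages.get(part)) {
          int c = counts.merge(storage, 1, Integer::sum);
          if (c > bestCount) { bestCount = c; best = storage; }
        }
      }
      // Assign every partition the chosen storage can answer to it.
      Set<String> covered = new LinkedHashSet<>();
      for (Iterator<String> it = uncovered.iterator(); it.hasNext();) {
        String part = it.next();
        if (partToStorages.get(part).contains(best)) { covered.add(part); it.remove(); }
      }
      chosen.put(best, covered);
    }
    return chosen;
  }
}

For example, partitions mapped to {S1,S2,S3}, {S3}, {S3} collapse onto S3 alone, matching the first assertion block above.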

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java
index a431717..eb8c6eb 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeExtractor.java
@@ -19,12 +19,15 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
 
 import java.util.List;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.TestCubeMetastoreClient;
+import org.apache.lens.cube.metadata.TimeRange;
+import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
@@ -39,14 +42,10 @@ import org.testng.annotations.Test;
 
 public class TestTimeRangeExtractor extends TestQueryRewrite {
   private CubeQueryRewriter driver;
-  private String dateNow;
-  private String dateTwoDaysBack;
 
   @BeforeTest
   public void setupInstance() throws Exception {
     driver = new CubeQueryRewriter(new Configuration(), new HiveConf());
-    dateTwoDaysBack = getDateUptoHours(TWODAYS_BACK);
-    dateNow = getDateUptoHours(NOW);
   }
 
   @AfterTest
@@ -61,7 +60,8 @@ public class TestTimeRangeExtractor extends TestQueryRewrite {
 
   @Test
   public void testTimeRangeValidation() throws Exception {
-    String timeRange2 = " time_range_in(d_time, '" + dateNow + "','" + dateTwoDaysBack + "')";
+    // reverse range
+    String timeRange2 = getTimeRangeString(UpdatePeriod.DAILY, 0, -2, UpdatePeriod.HOURLY);
     try {
       // this should throw exception because from date is after to date
       driver.rewrite("SELECT cityid, testCube.msr2 from" + " testCube where " + timeRange2);
@@ -74,7 +74,8 @@ public class TestTimeRangeExtractor extends TestQueryRewrite {
 
   @Test
   public void testEqualTimeRangeValidation() throws Exception {
-    String equalTimeRange = " time_range_in(d_time, '" + dateNow + "','" + dateNow + "')";
+    // zero range
+    String equalTimeRange = getTimeRangeString(UpdatePeriod.HOURLY, 0, 0);
     try {
       // this should throw exception because from date and to date are same
       driver.rewrite("SELECT cityid, testCube.msr2 from" + " testCube where " + equalTimeRange);
@@ -87,18 +88,16 @@ public class TestTimeRangeExtractor extends TestQueryRewrite {
 
   @Test
   public void testNoNPE() throws Exception {
-    String timeRange = " time_range_in(d_time, '" + dateTwoDaysBack + "','" + dateNow + "')";
-    String q1 = "SELECT cityid, testCube.msr2 from testCube where " + timeRange + " AND cityid IS NULL";
+    String q1 = "SELECT cityid, testCube.msr2 from testCube where " + TWO_DAYS_RANGE + " AND cityid IS NULL";
     rewrite(driver, q1);
-    q1 = "SELECT cityid, testCube.msr2 from testCube where cityid IS NULL AND " + timeRange;
+    q1 = "SELECT cityid, testCube.msr2 from testCube where cityid IS NULL AND " + TWO_DAYS_RANGE;
     rewrite(driver, q1);
   }
 
   @Test
   public void testTimeRangeASTPosition() throws Exception {
     // check that time range can be any child of AND
-    String timeRange = " time_range_in(d_time, '" + dateTwoDaysBack + "','" + dateNow + "')";
-    String q1 = "SELECT cityid, testCube.msr2 from testCube where " + timeRange + " AND cityid=1";
+    String q1 = "SELECT cityid, testCube.msr2 from testCube where " + TWO_DAYS_RANGE + " AND cityid=1";
     CubeQueryContext cubeql = driver.rewrite(q1);
     String hql = cubeql.toHQL();
   }
@@ -106,10 +105,9 @@ public class TestTimeRangeExtractor extends TestQueryRewrite {
   @Test
   public void testPartitionColNameExtract() throws Exception {
     String q2 =
-      "SELECT cityid, testCube.msr3 from testCube where cityid=1 AND " + " time_range_in(d_time, '" + dateTwoDaysBack
-        + "','" + dateNow + "')";
+      "SELECT cityid, testCube.msr3 from testCube where cityid=1 AND " + TWO_DAYS_RANGE;
     CubeQueryContext cubeql = driver.rewrite(q2);
-    String hql = cubeql.toHQL();
+    cubeql.toHQL();
     // Check that column name in time range is extracted properly
     TimeRange range = cubeql.getTimeRanges().get(0);
     Assert.assertNotNull(range);
@@ -124,12 +122,11 @@ public class TestTimeRangeExtractor extends TestQueryRewrite {
     String dateNow = getDateUptoHours(NOW);
     // time range within time range
     String q3 =
-      "SELECT cityid, testCube.msr3 FROM testCube where cityid=1 AND" + "  (time_range_in(d_time, '" + dateTwoDaysBack
-        + "','" + dateNow + "')  "
+      "SELECT cityid, testCube.msr3 FROM testCube where cityid=1 AND (" + TWO_DAYS_RANGE
         // Time range as sibling of the first time range
-        + " OR " + " time_range_in(d_time, '" + dateTwoDaysBack + "', '" + dateNow + "'))";
+        + " OR " + TWO_DAYS_RANGE + ")";
     CubeQueryContext cubeql = driver.rewrite(q3);
-    String hql = cubeql.toHQL();
+    cubeql.toHQL();
 
     List<TimeRange> ranges = cubeql.getTimeRanges();
     Assert.assertEquals(2, ranges.size());
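
The same helpers replace the hand-built time_range_in literals. Judging from the removed lines, getTimeRangeString(UpdatePeriod.DAILY, 0, -2, UpdatePeriod.HOURLY) expands to a literal of this shape, where the from-date (offset 0, i.e. now) falls after the to-date (two days back), which is exactly what testTimeRangeValidation expects the rewriter to reject. An illustrative sketch, not the DateFactory source; the strings passed in would come from an hour-granularity formatter like the getDateUptoHours calls visible in the removed code:

class TimeRangeStringSketch {
  // Sketch: a reversed range literal, from-date after to-date.
  static String reversedTwoDayRange(String nowUptoHours, String twoDaysBackUptoHours) {
    return " time_range_in(d_time, '" + nowUptoHours + "','" + twoDaysBackUptoHours + "')";
  }
}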

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
index 1fc8bc8..da0e4f4 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
@@ -19,9 +19,9 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.COLUMN_NOT_FOUND;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.FACT_NOT_AVAILABLE_IN_RANGE;
-import static org.apache.lens.cube.parse.CubeTestSetup.*;
 
 import static org.testng.Assert.assertEquals;
 import static org.testng.Assert.assertTrue;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
index 0248409..87e128f 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriter.java
@@ -19,14 +19,18 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateFactory.*;
+import static org.apache.lens.cube.metadata.UpdatePeriod.*;
+
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
+import java.util.ArrayList;
 import java.util.LinkedHashSet;
+import java.util.List;
 import java.util.Set;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.FactPartition;
-import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.apache.lens.server.api.error.LensException;
 
 import org.testng.Assert;
@@ -45,16 +49,26 @@ public abstract class TestTimeRangeWriter {
 
   public abstract void validateConsecutive(String whereClause, DateFormat format);
 
-  public abstract void validateSingle(String whereClause, DateFormat object);
+  public void validateSingle(String whereClause, DateFormat format) {
+    List<String> parts = new ArrayList<String>();
+    if (format == null) {
+      parts.add(getDateStringWithOffset(DAILY, -1));
+    } else {
+      parts.add(format.format(getDateWithOffset(DAILY, -1)));
+    }
+
+    System.out.println("Expected :" + StorageUtil.getWherePartClause("dt", "test", parts));
+    Assert.assertEquals(whereClause, StorageUtil.getWherePartClause("dt", "test", parts));
+  }
 
   public static final DateFormat DB_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
 
   @Test
   public void testDisjointParts() {
     Set<FactPartition> answeringParts = new LinkedHashSet<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, null));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.HOURLY, null, null));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(MONTHLY, -2), MONTHLY, null, null));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -2), DAILY, null, null));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(HOURLY, 0), HOURLY, null, null));
 
     LensException th = null;
     String whereClause = null;
@@ -76,10 +90,10 @@ public abstract class TestTimeRangeWriter {
     }
 
     // test with format
-    answeringParts = new LinkedHashSet<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWO_MONTHS_BACK, UpdatePeriod.MONTHLY, null, DB_FORMAT));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, DB_FORMAT));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.HOURLY, null, DB_FORMAT));
+    answeringParts = new LinkedHashSet<>();
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(MONTHLY, -2), MONTHLY, null, DB_FORMAT));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -2), DAILY, null, DB_FORMAT));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(HOURLY, 0), HOURLY, null, DB_FORMAT));
 
     th = null;
     try {
@@ -100,17 +114,17 @@ public abstract class TestTimeRangeWriter {
   @Test
   public void testConsecutiveDayParts() throws LensException {
     Set<FactPartition> answeringParts = new LinkedHashSet<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.ONE_DAY_BACK, UpdatePeriod.DAILY, null, null));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, null));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.DAILY, null, null));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -1), DAILY, null, null));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -2), DAILY, null, null));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, 0), DAILY, null, null));
 
     String whereClause = getTimerangeWriter().getTimeRangeWhereClause(null, "test", answeringParts);
     validateConsecutive(whereClause, null);
 
     answeringParts = new LinkedHashSet<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.ONE_DAY_BACK, UpdatePeriod.DAILY, null, DB_FORMAT));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.TWODAYS_BACK, UpdatePeriod.DAILY, null, DB_FORMAT));
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.NOW, UpdatePeriod.DAILY, null, DB_FORMAT));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -1), DAILY, null, DB_FORMAT));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -2), DAILY, null, DB_FORMAT));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, 0), DAILY, null, DB_FORMAT));
 
     whereClause = getTimerangeWriter().getTimeRangeWhereClause(null, "test", answeringParts);
     validateConsecutive(whereClause, DB_FORMAT);
@@ -119,12 +133,12 @@ public abstract class TestTimeRangeWriter {
   @Test
   public void testSinglePart() throws LensException {
     Set<FactPartition> answeringParts = new LinkedHashSet<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.ONE_DAY_BACK, UpdatePeriod.DAILY, null, null));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -1), DAILY, null, null));
     String whereClause = getTimerangeWriter().getTimeRangeWhereClause(null, "test", answeringParts);
     validateSingle(whereClause, null);
 
     answeringParts = new LinkedHashSet<FactPartition>();
-    answeringParts.add(new FactPartition("dt", CubeTestSetup.ONE_DAY_BACK, UpdatePeriod.DAILY, null, DB_FORMAT));
+    answeringParts.add(new FactPartition("dt", getDateWithOffset(DAILY, -1), DAILY, null, DB_FORMAT));
     whereClause = getTimerangeWriter().getTimeRangeWhereClause(null, "test", answeringParts);
     validateSingle(whereClause, DB_FORMAT);
 

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
index 7bd7b6b..b7372f1 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeWriterWithQuery.java
@@ -19,6 +19,11 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.error.LensCubeErrorCode.CANNOT_USE_TIMERANGE_WRITER;
+import static org.apache.lens.cube.metadata.DateFactory.*;
+import static org.apache.lens.cube.metadata.UpdatePeriod.CONTINUOUS;
+import static org.apache.lens.cube.metadata.UpdatePeriod.DAILY;
+import static org.apache.lens.cube.parse.CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
 
 import java.text.DateFormat;
@@ -28,7 +33,6 @@ import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.apache.lens.server.api.error.LensException;
 
@@ -45,7 +49,7 @@ import lombok.extern.slf4j.Slf4j;
 public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
 
   private Configuration conf;
-  private final String cubeName = CubeTestSetup.TEST_CUBE_NAME;
+  private final String cubeName = TEST_CUBE_NAME;
 
   @BeforeTest
   public void setupDriver() throws Exception {
@@ -84,39 +88,29 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
       th = e;
       log.error("Semantic exception while testing cube query.", e);
     }
-    if (!CubeTestSetup.isZerothHour()) {
+    if (!isZerothHour()) {
       Assert.assertNotNull(th);
       Assert
-      .assertEquals(th.getErrorCode(), LensCubeErrorCode.CANNOT_USE_TIMERANGE_WRITER.getLensErrorInfo().getErrorCode());
+      .assertEquals(th.getErrorCode(), CANNOT_USE_TIMERANGE_WRITER.getLensErrorInfo().getErrorCode());
     }
     // hourly partitions for two days
-    conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
-
+    conf.setBoolean(FAIL_QUERY_ON_PARTIAL_DATA, true);
     DateFormat qFmt = new SimpleDateFormat("yyyy-MM-dd-HH:mm:ss");
-    Calendar qCal = Calendar.getInstance();
-    Date toDate = qCal.getTime();
-    String qTo = qFmt.format(toDate);
-    qCal.setTime(TWODAYS_BACK);
-    Date from2DaysBackDate = qCal.getTime();
-    String qFrom = qFmt.format(from2DaysBackDate);
-    String twoDaysInRangeClause = " time_range_in(d_time, '"+ qFrom + "', '" + qTo + "')";
+    String twoDaysInRangeClause = getTimeRangeString(DAILY, -2, 0, qFmt);
 
     String hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + twoDaysInRangeClause, conf);
     Map<String, String> whereClauses = new HashMap<String, String>();
     whereClauses.put(
-      CubeTestSetup.getDbName() + "c1_testfact",
-      TestBetweenTimeRangeWriter.getBetweenClause(cubeName, "dt", from2DaysBackDate, toDate,
-          UpdatePeriod.CONTINUOUS.format()));
+      getDbName() + "c1_testfact",
+      TestBetweenTimeRangeWriter.getBetweenClause(cubeName, "dt",
+        getDateWithOffset(DAILY, -2), getDateWithOffset(DAILY, 0), CONTINUOUS.format()));
     String expected = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, whereClauses);
     System.out.println("HQL:" + hqlQuery);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     // multiple range query
-    //from date 4 days back
-    qCal.setTime(BEFORE_4_DAYS_START);
-    Date from4DaysBackDate = qCal.getTime();
-    String qFrom4Days = qFmt.format(from4DaysBackDate);
-    String fourDaysInRangeClause = " time_range_in(d_time, '"+ qFrom4Days + "', '" + qTo + "')";
+    //from date 6 days back
+    String fourDaysInRangeClause = getTimeRangeString(DAILY, -6, 0, qFmt);
 
     hqlQuery =
       rewrite("select SUM(msr2) from testCube" + " where " + twoDaysInRangeClause + " OR "
@@ -124,12 +118,12 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
 
     whereClauses = new HashMap<String, String>();
     whereClauses.put(
-      CubeTestSetup.getDbName() + "c1_testfact",
-      TestBetweenTimeRangeWriter.getBetweenClause(cubeName, "dt", from2DaysBackDate, toDate,
-          UpdatePeriod.CONTINUOUS.format())
+      getDbName() + "c1_testfact",
+      TestBetweenTimeRangeWriter.getBetweenClause(cubeName, "dt", getDateWithOffset(DAILY, -2),
+        getDateWithOffset(DAILY, 0), CONTINUOUS.format())
         + " OR"
-        + TestBetweenTimeRangeWriter.getBetweenClause(cubeName, "dt", from4DaysBackDate, toDate,
-        UpdatePeriod.CONTINUOUS.format()));
+        + TestBetweenTimeRangeWriter.getBetweenClause(cubeName, "dt", getDateWithOffset(DAILY, -6),
+        getDateWithOffset(DAILY, 0), CONTINUOUS.format()));
     expected = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, whereClauses);
     System.out.println("HQL:" + hqlQuery);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
@@ -138,9 +132,9 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     conf.set(CubeQueryConfUtil.PART_WHERE_CLAUSE_DATE_FORMAT, "yyyy-MM-dd HH:mm:ss");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     whereClauses = new HashMap<String, String>();
-    whereClauses.put(CubeTestSetup.getDbName() + "c1_testfact", TestBetweenTimeRangeWriter.getBetweenClause(cubeName,
-      "dt", getUptoHour(CubeTestSetup.TWODAYS_BACK),
-      getUptoHour(CubeTestSetup.NOW), TestTimeRangeWriter.DB_FORMAT));
+    whereClauses.put(getDbName() + "c1_testfact", TestBetweenTimeRangeWriter.getBetweenClause(cubeName,
+      "dt", getUptoHour(TWODAYS_BACK),
+      getUptoHour(NOW), TestTimeRangeWriter.DB_FORMAT));
     expected = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, whereClauses);
     System.out.println("HQL:" + hqlQuery);
     TestCubeRewriter.compareQueries(hqlQuery, expected);
@@ -150,20 +144,19 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
   public void testCubeQueryWithTimeDim() throws Exception {
     Configuration tconf = new Configuration(conf);
     // hourly partitions for two days
-    tconf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
+    tconf.setBoolean(FAIL_QUERY_ON_PARTIAL_DATA, true);
     tconf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C4");
     tconf.setBoolean(CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, false);
     tconf.set(CubeQueryConfUtil.PART_WHERE_CLAUSE_DATE_FORMAT, "yyyy-MM-dd HH:mm:ss");
     tconf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C4"), "MONTHLY,DAILY,HOURLY");
 
     String query =
-      "SELECT test_time_dim, msr2 FROM testCube where " + "time_range_in(test_time_dim, '"
-        + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')";
+      "SELECT test_time_dim, msr2 FROM testCube where " + TWO_DAYS_RANGE_TTD;
     String hqlQuery = rewrite(query, tconf);
     Map<String, String> whereClauses = new HashMap<String, String>();
-    whereClauses.put(CubeTestSetup.getDbName() + "c4_testfact2", TestBetweenTimeRangeWriter.getBetweenClause("hourdim",
-      "full_hour", getUptoHour(CubeTestSetup.TWODAYS_BACK),
-      getUptoHour(getOneLess(CubeTestSetup.NOW, UpdatePeriod.HOURLY.calendarField())), TestTimeRangeWriter.DB_FORMAT));
+    whereClauses.put(getDbName() + "c4_testfact2", TestBetweenTimeRangeWriter.getBetweenClause("hourdim",
+      "full_hour", getUptoHour(TWODAYS_BACK),
+      getUptoHour(getOneLess(NOW, UpdatePeriod.HOURLY.calendarField())), TestTimeRangeWriter.DB_FORMAT));
     System.out.println("HQL:" + hqlQuery);
     String expected =
       getExpectedQuery(cubeName, "select hourdim.full_hour, sum(testcube.msr2) FROM ", " join " + getDbName()
@@ -172,8 +165,7 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     query =
-      "SELECT msr2 FROM testCube where " + "time_range_in(test_time_dim, '"
-        + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')";
+      "SELECT msr2 FROM testCube where " + TWO_DAYS_RANGE_TTD;
     hqlQuery = rewrite(query, tconf);
     System.out.println("HQL:" + hqlQuery);
     expected =
@@ -182,9 +174,7 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     query =
-      "SELECT msr2 FROM testCube where testcube.cityid > 2 and " + "time_range_in(test_time_dim, '"
-        + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW)
-        + "') and testcube.cityid != 5";
+      "SELECT msr2 FROM testCube where testcube.cityid > 2 and " + TWO_DAYS_RANGE_TTD + " and testcube.cityid != 5";
     hqlQuery = rewrite(query, tconf);
     System.out.println("HQL:" + hqlQuery);
     expected =
@@ -196,20 +186,18 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     // multiple range query
     hqlQuery =
       rewrite(
-        "select SUM(msr2) from testCube" + " where time_range_in(test_time_dim, '"
-          + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')"
-          + " OR time_range_in(test_time_dim, '" + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_START) + "','"
-          + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')", tconf);
+        "select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE_TTD
+          + " OR " + TWO_DAYS_RANGE_TTD_BEFORE_4_DAYS, tconf);
 
-    whereClauses = new HashMap<String, String>();
+    whereClauses = new HashMap<>();
     whereClauses.put(
-      CubeTestSetup.getDbName() + "c4_testfact2",
-      TestBetweenTimeRangeWriter.getBetweenClause("hourdim", "full_hour", getUptoHour(CubeTestSetup.TWODAYS_BACK),
-        getUptoHour(getOneLess(CubeTestSetup.NOW, UpdatePeriod.HOURLY.calendarField())),
+      getDbName() + "c4_testfact2",
+      TestBetweenTimeRangeWriter.getBetweenClause("hourdim", "full_hour", getUptoHour(TWODAYS_BACK),
+        getUptoHour(getOneLess(NOW, UpdatePeriod.HOURLY.calendarField())),
         TestTimeRangeWriter.DB_FORMAT)
         + " OR "
-        + TestBetweenTimeRangeWriter.getBetweenClause("hourdim", "full_hour", getUptoHour(BEFORE_4_DAYS_START),
-        getUptoHour(getOneLess(BEFORE_4_DAYS_END, UpdatePeriod.HOURLY.calendarField())),
+        + TestBetweenTimeRangeWriter.getBetweenClause("hourdim", "full_hour", getUptoHour(BEFORE_6_DAYS),
+        getUptoHour(getOneLess(BEFORE_4_DAYS, UpdatePeriod.HOURLY.calendarField())),
         TestTimeRangeWriter.DB_FORMAT));
     expected =
       getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", " join " + getDbName()
@@ -219,10 +207,8 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
 
     hqlQuery =
       rewrite(
-        "select to_date(test_time_dim), SUM(msr2) from testCube" + " where time_range_in(test_time_dim, '"
-          + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')"
-          + " OR time_range_in(test_time_dim, '" + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_START) + "','"
-          + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')", tconf);
+        "select to_date(test_time_dim), SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE_TTD
+          + " OR " + TWO_DAYS_RANGE_TTD_BEFORE_4_DAYS, tconf);
 
     expected =
       getExpectedQuery(cubeName, "select to_date(hourdim.full_hour), sum(testcube.msr2) FROM ", " join "
@@ -236,20 +222,19 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
   public void testCubeQueryWithTimeDimThruChain() throws Exception {
     // hourly partitions for two days
     Configuration tconf = new Configuration(conf);
-    tconf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
+    tconf.setBoolean(FAIL_QUERY_ON_PARTIAL_DATA, true);
     tconf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C4");
     tconf.setBoolean(CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, false);
     tconf.set(CubeQueryConfUtil.PART_WHERE_CLAUSE_DATE_FORMAT, "yyyy-MM-dd HH:mm:ss");
     tconf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C4"), "MONTHLY,DAILY,HOURLY");
 
     String query =
-      "SELECT test_time_dim2, msr2 FROM testCube where " + "time_range_in(test_time_dim2, '"
-        + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')";
+      "SELECT test_time_dim2, msr2 FROM testCube where " + TWO_DAYS_RANGE_TTD2;
     String hqlQuery = rewrite(query, tconf);
     Map<String, String> whereClauses = new HashMap<String, String>();
-    whereClauses.put(CubeTestSetup.getDbName() + "c4_testfact2", TestBetweenTimeRangeWriter.getBetweenClause(
-      "timehourchain", "full_hour", getUptoHour(CubeTestSetup.TWODAYS_BACK),
-      getUptoHour(getOneLess(CubeTestSetup.NOW, UpdatePeriod.HOURLY.calendarField())), TestTimeRangeWriter.DB_FORMAT));
+    whereClauses.put(getDbName() + "c4_testfact2", TestBetweenTimeRangeWriter.getBetweenClause(
+      "timehourchain", "full_hour", getUptoHour(TWODAYS_BACK),
+      getUptoHour(getOneLess(NOW, UpdatePeriod.HOURLY.calendarField())), TestTimeRangeWriter.DB_FORMAT));
     System.out.println("HQL:" + hqlQuery);
     String expected =
       getExpectedQuery(cubeName, "select timehourchain.full_hour, sum(testcube.msr2) FROM ", " join " + getDbName()
@@ -258,8 +243,7 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     query =
-      "SELECT msr2 FROM testCube where " + "time_range_in(test_time_dim2, '"
-        + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')";
+      "SELECT msr2 FROM testCube where " + TWO_DAYS_RANGE_TTD2;
     hqlQuery = rewrite(query, tconf);
     System.out.println("HQL:" + hqlQuery);
     expected =
@@ -269,9 +253,7 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     query =
-      "SELECT msr2 FROM testCube where testcube.cityid > 2 and " + "time_range_in(test_time_dim2, '"
-        + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW)
-        + "') and testcube.cityid != 5";
+      "SELECT msr2 FROM testCube where testcube.cityid > 2 and " + TWO_DAYS_RANGE_TTD2 + " and testcube.cityid != 5";
     hqlQuery = rewrite(query, tconf);
     System.out.println("HQL:" + hqlQuery);
     expected =
@@ -283,20 +265,18 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
     // multiple range query
     hqlQuery =
       rewrite(
-        "select SUM(msr2) from testCube" + " where time_range_in(test_time_dim2, '"
-          + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')"
-          + " OR time_range_in(test_time_dim2, '" + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_START) + "','"
-          + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')", tconf);
+        "select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE_TTD2
+          + " OR " + TWO_DAYS_RANGE_TTD2_BEFORE_4_DAYS, tconf);
 
     whereClauses = new HashMap<String, String>();
     whereClauses.put(
-      CubeTestSetup.getDbName() + "c4_testfact2",
-      TestBetweenTimeRangeWriter.getBetweenClause("timehourchain", "full_hour", getUptoHour(CubeTestSetup.TWODAYS_BACK),
-        getUptoHour(getOneLess(CubeTestSetup.NOW, UpdatePeriod.HOURLY.calendarField())),
+      getDbName() + "c4_testfact2",
+      TestBetweenTimeRangeWriter.getBetweenClause("timehourchain", "full_hour", getUptoHour(TWODAYS_BACK),
+        getUptoHour(getOneLess(NOW, UpdatePeriod.HOURLY.calendarField())),
         TestTimeRangeWriter.DB_FORMAT)
         + " OR "
-        + TestBetweenTimeRangeWriter.getBetweenClause("timehourchain", "full_hour", getUptoHour(BEFORE_4_DAYS_START),
-        getUptoHour(getOneLess(BEFORE_4_DAYS_END, UpdatePeriod.HOURLY.calendarField())),
+        + TestBetweenTimeRangeWriter.getBetweenClause("timehourchain", "full_hour", getUptoHour(BEFORE_6_DAYS),
+        getUptoHour(getOneLess(BEFORE_4_DAYS, UpdatePeriod.HOURLY.calendarField())),
         TestTimeRangeWriter.DB_FORMAT));
     expected =
       getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", " join " + getDbName()
@@ -307,10 +287,8 @@ public class TestTimeRangeWriterWithQuery extends TestQueryRewrite {
 
     hqlQuery =
       rewrite(
-        "select to_date(test_time_dim2), SUM(msr2) from testCube" + " where time_range_in(test_time_dim2, '"
-          + CubeTestSetup.getDateUptoHours(TWODAYS_BACK) + "','" + CubeTestSetup.getDateUptoHours(NOW) + "')"
-          + " OR time_range_in(test_time_dim2, '" + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_START) + "','"
-          + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')", tconf);
+        "select to_date(test_time_dim2), SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE_TTD2
+          + " OR " +TWO_DAYS_RANGE_TTD2_BEFORE_4_DAYS, tconf);
 
     expected =
       getExpectedQuery(cubeName, "select to_date(timehourchain.full_hour), sum(testcube.msr2) FROM ", " join "

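For readers following the refactor: the TWO_DAYS_RANGE_TTD2 constant substituted above stands in for the inline time_range_in expression deleted in these hunks. A minimal sketch of how such a constant could be defined in CubeTestSetup, assuming getDateUptoHours formats a date down to hour granularity (the BEFORE_4_DAYS variant would follow the same pattern); this mirrors the removed expression and is not necessarily the committed definition:

    // Hedged sketch, mirroring the inline expression removed in this change.
    public static final String TWO_DAYS_RANGE_TTD2 =
      "time_range_in(test_time_dim2, '" + getDateUptoHours(TWODAYS_BACK)
        + "','" + getDateUptoHours(NOW) + "')";
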
http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-server/src/main/java/org/apache/lens/server/query/QueryResultPurger.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryResultPurger.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryResultPurger.java
index 54c6574..2be11ea 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryResultPurger.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryResultPurger.java
@@ -28,7 +28,7 @@ import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.lens.cube.parse.DateUtil;
+import org.apache.lens.cube.metadata.DateUtil;
 import org.apache.lens.server.LensServices;
 import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.metrics.MetricsService;


[05/50] [abbrv] lens git commit: LENS-826 : Updates doc for lens.client.query.poll.interval

Posted by sh...@apache.org.
LENS-826 : Updates doc for lens.client.query.poll.interval


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/bc865870
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/bc865870
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/bc865870

Branch: refs/heads/LENS-581
Commit: bc8658705afc7d8cd582e312278ff30b405b3dc3
Parents: 3ed191a
Author: Raju Bairishetti <ra...@apache.org>
Authored: Wed Nov 18 12:29:54 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Wed Nov 18 12:29:54 2015 +0530

----------------------------------------------------------------------
 lens-client/src/main/resources/lens-client-default.xml |  5 +++++
 src/site/apt/user/client-config.apt                    | 10 ++++++----
 2 files changed, 11 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/bc865870/lens-client/src/main/resources/lens-client-default.xml
----------------------------------------------------------------------
diff --git a/lens-client/src/main/resources/lens-client-default.xml b/lens-client/src/main/resources/lens-client-default.xml
index d0d40c5..1b7c978 100644
--- a/lens-client/src/main/resources/lens-client-default.xml
+++ b/lens-client/src/main/resources/lens-client-default.xml
@@ -46,4 +46,9 @@
     <value>org.apache.lens.client.RequestFilter</value>
     <description>Implementation class for Request Filter</description>
   </property>
+  <property>
+    <name>lens.client.query.poll.interval</name>
+    <value>10</value>
+    <description>Interval at which query progress will be polled. Interval has to be given in milliseconds</description>
+  </property>
 </configuration>
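
A minimal sketch of how a client could read this property, assuming the Lens client configuration behaves like a standard Hadoop Configuration loaded from lens-client-default.xml (the generic Hadoop accessors shown here are illustrative, not necessarily what LensClient uses internally):

    import org.apache.hadoop.conf.Configuration;

    Configuration conf = new Configuration(false);
    conf.addResource("lens-client-default.xml");
    // interpreted in milliseconds per the description above; 10 is the shipped default
    long pollIntervalMs = conf.getLong("lens.client.query.poll.interval", 10);
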

http://git-wip-us.apache.org/repos/asf/lens/blob/bc865870/src/site/apt/user/client-config.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/user/client-config.apt b/src/site/apt/user/client-config.apt
index 49bc4e2..3d990a9 100644
--- a/src/site/apt/user/client-config.apt
+++ b/src/site/apt/user/client-config.apt
@@ -26,12 +26,14 @@ Lens client configuration
 *--+--+---+--+
 |1|lens.client.dbname|default|Default lens database|
 *--+--+---+--+
-|2|lens.client.requestfilter.ws.filter.impl|org.apache.lens.client.RequestFilter|Implementation class for Request Filter|
+|2|lens.client.query.poll.interval|10|Interval at which query progress will be polled. Interval has to be given in milliseconds|
 *--+--+---+--+
-|3|lens.client.user.name|anonymous|Lens client user name|
+|3|lens.client.requestfilter.ws.filter.impl|org.apache.lens.client.RequestFilter|Implementation class for Request Filter|
 *--+--+---+--+
-|4|lens.client.ws.request.filternames|requestfilter|These JAX-RS filters would be started in the specified order when lens-client starts|
+|4|lens.client.user.name|anonymous|Lens client user name|
 *--+--+---+--+
-|5|lens.server.base.url|http://0.0.0.0:9999/lensapi|The base url for the lens server|
+|5|lens.client.ws.request.filternames|requestfilter|These JAX-RS filters would be started in the specified order when lens-client starts|
+*--+--+---+--+
+|6|lens.server.base.url|http://0.0.0.0:9999/lensapi|The base url for the lens server|
 *--+--+---+--+
 The configuration parameters and their default values


[19/50] [abbrv] lens git commit: LENS-878 : Refactor inner classes in JoinResolver

Posted by sh...@apache.org.
LENS-878 : Refactor inner classes in JoinResolver


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/f7ab827e
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/f7ab827e
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/f7ab827e

Branch: refs/heads/LENS-581
Commit: f7ab827e967e8a6b44cd8d540e293dbd01ff8d9b
Parents: 7a3a173
Author: Amareshwari Sriramadasu <am...@gmail.com>
Authored: Wed Nov 25 14:22:37 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Wed Nov 25 14:22:37 2015 +0530

----------------------------------------------------------------------
 .../apache/lens/cube/parse/AutoJoinContext.java | 760 ++++++++++++++
 .../lens/cube/parse/CubeQueryContext.java       |   2 +-
 .../org/apache/lens/cube/parse/JoinClause.java  | 144 +++
 .../apache/lens/cube/parse/JoinResolver.java    | 982 +------------------
 .../org/apache/lens/cube/parse/JoinTree.java    | 164 ++++
 .../lens/cube/parse/TimerangeResolver.java      |   2 +-
 6 files changed, 1076 insertions(+), 978 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/f7ab827e/lens-cube/src/main/java/org/apache/lens/cube/parse/AutoJoinContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AutoJoinContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AutoJoinContext.java
new file mode 100644
index 0000000..9472506
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AutoJoinContext.java
@@ -0,0 +1,760 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+import java.util.*;
+
+import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.cube.metadata.*;
+import org.apache.lens.server.api.error.LensException;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.ql.parse.JoinType;
+
+import lombok.Getter;
+import lombok.Setter;
+import lombok.extern.slf4j.Slf4j;
+
+/**
+ * Store join chain information resolved by join resolver
+ */
+@Slf4j
+public class AutoJoinContext {
+  // Map of a joined table to list of all possible paths from that table to
+  // the target
+  private final Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> allPaths;
+  @Getter
+  // User supplied partial join conditions
+  private final Map<AbstractCubeTable, String> partialJoinConditions;
+  // True if the query contains user supplied partial join conditions
+  @Getter
+  private final boolean partialJoinChains;
+  @Getter
+  // Map of joined table to the join type (if provided by user)
+  private final Map<AbstractCubeTable, JoinType> tableJoinTypeMap;
+
+  // True if joins were resolved automatically
+  private boolean joinsResolved;
+  // Target table for the auto join resolver
+  private final AbstractCubeTable autoJoinTarget;
+  // Configuration string to control join type
+  private String joinTypeCfg;
+
+  // Map of a joined table to its columns which are part of any of the join
+  // paths. This is used in candidate table resolver
+  @Getter
+  private Map<Dimension, Map<AbstractCubeTable, List<String>>> joinPathFromColumns =
+    new HashMap<Dimension, Map<AbstractCubeTable, List<String>>>();
+
+  @Getter
+  private Map<Dimension, Map<AbstractCubeTable, List<String>>> joinPathToColumns =
+    new HashMap<Dimension, Map<AbstractCubeTable, List<String>>>();
+
+  // there can be a separate join clause for each fact in case of multi-fact queries
+  @Getter
+  Map<CandidateFact, JoinClause> factClauses = new HashMap<CandidateFact, JoinClause>();
+  @Getter
+  @Setter
+  JoinClause minCostClause;
+  private final boolean flattenBridgeTables;
+  private final String bridgeTableFieldAggr;
+
+  public AutoJoinContext(Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> allPaths,
+                         Map<Dimension, CubeQueryContext.OptionalDimCtx> optionalDimensions,
+                         Map<AbstractCubeTable, String> partialJoinConditions,
+                         boolean partialJoinChains, Map<AbstractCubeTable, JoinType> tableJoinTypeMap,
+                         AbstractCubeTable autoJoinTarget, String joinTypeCfg, boolean joinsResolved,
+                         boolean flattenBridgeTables, String bridgeTableFieldAggr) {
+    this.allPaths = allPaths;
+    initJoinPathColumns();
+    this.partialJoinConditions = partialJoinConditions;
+    this.partialJoinChains = partialJoinChains;
+    this.tableJoinTypeMap = tableJoinTypeMap;
+    this.autoJoinTarget = autoJoinTarget;
+    this.joinTypeCfg = joinTypeCfg;
+    this.joinsResolved = joinsResolved;
+    this.flattenBridgeTables = flattenBridgeTables;
+    this.bridgeTableFieldAggr = bridgeTableFieldAggr;
+    log.debug("All join paths:{}", allPaths);
+    log.debug("Join path from columns:{}", joinPathFromColumns);
+    log.debug("Join path to columns:{}", joinPathToColumns);
+  }
+
+  public AbstractCubeTable getAutoJoinTarget() {
+    return autoJoinTarget;
+  }
+
+  private JoinClause getJoinClause(CandidateFact fact) {
+    if (fact == null) {
+      return minCostClause;
+    }
+    return factClauses.get(fact);
+  }
+
+  // Populate map of tables to their columns which are present in any of the
+  // join paths
+  private void initJoinPathColumns() {
+    for (List<SchemaGraph.JoinPath> paths : allPaths.values()) {
+      for (int i = 0; i < paths.size(); i++) {
+        SchemaGraph.JoinPath jp = paths.get(i);
+        jp.initColumnsForTable();
+      }
+    }
+    refreshJoinPathColumns();
+  }
+
+  public void refreshJoinPathColumns() {
+    joinPathFromColumns.clear();
+    joinPathToColumns.clear();
+    for (Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>> joinPathEntry : allPaths.entrySet()) {
+      List<SchemaGraph.JoinPath> joinPaths = joinPathEntry.getValue();
+      Map<AbstractCubeTable, List<String>> fromColPaths = joinPathFromColumns.get(joinPathEntry.getKey().getObject());
+      Map<AbstractCubeTable, List<String>> toColPaths = joinPathToColumns.get(joinPathEntry.getKey().getObject());
+      if (fromColPaths == null) {
+        fromColPaths = new HashMap<AbstractCubeTable, List<String>>();
+        joinPathFromColumns.put(joinPathEntry.getKey().getObject(), fromColPaths);
+      }
+
+      if (toColPaths == null) {
+        toColPaths = new HashMap<AbstractCubeTable, List<String>>();
+        joinPathToColumns.put(joinPathEntry.getKey().getObject(), toColPaths);
+      }
+      populateJoinPathCols(joinPaths, fromColPaths, toColPaths);
+    }
+  }
+
+  private void populateJoinPathCols(List<SchemaGraph.JoinPath> joinPaths,
+    Map<AbstractCubeTable, List<String>> fromPathColumns, Map<AbstractCubeTable, List<String>> toPathColumns) {
+    for (SchemaGraph.JoinPath path : joinPaths) {
+      for (SchemaGraph.TableRelationship edge : path.getEdges()) {
+        AbstractCubeTable fromTable = edge.getFromTable();
+        String fromColumn = edge.getFromColumn();
+        List<String> columnsOfFromTable = fromPathColumns.get(fromTable);
+        if (columnsOfFromTable == null) {
+          columnsOfFromTable = new ArrayList<String>();
+          fromPathColumns.put(fromTable, columnsOfFromTable);
+        }
+        columnsOfFromTable.add(fromColumn);
+
+        // Similarly populate for the 'to' table
+        AbstractCubeTable toTable = edge.getToTable();
+        String toColumn = edge.getToColumn();
+        List<String> columnsOfToTable = toPathColumns.get(toTable);
+        if (columnsOfToTable == null) {
+          columnsOfToTable = new ArrayList<String>();
+          toPathColumns.put(toTable, columnsOfToTable);
+        }
+        columnsOfToTable.add(toColumn);
+      }
+    }
+  }
+
+  public void removeJoinedTable(Dimension dim) {
+    allPaths.remove(Aliased.create(dim));
+    joinPathFromColumns.remove(dim);
+  }
+
+  public Map<AbstractCubeTable, String> getPartialJoinConditions() {
+    return partialJoinConditions;
+  }
+
+  public String getFromString(String fromTable, CandidateFact fact, Set<Dimension> qdims,
+    Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext cubeql) throws LensException {
+    String fromString = fromTable;
+    log.info("All paths dump:{}", cubeql.getAutoJoinCtx().getAllPaths());
+    if (qdims == null || qdims.isEmpty()) {
+      return fromString;
+    }
+    // Compute the merged join clause string for the min cost joinclause
+    String clause = getMergedJoinClause(cubeql, cubeql.getAutoJoinCtx().getJoinClause(fact), dimsToQuery);
+
+    fromString += clause;
+    return fromString;
+  }
+
+  // Some refactoring needed to account for multiple join paths
+  public String getMergedJoinClause(CubeQueryContext cubeql, JoinClause joinClause,
+                                    Map<Dimension, CandidateDim> dimsToQuery) {
+    Set<String> clauses = new LinkedHashSet<String>();
+    String joinTypeStr = "";
+    JoinType joinType = JoinType.INNER;
+
+    // this flag is set to true if user has specified a partial join chain
+    if (!partialJoinChains) {
+      // User has not specified any join conditions. In this case, we rely on
+      // configuration for the join type
+      if (StringUtils.isNotBlank(joinTypeCfg)) {
+        joinType = JoinType.valueOf(joinTypeCfg.toUpperCase());
+        joinTypeStr = JoinResolver.getJoinTypeStr(joinType);
+      }
+    }
+
+    Iterator<JoinTree> iter = joinClause.getJoinTree().dft();
+    boolean hasBridgeTable = false;
+    boolean initedBridgeClauses = false;
+    StringBuilder bridgeSelectClause = new StringBuilder();
+    StringBuilder bridgeFromClause = new StringBuilder();
+    StringBuilder bridgeFilterClause = new StringBuilder();
+    StringBuilder bridgeJoinClause = new StringBuilder();
+    StringBuilder bridgeGroupbyClause = new StringBuilder();
+
+    while (iter.hasNext()) {
+      JoinTree cur = iter.next();
+      if (partialJoinChains) {
+        joinType = cur.getJoinType();
+        joinTypeStr = JoinResolver.getJoinTypeStr(joinType);
+      }
+      SchemaGraph.TableRelationship rel = cur.parentRelationship;
+      String toAlias, fromAlias;
+      fromAlias = cur.parent.getAlias();
+      toAlias = cur.getAlias();
+      hasBridgeTable = flattenBridgeTables && (hasBridgeTable || rel.isMapsToMany());
+      // We have to push user specified filters for the joined tables
+      String userFilter = null;
+      // Partition condition on the tables also needs to be pushed depending
+      // on the join
+      String storageFilter = null;
+
+      if (JoinType.INNER == joinType || JoinType.LEFTOUTER == joinType || JoinType.LEFTSEMI == joinType) {
+        // For inner and left joins push filter of right table
+        userFilter = partialJoinConditions.get(rel.getToTable());
+        if (partialJoinConditions.containsKey(rel.getFromTable())) {
+          if (StringUtils.isNotBlank(userFilter)) {
+            userFilter += (" AND " + partialJoinConditions.get(rel.getFromTable()));
+          } else {
+            userFilter = partialJoinConditions.get(rel.getFromTable());
+          }
+        }
+        storageFilter = getStorageFilter(dimsToQuery, rel.getToTable(), toAlias);
+        dimsToQuery.get(rel.getToTable()).setWhereClauseAdded();
+      } else if (JoinType.RIGHTOUTER == joinType) {
+        // For right outer joins, push filters of left table
+        userFilter = partialJoinConditions.get(rel.getFromTable());
+        if (partialJoinConditions.containsKey(rel.getToTable())) {
+          if (StringUtils.isNotBlank(userFilter)) {
+            userFilter += (" AND " + partialJoinConditions.get(rel.getToTable()));
+          } else {
+            userFilter = partialJoinConditions.get(rel.getToTable());
+          }
+        }
+        if (rel.getFromTable() instanceof Dimension) {
+          storageFilter = getStorageFilter(dimsToQuery, rel.getFromTable(), fromAlias);
+          dimsToQuery.get(rel.getFromTable()).setWhereClauseAdded();
+        }
+      } else if (JoinType.FULLOUTER == joinType) {
+        // For full outer we need to push filters of both left and right
+        // tables in the join clause
+        String leftFilter = null, rightFilter = null;
+        String leftStorageFilter = null, rightStorageFilter = null;
+
+        if (StringUtils.isNotBlank(partialJoinConditions.get(rel.getFromTable()))) {
+          leftFilter = partialJoinConditions.get(rel.getFromTable()) + " and ";
+        }
+
+        if (rel.getFromTable() instanceof Dimension) {
+          leftStorageFilter = getStorageFilter(dimsToQuery, rel.getFromTable(), fromAlias);
+          if (StringUtils.isNotBlank((leftStorageFilter))) {
+            dimsToQuery.get(rel.getFromTable()).setWhereClauseAdded();
+          }
+        }
+
+        if (StringUtils.isNotBlank(partialJoinConditions.get(rel.getToTable()))) {
+          rightFilter = partialJoinConditions.get(rel.getToTable());
+        }
+
+        rightStorageFilter = getStorageFilter(dimsToQuery, rel.getToTable(), toAlias);
+        if (StringUtils.isNotBlank(rightStorageFilter)) {
+          if (StringUtils.isNotBlank((leftStorageFilter))) {
+            leftStorageFilter += " and ";
+          }
+          dimsToQuery.get(rel.getToTable()).setWhereClauseAdded();
+        }
+
+        userFilter = (leftFilter == null ? "" : leftFilter) + (rightFilter == null ? "" : rightFilter);
+        storageFilter =
+          (leftStorageFilter == null ? "" : leftStorageFilter)
+            + (rightStorageFilter == null ? "" : rightStorageFilter);
+      }
+      StringBuilder clause = new StringBuilder();
+
+      // if a bridge table is present in the path
+      if (hasBridgeTable) {
+        // if any relation has bridge table, the clause becomes the following :
+        // join (" select " + joinkey + " aggr over fields from bridge table + from bridgeTable + [where user/storage
+        // filters] + groupby joinkey) on joincond"
+        // Or
+        // " join (select " + joinkey + " aggr over fields from table reached through bridge table + from bridge table
+        // join <next tables> on join condition + [and user/storage filters] + groupby joinkey) on joincond
+        if (!initedBridgeClauses) {
+          // we just found a bridge table in the path; we need to initialize the clauses for the
+          // subquery required for aggregating fields of the bridge table
+          // initialize select clause with join key
+          bridgeSelectClause.append(" (select ").append(toAlias).append(".").append(rel.getToColumn()).append(" as ")
+          .append(rel.getToColumn());
+          // group by join key
+          bridgeGroupbyClause.append(" group by ").append(toAlias).append(".").append(rel.getToColumn());
+          // from clause with bridge table
+          bridgeFromClause.append(" from ").append(dimsToQuery.get(rel.getToTable()).getStorageString(toAlias));
+          // we need to initialize the filter clause with the user filter or storage filter, if applicable
+          if (StringUtils.isNotBlank(userFilter)) {
+            bridgeFilterClause.append(userFilter);
+          }
+          if (StringUtils.isNotBlank(storageFilter)) {
+            if (StringUtils.isNotBlank(bridgeFilterClause.toString())) {
+              bridgeFilterClause.append(" and ");
+            }
+            bridgeFilterClause.append(storageFilter);
+          }
+          // initialize final join clause
+          bridgeJoinClause.append(" on ").append(fromAlias).append(".")
+            .append(rel.getFromColumn()).append(" = ").append("%s")
+            .append(".").append(rel.getToColumn());
+          initedBridgeClauses = true;
+        } else {
+          // if bridge clauses are already initialized, this is the next table getting joined with the bridge table
+          // we will append a simple join clause
+          bridgeFromClause.append(joinTypeStr).append(" join ");
+          bridgeFromClause.append(dimsToQuery.get(rel.getToTable()).getStorageString(toAlias));
+          bridgeFromClause.append(" on ").append(fromAlias).append(".")
+            .append(rel.getFromColumn()).append(" = ").append(toAlias)
+            .append(".").append(rel.getToColumn());
+
+          if (StringUtils.isNotBlank(userFilter)) {
+            bridgeFromClause.append(" and ").append(userFilter);
+          }
+          if (StringUtils.isNotBlank(storageFilter)) {
+            bridgeFromClause.append(" and ").append(storageFilter);
+          }
+        }
+        if (cubeql.getTblAliasToColumns().get(toAlias) != null
+          && !cubeql.getTblAliasToColumns().get(toAlias).isEmpty()) {
+          // there are fields selected from this table after seeing the bridge table in the path,
+          // so we should build a subquery for this selection
+          clause.append(joinTypeStr).append(" join ");
+          clause.append(bridgeSelectClause.toString());
+          for (String col : cubeql.getTblAliasToColumns().get(toAlias)) {
+            clause.append(",").append(bridgeTableFieldAggr).append("(").append(toAlias)
+              .append(".").append(col)
+              .append(")")
+              .append(" as ").append(col);
+          }
+          String bridgeFrom = bridgeFromClause.toString();
+          clause.append(bridgeFrom);
+          String bridgeFilter = bridgeFilterClause.toString();
+          if (StringUtils.isNotBlank(bridgeFilter)) {
+            if (bridgeFrom.contains(" join ")) {
+              clause.append(" and ");
+            } else {
+              clause.append(" where");
+            }
+            clause.append(bridgeFilter.toString());
+          }
+          clause.append(bridgeGroupbyClause.toString());
+          clause.append(") ").append(toAlias);
+          clause.append(String.format(bridgeJoinClause.toString(), toAlias));
+          clauses.add(clause.toString());
+        }
+        if (cur.getSubtrees().isEmpty()) {
+          // clear bridge flags and builders, as there are no more clauses in this tree.
+          hasBridgeTable = false;
+          initedBridgeClauses = false;
+          bridgeSelectClause.setLength(0);
+          bridgeFromClause.setLength(0);
+          bridgeFilterClause.setLength(0);
+          bridgeJoinClause.setLength(0);
+          bridgeGroupbyClause.setLength(0);
+        }
+      } else {
+        // Simple join clause is :
+        // jointype + " join " + destTable + " on " + joincond + [" and" + userfilter] + ["and" + storageFilter]
+        clause.append(joinTypeStr).append(" join ");
+        //Add storage table name followed by alias
+        clause.append(dimsToQuery.get(rel.getToTable()).getStorageString(toAlias));
+        clause.append(" on ").append(fromAlias).append(".")
+          .append(rel.getFromColumn()).append(" = ").append(toAlias)
+          .append(".").append(rel.getToColumn());
+
+        if (StringUtils.isNotBlank(userFilter)) {
+          clause.append(" and ").append(userFilter);
+        }
+        if (StringUtils.isNotBlank(storageFilter)) {
+          clause.append(" and ").append(storageFilter);
+        }
+        clauses.add(clause.toString());
+      }
+    }
+    return StringUtils.join(clauses, "");
+  }
+
+  public Set<Dimension> getDimsOnPath(Map<Aliased<Dimension>, List<SchemaGraph.TableRelationship>> joinChain,
+    Set<Dimension> qdims) {
+    Set<Dimension> dimsOnPath = new HashSet<Dimension>();
+    for (Map.Entry<Aliased<Dimension>, List<SchemaGraph.TableRelationship>> entry : joinChain.entrySet()) {
+      List<SchemaGraph.TableRelationship> chain = entry.getValue();
+      Dimension table = entry.getKey().getObject();
+
+      // check if join with this dimension is required
+      if (!qdims.contains(table)) {
+        continue;
+      }
+
+      for (int i = chain.size() - 1; i >= 0; i--) {
+        SchemaGraph.TableRelationship rel = chain.get(i);
+        dimsOnPath.add((Dimension) rel.getToTable());
+      }
+    }
+    return dimsOnPath;
+  }
+
+  private String getStorageFilter(Map<Dimension, CandidateDim> dimsToQuery, AbstractCubeTable table, String alias) {
+    String whereClause = "";
+    if (dimsToQuery != null && dimsToQuery.get(table) != null) {
+      if (StringUtils.isNotBlank(dimsToQuery.get(table).getWhereClause())) {
+        whereClause = dimsToQuery.get(table).getWhereClause();
+        if (alias != null) {
+          whereClause = StorageUtil.getWhereClause(whereClause, alias);
+        }
+      }
+    }
+    return whereClause;
+  }
+
+  /**
+   * @return the joinsResolved
+   */
+  public boolean isJoinsResolved() {
+    return joinsResolved;
+  }
+
+  // Includes both queried join paths and optional join paths
+  public Set<String> getAllJoinPathColumnsOfTable(AbstractCubeTable table) {
+    Set<String> allPaths = new HashSet<String>();
+    for (Map<AbstractCubeTable, List<String>> optPaths : joinPathFromColumns.values()) {
+      if (optPaths.get(table) != null) {
+        allPaths.addAll(optPaths.get(table));
+      }
+    }
+
+    for (Map<AbstractCubeTable, List<String>> optPaths : joinPathToColumns.values()) {
+      if (optPaths.get(table) != null) {
+        allPaths.addAll(optPaths.get(table));
+      }
+    }
+
+    return allPaths;
+  }
+
+  public void pruneAllPaths(CubeInterface cube, final Set<CandidateFact> cfacts,
+    final Map<Dimension, CandidateDim> dimsToQuery) {
+    // Remove join paths which cannot be satisfied by the resolved candidate
+    // fact and dimension tables
+    if (cfacts != null) {
+      // include columns from all picked facts
+      Set<String> factColumns = new HashSet<String>();
+      for (CandidateFact cfact : cfacts) {
+        factColumns.addAll(cfact.getColumns());
+      }
+
+      for (List<SchemaGraph.JoinPath> paths : allPaths.values()) {
+        for (int i = 0; i < paths.size(); i++) {
+          SchemaGraph.JoinPath jp = paths.get(i);
+          List<String> cubeCols = jp.getColumnsForTable((AbstractCubeTable) cube);
+          if (cubeCols != null && !factColumns.containsAll(cubeCols)) {
+            // This path requires some columns from the cube which are not
+            // present in the candidate fact
+            // Remove this path
+            log.info("Removing join path:{} as columns :{} dont exist", jp, cubeCols);
+            paths.remove(i);
+            i--;
+          }
+        }
+      }
+      pruneEmptyPaths(allPaths);
+    }
+    pruneAllPaths(dimsToQuery);
+  }
+
+  /**
+   * Prunes allPaths by removing paths which contain columns that are not present in any candidate dims.
+   *
+   * @param candidateDims
+   */
+  public void pruneAllPathsForCandidateDims(Map<Dimension, Set<CandidateDim>> candidateDims) {
+    Map<Dimension, Set<String>> dimColumns = new HashMap<Dimension, Set<String>>();
+    // populate all columns present in candidate dims for each dimension
+    for (Map.Entry<Dimension, Set<CandidateDim>> entry : candidateDims.entrySet()) {
+      Dimension dim = entry.getKey();
+      Set<String> allColumns = new HashSet<String>();
+      for (CandidateDim cdim : entry.getValue()) {
+        allColumns.addAll(cdim.getColumns());
+      }
+      dimColumns.put(dim, allColumns);
+    }
+    for (List<SchemaGraph.JoinPath> paths : allPaths.values()) {
+      for (int i = 0; i < paths.size(); i++) {
+        SchemaGraph.JoinPath jp = paths.get(i);
+        for (AbstractCubeTable refTable : jp.getAllTables()) {
+          List<String> cols = jp.getColumnsForTable(refTable);
+          if (refTable instanceof Dimension) {
+            if (cols != null && (dimColumns.get(refTable) == null || !dimColumns.get(refTable).containsAll(cols))) {
+              // This path requires some columns from the cube which are not present in any candidate dim
+              // Remove this path
+              log.info("Removing join path:{} as columns :{} dont exist", jp, cols);
+              paths.remove(i);
+              i--;
+              break;
+            }
+          }
+        }
+      }
+    }
+    pruneEmptyPaths(allPaths);
+  }
+
+  private void pruneEmptyPaths(Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> allPaths) {
+    Iterator<Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>>> iter = allPaths.entrySet().iterator();
+    while (iter.hasNext()) {
+      Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>> entry = iter.next();
+      if (entry.getValue().isEmpty()) {
+        iter.remove();
+      }
+    }
+  }
+
+  private Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> pruneFactPaths(CubeInterface cube,
+    final CandidateFact cfact) {
+    Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> prunedPaths
+      = new HashMap<Aliased<Dimension>, List<SchemaGraph.JoinPath>>();
+    // Remove join paths which cannot be satisfied by the candidate fact
+    for (Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>> ppaths : allPaths.entrySet()) {
+      prunedPaths.put(ppaths.getKey(), new ArrayList<SchemaGraph.JoinPath>(ppaths.getValue()));
+      List<SchemaGraph.JoinPath> paths = prunedPaths.get(ppaths.getKey());
+      for (int i = 0; i < paths.size(); i++) {
+        SchemaGraph.JoinPath jp = paths.get(i);
+        List<String> cubeCols = jp.getColumnsForTable((AbstractCubeTable) cube);
+        if (cubeCols != null && !cfact.getColumns().containsAll(cubeCols)) {
+          // This path requires some columns from the cube which are not
+          // present in the candidate fact
+          // Remove this path
+          log.info("Removing join path:{} as columns :{} dont exist", jp, cubeCols);
+          paths.remove(i);
+          i--;
+        }
+      }
+    }
+    pruneEmptyPaths(prunedPaths);
+    return prunedPaths;
+  }
+
+  private void pruneAllPaths(final Map<Dimension, CandidateDim> dimsToQuery) {
+    // Remove join paths which cannot be satisfied by the resolved dimension
+    // tables
+    if (dimsToQuery != null && !dimsToQuery.isEmpty()) {
+      for (CandidateDim candidateDim : dimsToQuery.values()) {
+        Set<String> dimCols = candidateDim.dimtable.getAllFieldNames();
+        for (List<SchemaGraph.JoinPath> paths : allPaths.values()) {
+          for (int i = 0; i < paths.size(); i++) {
+            SchemaGraph.JoinPath jp = paths.get(i);
+            List<String> candidateDimCols = jp.getColumnsForTable(candidateDim.getBaseTable());
+            if (candidateDimCols != null && !dimCols.containsAll(candidateDimCols)) {
+              // This path requires some columns from the dimension which are
+              // not present in the candidate dim
+              // Remove this path
+              log.info("Removing join path:{} as columns :{} dont exist", jp, candidateDimCols);
+              paths.remove(i);
+              i--;
+            }
+          }
+        }
+      }
+      pruneEmptyPaths(allPaths);
+    }
+  }
+
+  /**
+   * There can be multiple join paths between a dimension and the target. Set of all possible join clauses is the
+   * cartesian product of join paths of all dimensions
+   */
+  private Iterator<JoinClause> getJoinClausesForAllPaths(final CandidateFact fact,
+    final Set<Dimension> qdims, final CubeQueryContext cubeql) {
+    Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> allPaths;
+    // if fact is passed only look at paths possible from fact to dims
+    if (fact != null) {
+      allPaths = pruneFactPaths(cubeql.getCube(), fact);
+    } else {
+      allPaths = new LinkedHashMap<Aliased<Dimension>, List<SchemaGraph.JoinPath>>(this.allPaths);
+    }
+    // prune allPaths with qdims
+    log.info("pruning allPaths before generating all permutations.");
+    log.info("allPaths: {}", allPaths);
+    log.info("qdims: {}", qdims);
+    pruneAllPathsWithQueriedDims(allPaths, qdims);
+
+    // Number of paths in each path set
+    final int[] groupSizes = new int[allPaths.values().size()];
+    // Total number of elements in the cartesian product
+    int numSamples = 1;
+    // All path sets
+    final List<List<SchemaGraph.JoinPath>> pathSets = new ArrayList<List<SchemaGraph.JoinPath>>();
+    // Dimension corresponding to the path sets
+    final List<Aliased<Dimension>> dimensions = new ArrayList<Aliased<Dimension>>(groupSizes.length);
+
+    int i = 0;
+    for (Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>> entry : allPaths.entrySet()) {
+      dimensions.add(entry.getKey());
+      List<SchemaGraph.JoinPath> group = entry.getValue();
+      pathSets.add(group);
+      groupSizes[i] = group.size();
+      numSamples *= groupSizes[i];
+      i++;
+    }
+
+    final int[] selection = new int[groupSizes.length];
+    final int MAX_SAMPLE_COUNT = numSamples;
+
+    // Return a lazy iterator over all possible join chains
+    return new Iterator<JoinClause>() {
+      int sample = 0;
+
+      @Override
+      public boolean hasNext() {
+        return sample < MAX_SAMPLE_COUNT;
+      }
+
+      @Override
+      public JoinClause next() {
+        Map<Aliased<Dimension>, List<SchemaGraph.TableRelationship>> chain
+          = new LinkedHashMap<Aliased<Dimension>, List<SchemaGraph.TableRelationship>>();
+        //generate next permutation.
+        for (int i = groupSizes.length - 1, base = sample; i >= 0; base /= groupSizes[i], i--) {
+          selection[i] = base % groupSizes[i];
+        }
+        for (int i = 0; i < selection.length; i++) {
+          int selectedPath = selection[i];
+          List<SchemaGraph.TableRelationship> path = pathSets.get(i).get(selectedPath).getEdges();
+          chain.put(dimensions.get(i), path);
+        }
+
+        Set<Dimension> dimsOnPath = getDimsOnPath(chain, qdims);
+
+        sample++;
+        // Cost of join = number of tables joined in the clause
+        return new JoinClause(cubeql, chain, dimsOnPath);
+      }
+
+      @Override
+      public void remove() {
+        throw new UnsupportedOperationException("Cannot remove elements!");
+      }
+    };
+  }
+
+  /**
+   * Given allPaths, removes entries whose key is a non-join-chain dimension not contained in qdims
+   *
+   * @param allPaths
+   * @param qdims
+   */
+  private void pruneAllPathsWithQueriedDims(Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> allPaths,
+    Set<Dimension> qdims) {
+    Iterator<Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>>> iter = allPaths.entrySet().iterator();
+    while (iter.hasNext()) {
+      Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>> cur = iter.next();
+      if (!qdims.contains(cur.getKey().getObject())) {
+        log.info("removing from allPaths: {}", cur);
+        iter.remove();
+      }
+    }
+  }
+
+  public Set<Dimension> pickOptionalTables(final CandidateFact fact,
+    Set<Dimension> qdims, CubeQueryContext cubeql) throws LensException {
+    // Find the min cost join clause and add dimensions in the clause as optional dimensions
+    Set<Dimension> joiningOptionalTables = new HashSet<Dimension>();
+    if (qdims == null) {
+      return joiningOptionalTables;
+    }
+    // find least cost path
+    Iterator<JoinClause> itr = getJoinClausesForAllPaths(fact, qdims, cubeql);
+    JoinClause minCostClause = null;
+    while (itr.hasNext()) {
+      JoinClause clause = itr.next();
+      if (minCostClause == null || minCostClause.getCost() > clause.getCost()) {
+        minCostClause = clause;
+      }
+    }
+
+    if (minCostClause == null) {
+      throw new LensException(LensCubeErrorCode.NO_JOIN_PATH.getLensErrorInfo(),
+          qdims.toString(), autoJoinTarget.getName());
+    }
+
+    log.info("Fact: {} minCostClause:{}", fact, minCostClause);
+    if (fact != null) {
+      cubeql.getAutoJoinCtx().getFactClauses().put(fact, minCostClause);
+    } else {
+      cubeql.getAutoJoinCtx().setMinCostClause(minCostClause);
+    }
+    for (Dimension dim : minCostClause.getDimsInPath()) {
+      if (!qdims.contains(dim)) {
+        joiningOptionalTables.add(dim);
+      }
+    }
+
+    minCostClause.initChainColumns();
+    // prune candidate dims of joiningOptionalTables wrt joining columns
+    for (Dimension dim : joiningOptionalTables) {
+      for (Iterator<CandidateDim> i = cubeql.getCandidateDimTables().get(dim).iterator(); i.hasNext();) {
+        CandidateDim cdim = i.next();
+        CubeDimensionTable dimtable = cdim.dimtable;
+        if (!cdim.getColumns().containsAll(minCostClause.chainColumns.get(dim))) {
+          i.remove();
+          log.info("Not considering dimtable:{} as its columns are not part of any join paths. Join columns:{}",
+            dimtable, minCostClause.chainColumns.get(dim));
+          cubeql.addDimPruningMsgs(dim, cdim.dimtable,
+            CandidateTablePruneCause.noColumnPartOfAJoinPath(minCostClause.chainColumns.get(dim)));
+        }
+      }
+      if (cubeql.getCandidateDimTables().get(dim).size() == 0) {
+        throw new LensException(LensCubeErrorCode.NO_DIM_HAS_COLUMN.getLensErrorInfo(), dim.getName(),
+          minCostClause.chainColumns.get(dim).toString());
+      }
+    }
+
+    return joiningOptionalTables;
+  }
+
+  public Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> getAllPaths() {
+    return allPaths;
+  }
+
+  public boolean isReachableDim(Dimension dim) {
+    Aliased<Dimension> aliased = Aliased.create(dim);
+    return isReachableDim(aliased);
+  }
+
+  public boolean isReachableDim(Dimension dim, String alias) {
+    Aliased<Dimension> aliased = Aliased.create(dim, alias);
+    return isReachableDim(aliased);
+  }
+
+  private boolean isReachableDim(Aliased<Dimension> aliased) {
+    return allPaths.containsKey(aliased) && !allPaths.get(aliased).isEmpty();
+  }
+}

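The lazy iterator returned by getJoinClausesForAllPaths above walks the cartesian product of per-dimension join-path lists by decoding the running sample counter in mixed radix, one digit per dimension. A self-contained sketch of just that decoding, with illustrative group sizes (names here are mine, not from the patch):

    import java.util.Arrays;

    public class PathPermutations {
      public static void main(String[] args) {
        int[] groupSizes = {2, 3};   // e.g. two dimensions with 2 and 3 join paths
        int numSamples = 2 * 3;      // total elements in the cartesian product
        int[] selection = new int[groupSizes.length];
        for (int sample = 0; sample < numSamples; sample++) {
          // same decoding as in the iterator's next(): divide the sample down,
          // extracting one digit per group
          for (int i = groupSizes.length - 1, base = sample; i >= 0; base /= groupSizes[i], i--) {
            selection[i] = base % groupSizes[i];
          }
          System.out.println(Arrays.toString(selection)); // [0, 0] through [1, 2]
        }
      }
    }
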
http://git-wip-us.apache.org/repos/asf/lens/blob/f7ab827e/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index a660133..cf114c9 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -154,7 +154,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
   private CubeMetastoreClient metastoreClient;
   @Getter
   @Setter
-  private JoinResolver.AutoJoinContext autoJoinCtx;
+  private AutoJoinContext autoJoinCtx;
   @Getter
   @Setter
   private ExpressionResolver.ExpressionResolverContext exprCtx;

http://git-wip-us.apache.org/repos/asf/lens/blob/f7ab827e/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinClause.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinClause.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinClause.java
new file mode 100644
index 0000000..d9a8249
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinClause.java
@@ -0,0 +1,144 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+import java.util.*;
+
+import org.apache.lens.cube.metadata.AbstractCubeTable;
+import org.apache.lens.cube.metadata.Dimension;
+import org.apache.lens.cube.metadata.SchemaGraph;
+
+import org.apache.hadoop.hive.ql.parse.JoinType;
+
+import lombok.Getter;
+import lombok.ToString;
+
+@ToString
+public class JoinClause implements Comparable<JoinClause> {
+  private final int cost;
+  // all dimensions in path except target
+  @Getter
+  private final Set<Dimension> dimsInPath;
+  private CubeQueryContext cubeql;
+  private final Map<Aliased<Dimension>, List<SchemaGraph.TableRelationship>> chain;
+  @Getter
+  private final JoinTree joinTree;
+  transient Map<AbstractCubeTable, Set<String>> chainColumns = new HashMap<AbstractCubeTable, Set<String>>();
+
+  public JoinClause(CubeQueryContext cubeql, Map<Aliased<Dimension>,
+    List<SchemaGraph.TableRelationship>> chain, Set<Dimension> dimsInPath) {
+    this.cubeql = cubeql;
+    this.chain = chain;
+    this.joinTree = mergeJoinChains(chain);
+    this.cost = joinTree.getNumEdges();
+    this.dimsInPath = dimsInPath;
+  }
+
+  void initChainColumns() {
+    for (List<SchemaGraph.TableRelationship> path : chain.values()) {
+      for (SchemaGraph.TableRelationship edge : path) {
+        Set<String> fcols = chainColumns.get(edge.getFromTable());
+        if (fcols == null) {
+          fcols = new HashSet<String>();
+          chainColumns.put(edge.getFromTable(), fcols);
+        }
+        fcols.add(edge.getFromColumn());
+
+        Set<String> tocols = chainColumns.get(edge.getToTable());
+        if (tocols == null) {
+          tocols = new HashSet<String>();
+          chainColumns.put(edge.getToTable(), tocols);
+        }
+        tocols.add(edge.getToColumn());
+      }
+    }
+  }
+
+  public int getCost() {
+    return cost;
+  }
+
+  @Override
+  public int compareTo(JoinClause joinClause) {
+    return cost - joinClause.getCost();
+  }
+
+  /**
+   * Takes chains and merges them in the form of a tree. If two chains share a common path up to some table and
+   * bifurcate from there, then both chains contain the common path, but the resultant tree will have a
+   * single path from the root (cube) to that table, with paths bifurcating from there.
+   * <p/>
+   * For example, citystate   =   [basecube.cityid=citydim.id], [citydim.stateid=statedim.id]
+   *              cityzip     =   [basecube.cityid=citydim.id], [citydim.zipcode=zipdim.code]
+   * <p/>
+   * Without merging, the behaviour is like this:
+   * <p/>
+   * <p/>
+   *                  (basecube.cityid=citydim.id)          (citydim.stateid=statedim.id)
+   *                  _____________________________citydim____________________________________statedim
+   *                 |
+   *   basecube------|
+   *                 |_____________________________citydim____________________________________zipdim
+   *
+   *                  (basecube.cityid=citydim.id)          (citydim.zipcode=zipdim.code)
+   *
+   * <p/>
+   * Merging will result in a tree like following
+   * <p/>                                                  (citydim.stateid=statedim.id)
+   * <p/>                                                ________________________________ statedim
+   *             (basecube.cityid=citydim.id)           |
+   * basecube-------------------------------citydim---- |
+   *                                                    |________________________________  zipdim
+   *
+   *                                                       (citydim.zipcode=zipdim.code)
+   *
+   * <p/>
+   * Doing this will reduce the number of joins wherever possible.
+   *
+   * @param chain Joins in Linear format.
+   * @return Joins in Tree format
+   */
+  public JoinTree mergeJoinChains(Map<Aliased<Dimension>, List<SchemaGraph.TableRelationship>> chain) {
+    Map<String, Integer> aliasUsage = new HashMap<String, Integer>();
+    JoinTree root = JoinTree.createRoot();
+    for (Map.Entry<Aliased<Dimension>, List<SchemaGraph.TableRelationship>> entry : chain.entrySet()) {
+      JoinTree current = root;
+      // Last element in this list is link from cube to first dimension
+      for (int i = entry.getValue().size() - 1; i >= 0; i--) {
+        // Adds a child if needed, or returns a child already existing corresponding to the given link.
+        current = current.addChild(entry.getValue().get(i), cubeql, aliasUsage);
+        if (cubeql.getAutoJoinCtx().isPartialJoinChains()) {
+          JoinType joinType = cubeql.getAutoJoinCtx().getTableJoinTypeMap().get(entry.getKey().getObject());
+          //This ensures if (sub)paths are same, but join type is not same, merging will not happen.
+          current.setJoinType(joinType);
+        }
+      }
+      // This is a destination table. Decide alias separately. e.g. chainname
+      // nullcheck is necessary because dimensions can be destinations too. In that case getAlias() == null
+      if (entry.getKey().getAlias() != null) {
+        current.setAlias(entry.getKey().getAlias());
+      }
+    }
+    if (root.getSubtrees().size() > 0) {
+      root.setAlias(cubeql.getAliasForTableName(
+        root.getSubtrees().keySet().iterator().next().getFromTable().getName()));
+    }
+    return root;
+  }
+}

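The merging behaviour documented in mergeJoinChains can be demonstrated with a toy trie over edge strings. A hedged, self-contained sketch (edge strings stand in for TableRelationship; alias bookkeeping and join types are omitted):

    import java.util.*;

    public class MergeChainsSketch {
      // Toy node: children keyed by join edge, mirroring JoinTree's subtrees map.
      static class Node {
        Map<String, Node> children = new LinkedHashMap<String, Node>();
      }

      public static void main(String[] args) {
        // The citystate/cityzip chains from the javadoc example above.
        List<List<String>> chains = Arrays.asList(
          Arrays.asList("basecube.cityid=citydim.id", "citydim.stateid=statedim.id"),
          Arrays.asList("basecube.cityid=citydim.id", "citydim.zipcode=zipdim.code"));
        Node root = new Node();
        for (List<String> chain : chains) {
          Node cur = root;
          for (String edge : chain) {
            Node next = cur.children.get(edge);
            if (next == null) {            // add a child only if this edge is new
              next = new Node();
              cur.children.put(edge, next);
            }
            cur = next;
          }
        }
        // The shared basecube->citydim edge appears once: prints 3 edges, not 4.
        System.out.println(countEdges(root));
      }

      static int countEdges(Node n) {
        int c = 0;
        for (Node child : n.children.values()) {
          c += 1 + countEdges(child);
        }
        return c;
      }
    }
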
http://git-wip-us.apache.org/repos/asf/lens/blob/f7ab827e/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
index a916159..1385584 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
@@ -26,15 +26,12 @@ import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.SchemaGraph.TableRelationship;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
-import org.apache.lens.cube.parse.CubeQueryContext.OptionalDimCtx;
 import org.apache.lens.server.api.error.LensException;
 
-import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.*;
 
-import lombok.*;
 import lombok.extern.slf4j.Slf4j;
 
 /**
@@ -43,971 +40,13 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 class JoinResolver implements ContextRewriter {
 
-  @ToString
-  public static class JoinClause implements Comparable<JoinClause> {
-    private final int cost;
-    // all dimensions in path except target
-    private final Set<Dimension> dimsInPath;
-    private CubeQueryContext cubeql;
-    private final Map<Aliased<Dimension>, List<TableRelationship>> chain;
-    private final JoinTree joinTree;
-    transient Map<AbstractCubeTable, Set<String>> chainColumns = new HashMap<AbstractCubeTable, Set<String>>();
-
-    public JoinClause(CubeQueryContext cubeql, Map<Aliased<Dimension>,
-      List<TableRelationship>> chain, Set<Dimension> dimsInPath) {
-      this.cubeql = cubeql;
-      this.chain = chain;
-      this.joinTree = mergeJoinChains(chain);
-      this.cost = joinTree.getNumEdges();
-      this.dimsInPath = dimsInPath;
-    }
-
-    void initChainColumns() {
-      for (List<TableRelationship> path : chain.values()) {
-        for (TableRelationship edge : path) {
-          Set<String> fcols = chainColumns.get(edge.getFromTable());
-          if (fcols == null) {
-            fcols = new HashSet<String>();
-            chainColumns.put(edge.getFromTable(), fcols);
-          }
-          fcols.add(edge.getFromColumn());
-
-          Set<String> tocols = chainColumns.get(edge.getToTable());
-          if (tocols == null) {
-            tocols = new HashSet<String>();
-            chainColumns.put(edge.getToTable(), tocols);
-          }
-          tocols.add(edge.getToColumn());
-        }
-      }
-    }
-
-    public int getCost() {
-      return cost;
-    }
-
-    @Override
-    public int compareTo(JoinClause joinClause) {
-      return cost - joinClause.getCost();
-    }
-
-    /**
-     * Takes chains and merges them in the form of a tree. If two chains have some common path till some table and
-     * bifurcate from there, then in the chain, both paths will have the common path but the resultant tree will have
-     * single path from root(cube) to that table and paths will bifurcate from there.
-     * <p/>
-     * For example, citystate   =   [basecube.cityid=citydim.id], [citydim.stateid=statedim.id]
-     *              cityzip     =   [basecube.cityid=citydim.id], [citydim.zipcode=zipdim.code]
-     * <p/>
-     * Without merging, the behaviour is like this:
-     * <p/>
-     * <p/>
-     *                  (basecube.cityid=citydim.id)          (citydim.stateid=statedim.id)
-     *                  _____________________________citydim____________________________________statedim
-     *                 |
-     *   basecube------|
-     *                 |_____________________________citydim____________________________________zipdim
-     *
-     *                  (basecube.cityid=citydim.id)          (citydim.zipcode=zipdim.code)
-     *
-     * <p/>
-     * Merging will result in a tree like following
-     * <p/>                                                  (citydim.stateid=statedim.id)
-     * <p/>                                                ________________________________ statedim
-     *             (basecube.cityid=citydim.id)           |
-     * basecube-------------------------------citydim---- |
-     *                                                    |________________________________  zipdim
-     *
-     *                                                       (citydim.zipcode=zipdim.code)
-     *
-     * <p/>
-     * Doing this will reduce the number of joins wherever possible.
-     *
-     * @param chain Joins in Linear format.
-     * @return Joins in Tree format
-     */
-    public JoinTree mergeJoinChains(Map<Aliased<Dimension>, List<TableRelationship>> chain) {
-      Map<String, Integer> aliasUsage = new HashMap<String, Integer>();
-      JoinTree root = JoinTree.createRoot();
-      for (Map.Entry<Aliased<Dimension>, List<TableRelationship>> entry : chain.entrySet()) {
-        JoinTree current = root;
-        // Last element in this list is link from cube to first dimension
-        for (int i = entry.getValue().size() - 1; i >= 0; i--) {
-          // Adds a child if needed, or returns a child already existing corresponding to the given link.
-          current = current.addChild(entry.getValue().get(i), cubeql, aliasUsage);
-          if (cubeql.getAutoJoinCtx().partialJoinChains) {
-            JoinType joinType = cubeql.getAutoJoinCtx().tableJoinTypeMap.get(entry.getKey().getObject());
-            //This ensures if (sub)paths are same, but join type is not same, merging will not happen.
-            current.setJoinType(joinType);
-          }
-        }
-        // This is a destination table. Decide alias separately. e.g. chainname
-        // nullcheck is necessary because dimensions can be destinations too. In that case getAlias() == null
-        if (entry.getKey().getAlias() != null) {
-          current.setAlias(entry.getKey().getAlias());
-        }
-      }
-      if (root.getSubtrees().size() > 0) {
-        root.setAlias(cubeql.getAliasForTableName(
-          root.getSubtrees().keySet().iterator().next().getFromTable().getName()));
-      }
-      return root;
-    }
-  }
-
-  @Data
-  @ToString(exclude = "parent")
-  @EqualsAndHashCode(exclude = "parent")
-  public static class JoinTree {
-    //parent of the node
-    JoinTree parent;
-    // current table is parentRelationship.destTable;
-    TableRelationship parentRelationship;
-    // Alias for the join clause
-    String alias;
-    private Map<TableRelationship, JoinTree> subtrees = new LinkedHashMap<TableRelationship, JoinTree>();
-    // Number of nodes from root to this node. depth of root is 0. Unused for now.
-    private int depthFromRoot;
-    // join type of the current table.
-    JoinType joinType;
-
-    public static JoinTree createRoot() {
-      return new JoinTree(null, null, 0);
-    }
-
-    public JoinTree(JoinTree parent, TableRelationship tableRelationship,
-      int depthFromRoot) {
-      this.parent = parent;
-      this.parentRelationship = tableRelationship;
-      this.depthFromRoot = depthFromRoot;
-    }
-
-    public JoinTree addChild(TableRelationship tableRelationship,
-      CubeQueryContext cubeql, Map<String, Integer> aliasUsage) {
-      if (getSubtrees().get(tableRelationship) == null) {
-        JoinTree current = new JoinTree(this, tableRelationship,
-          this.depthFromRoot + 1);
-        // Set alias. Need to compute only when new node is being created.
-        // The following code ensures that For intermediate tables, aliases are given
-        // in the order citydim, citydim_0, citydim_1, ...
-        // And for destination tables, an alias will be decided from here but might be
-        // overridden outside this function.
-        AbstractCubeTable destTable = tableRelationship.getToTable();
-        current.setAlias(cubeql.getAliasForTableName(destTable.getName()));
-        if (aliasUsage.get(current.getAlias()) == null) {
-          aliasUsage.put(current.getAlias(), 0);
-        } else {
-          aliasUsage.put(current.getAlias(), aliasUsage.get(current.getAlias()) + 1);
-          current.setAlias(current.getAlias() + "_" + (aliasUsage.get(current.getAlias()) - 1));
-        }
-        getSubtrees().put(tableRelationship, current);
-      }
-      return getSubtrees().get(tableRelationship);
-    }
-
-    // Recursive computation of number of edges.
-    public int getNumEdges() {
-      int ret = 0;
-      for (JoinTree tree : getSubtrees().values()) {
-        ret += 1;
-        ret += tree.getNumEdges();
-      }
-      return ret;
-    }
-
-    public boolean isLeaf() {
-      return getSubtrees().isEmpty();
-    }
-
-    // Breadth First Traversal. Unused currently.
-    public Iterator<JoinTree> bft() {
-      return new Iterator<JoinTree>() {
-        List<JoinTree> remaining = new ArrayList<JoinTree>() {
-          {
-            addAll(getSubtrees().values());
-          }
-        };
-
-        @Override
-        public boolean hasNext() {
-          return !remaining.isEmpty();
-        }
-
-        @Override
-        public JoinTree next() {
-          JoinTree retval = remaining.remove(0);
-          remaining.addAll(retval.getSubtrees().values());
-          return retval;
-        }
-
-        @Override
-        public void remove() {
-          throw new UnsupportedOperationException("Not implemented");
-        }
-      };
-    }
-
-    // Depth first traversal of the tree. Used in forming join string.
-    public Iterator<JoinTree> dft() {
-      return new Iterator<JoinTree>() {
-        Stack<JoinTree> joinTreeStack = new Stack<JoinTree>() {
-          {
-            addAll(getSubtrees().values());
-          }
-        };
-
-        @Override
-        public boolean hasNext() {
-          return !joinTreeStack.isEmpty();
-        }
-
-        @Override
-        public JoinTree next() {
-          JoinTree retval = joinTreeStack.pop();
-          joinTreeStack.addAll(retval.getSubtrees().values());
-          return retval;
-        }
-
-        @Override
-        public void remove() {
-          throw new UnsupportedOperationException("Not implemented");
-        }
-      };
-    }
-
-    public Set<JoinTree> leaves() {
-      Set<JoinTree> leaves = new HashSet<JoinTree>();
-      Iterator<JoinTree> dft = dft();
-      while (dft.hasNext()) {
-        JoinTree cur = dft.next();
-        if (cur.isLeaf()) {
-          leaves.add(cur);
-        }
-      }
-      return leaves;
-    }
-  }
-
-  /**
-   * Store join chain information resolved by join resolver
-   */
-  public static class AutoJoinContext {
-    // Map of a joined table to list of all possible paths from that table to
-    // the target
-    private final Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> allPaths;
-    // User supplied partial join conditions
-    private final Map<AbstractCubeTable, String> partialJoinConditions;
-    // True if the query contains user supplied partial join conditions
-    private final boolean partialJoinChains;
-    // Map of joined table to the join type (if provided by user)
-    private final Map<AbstractCubeTable, JoinType> tableJoinTypeMap;
-
-    // True if joins were resolved automatically
-    private boolean joinsResolved;
-    // Target table for the auto join resolver
-    private final AbstractCubeTable autoJoinTarget;
-    // Configuration string to control join type
-    private String joinTypeCfg;
-
-    // Map of a joined table to its columns which are part of any of the join
-    // paths. This is used in candidate table resolver
-    @Getter
-    private Map<Dimension, Map<AbstractCubeTable, List<String>>> joinPathFromColumns =
-      new HashMap<Dimension, Map<AbstractCubeTable, List<String>>>();
-
-    @Getter
-    private Map<Dimension, Map<AbstractCubeTable, List<String>>> joinPathToColumns =
-      new HashMap<Dimension, Map<AbstractCubeTable, List<String>>>();
-
-    // there can be a separate join clause for each fact in case of multi-fact queries
-    @Getter
-    Map<CandidateFact, JoinClause> factClauses = new HashMap<CandidateFact, JoinClause>();
-    @Getter
-    @Setter
-    JoinClause minCostClause;
-    private final boolean flattenBridgeTables;
-    private final String bridgeTableFieldAggr;
-
-    public AutoJoinContext(Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> allPaths,
-      Map<Dimension, OptionalDimCtx> optionalDimensions, Map<AbstractCubeTable, String> partialJoinConditions,
-      boolean partialJoinChains, Map<AbstractCubeTable, JoinType> tableJoinTypeMap, AbstractCubeTable autoJoinTarget,
-      String joinTypeCfg, boolean joinsResolved, boolean flattenBridgeTables, String bridgeTableFieldAggr) {
-      this.allPaths = allPaths;
-      initJoinPathColumns();
-      this.partialJoinConditions = partialJoinConditions;
-      this.partialJoinChains = partialJoinChains;
-      this.tableJoinTypeMap = tableJoinTypeMap;
-      this.autoJoinTarget = autoJoinTarget;
-      this.joinTypeCfg = joinTypeCfg;
-      this.joinsResolved = joinsResolved;
-      this.flattenBridgeTables = flattenBridgeTables;
-      this.bridgeTableFieldAggr = bridgeTableFieldAggr;
-      log.debug("All join paths:{}", allPaths);
-      log.debug("Join path from columns:{}", joinPathFromColumns);
-      log.debug("Join path to columns:{}", joinPathToColumns);
-    }
-
-    public AbstractCubeTable getAutoJoinTarget() {
-      return autoJoinTarget;
-    }
-
-    private JoinClause getJoinClause(CandidateFact fact) {
-      if (fact == null) {
-        return minCostClause;
-      }
-      return factClauses.get(fact);
-    }
-
-    // Populate map of tables to their columns which are present in any of the
-    // join paths
-    private void initJoinPathColumns() {
-      for (List<SchemaGraph.JoinPath> paths : allPaths.values()) {
-        for (int i = 0; i < paths.size(); i++) {
-          SchemaGraph.JoinPath jp = paths.get(i);
-          jp.initColumnsForTable();
-        }
-      }
-      refreshJoinPathColumns();
-    }
-
-    public void refreshJoinPathColumns() {
-      joinPathFromColumns.clear();
-      joinPathToColumns.clear();
-      for (Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>> joinPathEntry : allPaths.entrySet()) {
-        List<SchemaGraph.JoinPath> joinPaths = joinPathEntry.getValue();
-        Map<AbstractCubeTable, List<String>> fromColPaths = joinPathFromColumns.get(joinPathEntry.getKey().getObject());
-        Map<AbstractCubeTable, List<String>> toColPaths = joinPathToColumns.get(joinPathEntry.getKey().getObject());
-        if (fromColPaths == null) {
-          fromColPaths = new HashMap<AbstractCubeTable, List<String>>();
-          joinPathFromColumns.put(joinPathEntry.getKey().getObject(), fromColPaths);
-        }
-
-        if (toColPaths == null) {
-          toColPaths = new HashMap<AbstractCubeTable, List<String>>();
-          joinPathToColumns.put(joinPathEntry.getKey().getObject(), toColPaths);
-        }
-        populateJoinPathCols(joinPaths, fromColPaths, toColPaths);
-      }
-    }
-
-    private void populateJoinPathCols(List<SchemaGraph.JoinPath> joinPaths,
-      Map<AbstractCubeTable, List<String>> fromPathColumns, Map<AbstractCubeTable, List<String>> toPathColumns) {
-      for (SchemaGraph.JoinPath path : joinPaths) {
-        for (TableRelationship edge : path.getEdges()) {
-          AbstractCubeTable fromTable = edge.getFromTable();
-          String fromColumn = edge.getFromColumn();
-          List<String> columnsOfFromTable = fromPathColumns.get(fromTable);
-          if (columnsOfFromTable == null) {
-            columnsOfFromTable = new ArrayList<String>();
-            fromPathColumns.put(fromTable, columnsOfFromTable);
-          }
-          columnsOfFromTable.add(fromColumn);
-
-          // Similarly populate for the 'to' table
-          AbstractCubeTable toTable = edge.getToTable();
-          String toColumn = edge.getToColumn();
-          List<String> columnsOfToTable = toPathColumns.get(toTable);
-          if (columnsOfToTable == null) {
-            columnsOfToTable = new ArrayList<String>();
-            toPathColumns.put(toTable, columnsOfToTable);
-          }
-          columnsOfToTable.add(toColumn);
-        }
-      }
-    }
-
-    public void removeJoinedTable(Dimension dim) {
-      allPaths.remove(Aliased.create(dim));
-      joinPathFromColumns.remove(dim);
-    }
-
-    public Map<AbstractCubeTable, String> getPartialJoinConditions() {
-      return partialJoinConditions;
-    }
-
-    public String getFromString(String fromTable, CandidateFact fact, Set<Dimension> qdims,
-      Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext cubeql) throws LensException {
-      String fromString = fromTable;
-      log.info("All paths dump:{}", cubeql.getAutoJoinCtx().getAllPaths());
-      if (qdims == null || qdims.isEmpty()) {
-        return fromString;
-      }
-      // Compute the merged join clause string for the min cost joinclause
-      String clause = getMergedJoinClause(cubeql, cubeql.getAutoJoinCtx().getJoinClause(fact), dimsToQuery);
-
-      fromString += clause;
-      return fromString;
-    }
-
-    // Some refactoring needed to account for multiple join paths
-    public String getMergedJoinClause(CubeQueryContext cubeql, JoinClause joinClause,
-                                      Map<Dimension, CandidateDim> dimsToQuery) {
-      Set<String> clauses = new LinkedHashSet<String>();
-      String joinTypeStr = "";
-      JoinType joinType = JoinType.INNER;
-
-      // this flag is set to true if user has specified a partial join chain
-      if (!partialJoinChains) {
-        // User has not specified any join conditions. In this case, we rely on
-        // configuration for the join type
-        if (StringUtils.isNotBlank(joinTypeCfg)) {
-          joinType = JoinType.valueOf(joinTypeCfg.toUpperCase());
-          joinTypeStr = getJoinTypeStr(joinType);
-        }
-      }
-
-      Iterator<JoinTree> iter = joinClause.joinTree.dft();
-      boolean hasBridgeTable = false;
-      boolean initedBridgeClauses = false;
-      StringBuilder bridgeSelectClause = new StringBuilder();
-      StringBuilder bridgeFromClause = new StringBuilder();
-      StringBuilder bridgeFilterClause = new StringBuilder();
-      StringBuilder bridgeJoinClause = new StringBuilder();
-      StringBuilder bridgeGroupbyClause = new StringBuilder();
-
-      while (iter.hasNext()) {
-        JoinTree cur = iter.next();
-        if (partialJoinChains) {
-          joinType = cur.getJoinType();
-          joinTypeStr = getJoinTypeStr(joinType);
-        }
-        TableRelationship rel = cur.parentRelationship;
-        String toAlias, fromAlias;
-        fromAlias = cur.parent.getAlias();
-        toAlias = cur.getAlias();
-        hasBridgeTable = flattenBridgeTables && (hasBridgeTable || rel.isMapsToMany());
-        // We have to push user specified filters for the joined tables
-        String userFilter = null;
-        // Partition condition on the tables also needs to be pushed depending
-        // on the join
-        String storageFilter = null;
-
-        if (JoinType.INNER == joinType || JoinType.LEFTOUTER == joinType || JoinType.LEFTSEMI == joinType) {
-          // For inner and left joins push filter of right table
-          userFilter = partialJoinConditions.get(rel.getToTable());
-          if (partialJoinConditions.containsKey(rel.getFromTable())) {
-            if (StringUtils.isNotBlank(userFilter)) {
-              userFilter += (" AND " + partialJoinConditions.get(rel.getFromTable()));
-            } else {
-              userFilter = partialJoinConditions.get(rel.getFromTable());
-            }
-          }
-          storageFilter = getStorageFilter(dimsToQuery, rel.getToTable(), toAlias);
-          dimsToQuery.get(rel.getToTable()).setWhereClauseAdded();
-        } else if (JoinType.RIGHTOUTER == joinType) {
-          // For right outer joins, push filters of left table
-          userFilter = partialJoinConditions.get(rel.getFromTable());
-          if (partialJoinConditions.containsKey(rel.getToTable())) {
-            if (StringUtils.isNotBlank(userFilter)) {
-              userFilter += (" AND " + partialJoinConditions.get(rel.getToTable()));
-            } else {
-              userFilter = partialJoinConditions.get(rel.getToTable());
-            }
-          }
-          if (rel.getFromTable() instanceof Dimension) {
-            storageFilter = getStorageFilter(dimsToQuery, rel.getFromTable(), fromAlias);
-            dimsToQuery.get(rel.getFromTable()).setWhereClauseAdded();
-          }
-        } else if (JoinType.FULLOUTER == joinType) {
-          // For full outer we need to push filters of both left and right
-          // tables in the join clause
-          String leftFilter = null, rightFilter = null;
-          String leftStorageFilter = null, rightStorageFilter = null;
-
-          if (StringUtils.isNotBlank(partialJoinConditions.get(rel.getFromTable()))) {
-            leftFilter = partialJoinConditions.get(rel.getFromTable()) + " and ";
-          }
-
-          if (rel.getFromTable() instanceof Dimension) {
-            leftStorageFilter = getStorageFilter(dimsToQuery, rel.getFromTable(), fromAlias);
-            if (StringUtils.isNotBlank((leftStorageFilter))) {
-              dimsToQuery.get(rel.getFromTable()).setWhereClauseAdded();
-            }
-          }
-
-          if (StringUtils.isNotBlank(partialJoinConditions.get(rel.getToTable()))) {
-            rightFilter = partialJoinConditions.get(rel.getToTable());
-          }
-
-          rightStorageFilter = getStorageFilter(dimsToQuery, rel.getToTable(), toAlias);
-          if (StringUtils.isNotBlank(rightStorageFilter)) {
-            if (StringUtils.isNotBlank((leftStorageFilter))) {
-              leftStorageFilter += " and ";
-            }
-            dimsToQuery.get(rel.getToTable()).setWhereClauseAdded();
-          }
-
-          userFilter = (leftFilter == null ? "" : leftFilter) + (rightFilter == null ? "" : rightFilter);
-          storageFilter =
-            (leftStorageFilter == null ? "" : leftStorageFilter)
-              + (rightStorageFilter == null ? "" : rightStorageFilter);
-        }
-        StringBuilder clause = new StringBuilder();
-
-        // if a bridge table is present in the path
-        if (hasBridgeTable) {
-          // if any relation has bridge table, the clause becomes the following :
-          // join (" select " + joinkey + " aggr over fields from bridge table + from bridgeTable + [where user/storage
-          // filters] + groupby joinkey) on joincond"
-          // Or
-          // " join (select " + joinkey + " aggr over fields from table reached through bridge table + from bridge table
-          // join <next tables> on join condition + [and user/storage filters] + groupby joinkey) on joincond
-          if (!initedBridgeClauses) {
-            // we just found a bridge table in the path; we need to initialize the clauses for the subquery
-            // required for aggregating fields of the bridge table
-            // initialize the select clause with the join key
-            bridgeSelectClause.append(" (select ").append(toAlias).append(".").append(rel.getToColumn()).append(" as ")
-            .append(rel.getToColumn());
-            // group by join key
-            bridgeGroupbyClause.append(" group by ").append(toAlias).append(".").append(rel.getToColumn());
-            // from clause with bridge table
-            bridgeFromClause.append(" from ").append(dimsToQuery.get(rel.getToTable()).getStorageString(toAlias));
-            // we need to initialize the filter clause with the user filter clause or the storage filter, if applicable
-            if (StringUtils.isNotBlank(userFilter)) {
-              bridgeFilterClause.append(userFilter);
-            }
-            if (StringUtils.isNotBlank(storageFilter)) {
-              if (StringUtils.isNotBlank(bridgeFilterClause.toString())) {
-                bridgeFilterClause.append(" and ");
-              }
-              bridgeFilterClause.append(storageFilter);
-            }
-            // initialize final join clause
-            bridgeJoinClause.append(" on ").append(fromAlias).append(".")
-              .append(rel.getFromColumn()).append(" = ").append("%s")
-              .append(".").append(rel.getToColumn());
-            initedBridgeClauses = true;
-          } else {
-            // if bridge clauses are already initialized, this is the next table getting joined with the bridge table
-            // we will append a simple join clause
-            bridgeFromClause.append(joinTypeStr).append(" join ");
-            bridgeFromClause.append(dimsToQuery.get(rel.getToTable()).getStorageString(toAlias));
-            bridgeFromClause.append(" on ").append(fromAlias).append(".")
-              .append(rel.getFromColumn()).append(" = ").append(toAlias)
-              .append(".").append(rel.getToColumn());
-
-            if (StringUtils.isNotBlank(userFilter)) {
-              bridgeFromClause.append(" and ").append(userFilter);
-            }
-            if (StringUtils.isNotBlank(storageFilter)) {
-              bridgeFromClause.append(" and ").append(storageFilter);
-            }
-          }
-          if (cubeql.getTblAliasToColumns().get(toAlias) != null
-            && !cubeql.getTblAliasToColumns().get(toAlias).isEmpty()) {
-            // there are fields selected from this table after seeing the bridge table in the path,
-            // so we should make a subquery for this selection
-            clause.append(joinTypeStr).append(" join ");
-            clause.append(bridgeSelectClause.toString());
-            for (String col : cubeql.getTblAliasToColumns().get(toAlias)) {
-              clause.append(",").append(bridgeTableFieldAggr).append("(").append(toAlias)
-                .append(".").append(col)
-                .append(")")
-                .append(" as ").append(col);
-            }
-            String bridgeFrom = bridgeFromClause.toString();
-            clause.append(bridgeFrom);
-            String bridgeFilter = bridgeFilterClause.toString();
-            if (StringUtils.isNotBlank(bridgeFilter)) {
-              if (bridgeFrom.contains(" join ")) {
-                clause.append(" and ");
-              } else {
-                clause.append(" where");
-              }
-              clause.append(bridgeFilter);
-            }
-            clause.append(bridgeGroupbyClause.toString());
-            clause.append(") ").append(toAlias);
-            clause.append(String.format(bridgeJoinClause.toString(), toAlias));
-            clauses.add(clause.toString());
-          }
-          if (cur.getSubtrees().isEmpty()) {
-            // clear bridge flags and builders, as there are no more clauses in this tree.
-            hasBridgeTable = false;
-            initedBridgeClauses = false;
-            bridgeSelectClause.setLength(0);
-            bridgeFromClause.setLength(0);
-            bridgeFilterClause.setLength(0);
-            bridgeJoinClause.setLength(0);
-            bridgeGroupbyClause.setLength(0);
-          }
-        } else {
-          // Simple join clause is :
-          // jointype + " join " + destTable + " on " + joincond + [" and" + userfilter] + ["and" + storageFilter]
-          clause.append(joinTypeStr).append(" join ");
-          //Add storage table name followed by alias
-          clause.append(dimsToQuery.get(rel.getToTable()).getStorageString(toAlias));
-          clause.append(" on ").append(fromAlias).append(".")
-            .append(rel.getFromColumn()).append(" = ").append(toAlias)
-            .append(".").append(rel.getToColumn());
-
-          if (StringUtils.isNotBlank(userFilter)) {
-            clause.append(" and ").append(userFilter);
-          }
-          if (StringUtils.isNotBlank(storageFilter)) {
-            clause.append(" and ").append(storageFilter);
-          }
-          clauses.add(clause.toString());
-        }
-      }
-      return StringUtils.join(clauses, "");
-    }
-
-    public Set<Dimension> getDimsOnPath(Map<Aliased<Dimension>, List<TableRelationship>> joinChain,
-      Set<Dimension> qdims) {
-      Set<Dimension> dimsOnPath = new HashSet<Dimension>();
-      for (Map.Entry<Aliased<Dimension>, List<TableRelationship>> entry : joinChain.entrySet()) {
-        List<TableRelationship> chain = entry.getValue();
-        Dimension table = entry.getKey().getObject();
-
-        // check if join with this dimension is required
-        if (!qdims.contains(table)) {
-          continue;
-        }
-
-        for (int i = chain.size() - 1; i >= 0; i--) {
-          TableRelationship rel = chain.get(i);
-          dimsOnPath.add((Dimension) rel.getToTable());
-        }
-      }
-      return dimsOnPath;
-    }
-
-    private String getStorageFilter(Map<Dimension, CandidateDim> dimsToQuery, AbstractCubeTable table, String alias) {
-      String whereClause = "";
-      if (dimsToQuery != null && dimsToQuery.get(table) != null) {
-        if (StringUtils.isNotBlank(dimsToQuery.get(table).getWhereClause())) {
-          whereClause = dimsToQuery.get(table).getWhereClause();
-          if (alias != null) {
-            whereClause = StorageUtil.getWhereClause(whereClause, alias);
-          }
-        }
-      }
-      return whereClause;
-    }
-
-    /**
-     * @return the joinsResolved
-     */
-    public boolean isJoinsResolved() {
-      return joinsResolved;
-    }
-
-    // Includes both queried join paths and optional join paths
-    public Set<String> getAllJoinPathColumnsOfTable(AbstractCubeTable table) {
-      Set<String> allPaths = new HashSet<String>();
-      for (Map<AbstractCubeTable, List<String>> optPaths : joinPathFromColumns.values()) {
-        if (optPaths.get(table) != null) {
-          allPaths.addAll(optPaths.get(table));
-        }
-      }
-
-      for (Map<AbstractCubeTable, List<String>> optPaths : joinPathToColumns.values()) {
-        if (optPaths.get(table) != null) {
-          allPaths.addAll(optPaths.get(table));
-        }
-      }
-
-      return allPaths;
-    }
-
-    public void pruneAllPaths(CubeInterface cube, final Set<CandidateFact> cfacts,
-      final Map<Dimension, CandidateDim> dimsToQuery) {
-      // Remove join paths which cannot be satisfied by the resolved candidate
-      // fact and dimension tables
-      if (cfacts != null) {
-        // include columns from all picked facts
-        Set<String> factColumns = new HashSet<String>();
-        for (CandidateFact cfact : cfacts) {
-          factColumns.addAll(cfact.getColumns());
-        }
-
-        for (List<SchemaGraph.JoinPath> paths : allPaths.values()) {
-          for (int i = 0; i < paths.size(); i++) {
-            SchemaGraph.JoinPath jp = paths.get(i);
-            List<String> cubeCols = jp.getColumnsForTable((AbstractCubeTable) cube);
-            if (cubeCols != null && !factColumns.containsAll(cubeCols)) {
-              // This path requires some columns from the cube which are not
-              // present in the candidate fact
-              // Remove this path
-              log.info("Removing join path:{} as columns :{} dont exist", jp, cubeCols);
-              paths.remove(i);
-              i--;
-            }
-          }
-        }
-        pruneEmptyPaths(allPaths);
-      }
-      pruneAllPaths(dimsToQuery);
-    }
-
-    /**
-     * Prunes allPaths by removing paths which contain columns that are not present in any candidate dims.
-     *
-     * @param candidateDims
-     */
-    public void pruneAllPathsForCandidateDims(Map<Dimension, Set<CandidateDim>> candidateDims) {
-      Map<Dimension, Set<String>> dimColumns = new HashMap<Dimension, Set<String>>();
-      // populate all columns present in candidate dims for each dimension
-      for (Map.Entry<Dimension, Set<CandidateDim>> entry : candidateDims.entrySet()) {
-        Dimension dim = entry.getKey();
-        Set<String> allColumns = new HashSet<String>();
-        for (CandidateDim cdim : entry.getValue()) {
-          allColumns.addAll(cdim.getColumns());
-        }
-        dimColumns.put(dim, allColumns);
-      }
-      for (List<SchemaGraph.JoinPath> paths : allPaths.values()) {
-        for (int i = 0; i < paths.size(); i++) {
-          SchemaGraph.JoinPath jp = paths.get(i);
-          for (AbstractCubeTable refTable : jp.getAllTables()) {
-            List<String> cols = jp.getColumnsForTable(refTable);
-            if (refTable instanceof Dimension) {
-              if (cols != null && (dimColumns.get(refTable) == null || !dimColumns.get(refTable).containsAll(cols))) {
-                // This path requires some columns from the dimension which are not present in any candidate dim
-                // Remove this path
-                log.info("Removing join path:{} as columns :{} dont exist", jp, cols);
-                paths.remove(i);
-                i--;
-                break;
-              }
-            }
-          }
-        }
-      }
-      pruneEmptyPaths(allPaths);
-    }
-
-    private void pruneEmptyPaths(Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> allPaths) {
-      Iterator<Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>>> iter = allPaths.entrySet().iterator();
-      while (iter.hasNext()) {
-        Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>> entry = iter.next();
-        if (entry.getValue().isEmpty()) {
-          iter.remove();
-        }
-      }
-    }
-
-    private Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> pruneFactPaths(CubeInterface cube,
-      final CandidateFact cfact) {
-      Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> prunedPaths
-        = new HashMap<Aliased<Dimension>, List<SchemaGraph.JoinPath>>();
-      // Remove join paths which cannot be satisfied by the candidate fact
-      for (Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>> ppaths : allPaths.entrySet()) {
-        prunedPaths.put(ppaths.getKey(), new ArrayList<SchemaGraph.JoinPath>(ppaths.getValue()));
-        List<SchemaGraph.JoinPath> paths = prunedPaths.get(ppaths.getKey());
-        for (int i = 0; i < paths.size(); i++) {
-          SchemaGraph.JoinPath jp = paths.get(i);
-          List<String> cubeCols = jp.getColumnsForTable((AbstractCubeTable) cube);
-          if (cubeCols != null && !cfact.getColumns().containsAll(cubeCols)) {
-            // This path requires some columns from the cube which are not
-            // present in the candidate fact
-            // Remove this path
-            log.info("Removing join path:{} as columns :{} dont exist", jp, cubeCols);
-            paths.remove(i);
-            i--;
-          }
-        }
-      }
-      pruneEmptyPaths(prunedPaths);
-      return prunedPaths;
-    }
-
-    private void pruneAllPaths(final Map<Dimension, CandidateDim> dimsToQuery) {
-      // Remove join paths which cannot be satisfied by the resolved dimension
-      // tables
-      if (dimsToQuery != null && !dimsToQuery.isEmpty()) {
-        for (CandidateDim candidateDim : dimsToQuery.values()) {
-          Set<String> dimCols = candidateDim.dimtable.getAllFieldNames();
-          for (List<SchemaGraph.JoinPath> paths : allPaths.values()) {
-            for (int i = 0; i < paths.size(); i++) {
-              SchemaGraph.JoinPath jp = paths.get(i);
-              List<String> candidateDimCols = jp.getColumnsForTable(candidateDim.getBaseTable());
-              if (candidateDimCols != null && !dimCols.containsAll(candidateDimCols)) {
-                // This path requires some columns from the dimension which are
-                // not present in the candidate dim
-                // Remove this path
-                log.info("Removing join path:{} as columns :{} dont exist", jp, candidateDimCols);
-                paths.remove(i);
-                i--;
-              }
-            }
-          }
-        }
-        pruneEmptyPaths(allPaths);
-      }
-    }
-
-    /**
-     * There can be multiple join paths between a dimension and the target. The set of all possible join clauses is
-     * the cartesian product of the join paths of all dimensions.
-     */
-    private Iterator<JoinClause> getJoinClausesForAllPaths(final CandidateFact fact,
-      final Set<Dimension> qdims, final CubeQueryContext cubeql) {
-      Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> allPaths;
-      // if fact is passed only look at paths possible from fact to dims
-      if (fact != null) {
-        allPaths = pruneFactPaths(cubeql.getCube(), fact);
-      } else {
-        allPaths = new LinkedHashMap<Aliased<Dimension>, List<SchemaGraph.JoinPath>>(this.allPaths);
-      }
-      // prune allPaths with qdims
-      log.info("pruning allPaths before generating all permutations.");
-      log.info("allPaths: {}", allPaths);
-      log.info("qdims: {}", qdims);
-      pruneAllPathsWithQueriedDims(allPaths, qdims);
-
-      // Number of paths in each path set
-      final int[] groupSizes = new int[allPaths.values().size()];
-      // Total number of elements in the cartesian product
-      int numSamples = 1;
-      // All path sets
-      final List<List<SchemaGraph.JoinPath>> pathSets = new ArrayList<List<SchemaGraph.JoinPath>>();
-      // Dimension corresponding to the path sets
-      final List<Aliased<Dimension>> dimensions = new ArrayList<Aliased<Dimension>>(groupSizes.length);
-
-      int i = 0;
-      for (Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>> entry : allPaths.entrySet()) {
-        dimensions.add(entry.getKey());
-        List<SchemaGraph.JoinPath> group = entry.getValue();
-        pathSets.add(group);
-        groupSizes[i] = group.size();
-        numSamples *= groupSizes[i];
-        i++;
-      }
-
-      final int[] selection = new int[groupSizes.length];
-      final int MAX_SAMPLE_COUNT = numSamples;
-
-      // Return a lazy iterator over all possible join chains
-      return new Iterator<JoinClause>() {
-        int sample = 0;
-
-        @Override
-        public boolean hasNext() {
-          return sample < MAX_SAMPLE_COUNT;
-        }
-
-        @Override
-        public JoinClause next() {
-          Map<Aliased<Dimension>, List<TableRelationship>> chain
-            = new LinkedHashMap<Aliased<Dimension>, List<TableRelationship>>();
-          // generate the next selection: decode 'sample' as a mixed-radix number, one digit per dimension
-          for (int i = groupSizes.length - 1, base = sample; i >= 0; base /= groupSizes[i], i--) {
-            selection[i] = base % groupSizes[i];
-          }
-          for (int i = 0; i < selection.length; i++) {
-            int selectedPath = selection[i];
-            List<TableRelationship> path = pathSets.get(i).get(selectedPath).getEdges();
-            chain.put(dimensions.get(i), path);
-          }
-
-          Set<Dimension> dimsOnPath = getDimsOnPath(chain, qdims);
-
-          sample++;
-          // Cost of join = number of tables joined in the clause
-          return new JoinClause(cubeql, chain, dimsOnPath);
-        }
-
-        @Override
-        public void remove() {
-          throw new UnsupportedOperationException("Cannot remove elements!");
-        }
-      };
-    }
-
-    /**
-     * Given allPaths, removes entries whose key is a non-join-chain dimension that is not contained in qdims
-     *
-     * @param allPaths
-     * @param qdims
-     */
-    private void pruneAllPathsWithQueriedDims(Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> allPaths,
-      Set<Dimension> qdims) {
-      Iterator<Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>>> iter = allPaths.entrySet().iterator();
-      while (iter.hasNext()) {
-        Map.Entry<Aliased<Dimension>, List<SchemaGraph.JoinPath>> cur = iter.next();
-        if (!qdims.contains(cur.getKey().getObject())) {
-          log.info("removing from allPaths: {}", cur);
-          iter.remove();
-        }
-      }
-    }
-
-    public Set<Dimension> pickOptionalTables(final CandidateFact fact,
-      Set<Dimension> qdims, CubeQueryContext cubeql) throws LensException {
-      // Find the min cost join clause and add dimensions in the clause as optional dimensions
-      Set<Dimension> joiningOptionalTables = new HashSet<Dimension>();
-      if (qdims == null) {
-        return joiningOptionalTables;
-      }
-      // find least cost path
-      Iterator<JoinClause> itr = getJoinClausesForAllPaths(fact, qdims, cubeql);
-      JoinClause minCostClause = null;
-      while (itr.hasNext()) {
-        JoinClause clause = itr.next();
-        if (minCostClause == null || minCostClause.getCost() > clause.getCost()) {
-          minCostClause = clause;
-        }
-      }
-
-      if (minCostClause == null) {
-        throw new LensException(LensCubeErrorCode.NO_JOIN_PATH.getLensErrorInfo(),
-            qdims.toString(), autoJoinTarget.getName());
-      }
-
-      log.info("Fact: {} minCostClause:{}", fact, minCostClause);
-      if (fact != null) {
-        cubeql.getAutoJoinCtx().getFactClauses().put(fact, minCostClause);
-      } else {
-        cubeql.getAutoJoinCtx().setMinCostClause(minCostClause);
-      }
-      for (Dimension dim : minCostClause.dimsInPath) {
-        if (!qdims.contains(dim)) {
-          joiningOptionalTables.add(dim);
-        }
-      }
-
-      minCostClause.initChainColumns();
-      // prune candidate dims of joiningOptionalTables w.r.t. the joining columns
-      for (Dimension dim : joiningOptionalTables) {
-        for (Iterator<CandidateDim> i = cubeql.getCandidateDimTables().get(dim).iterator(); i.hasNext();) {
-          CandidateDim cdim = i.next();
-          CubeDimensionTable dimtable = cdim.dimtable;
-          if (!cdim.getColumns().containsAll(minCostClause.chainColumns.get(dim))) {
-            i.remove();
-            log.info("Not considering dimtable:{} as its columns are not part of any join paths. Join columns:{}",
-              dimtable, minCostClause.chainColumns.get(dim));
-            cubeql.addDimPruningMsgs(dim, cdim.dimtable,
-              CandidateTablePruneCause.noColumnPartOfAJoinPath(minCostClause.chainColumns.get(dim)));
-          }
-        }
-        if (cubeql.getCandidateDimTables().get(dim).size() == 0) {
-          throw new LensException(LensCubeErrorCode.NO_DIM_HAS_COLUMN.getLensErrorInfo(), dim.getName(),
-            minCostClause.chainColumns.get(dim).toString());
-        }
-      }
-
-      return joiningOptionalTables;
-    }
-
-    public Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> getAllPaths() {
-      return allPaths;
-    }
-
-    public boolean isReachableDim(Dimension dim) {
-      Aliased<Dimension> aliased = Aliased.create(dim);
-      return isReachableDim(aliased);
-    }
-
-    public boolean isReachableDim(Dimension dim, String alias) {
-      Aliased<Dimension> aliased = Aliased.create(dim, alias);
-      return isReachableDim(aliased);
-    }
+  private Map<AbstractCubeTable, String> partialJoinConditions;
+  private Map<AbstractCubeTable, JoinType> tableJoinTypeMap;
+  private boolean partialJoinChain;
+  private AbstractCubeTable target;
+  private HashMap<Dimension, List<JoinChain>> dimensionInJoinChain = new HashMap<Dimension, List<JoinChain>>();
 
-    private boolean isReachableDim(Aliased<Dimension> aliased) {
-      return allPaths.containsKey(aliased) && !allPaths.get(aliased).isEmpty();
-    }
+  public JoinResolver(Configuration conf) {
   }
 
   static String getJoinTypeStr(JoinType joinType) {
@@ -1032,15 +71,6 @@ class JoinResolver implements ContextRewriter {
     }
   }
 
-  private Map<AbstractCubeTable, String> partialJoinConditions;
-  private Map<AbstractCubeTable, JoinType> tableJoinTypeMap;
-  private boolean partialJoinChain;
-  private AbstractCubeTable target;
-  private HashMap<Dimension, List<JoinChain>> dimensionInJoinChain = new HashMap<Dimension, List<JoinChain>>();
-
-  public JoinResolver(Configuration conf) {
-  }
-
   @Override
   public void rewriteContext(CubeQueryContext cubeql) throws LensException {
     partialJoinConditions = new HashMap<AbstractCubeTable, String>();
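
As an aside, the deleted getJoinClausesForAllPaths above enumerates the cartesian
product of join paths by decoding a single counter as a mixed-radix number. A
minimal, self-contained sketch of just that enumeration follows; the class and
variable names are illustrative and not part of the Lens codebase.

import java.util.Arrays;

public class CartesianProductSketch {
  public static void main(String[] args) {
    // e.g. dimension 1 has 2 candidate join paths, dimension 2 has 3, dimension 3 has 2
    int[] groupSizes = {2, 3, 2};
    int numSamples = 1;
    for (int size : groupSizes) {
      numSamples *= size; // 12 combinations in total
    }
    int[] selection = new int[groupSizes.length];
    for (int sample = 0; sample < numSamples; sample++) {
      // decode 'sample' as a mixed-radix number, one digit per dimension
      for (int i = groupSizes.length - 1, base = sample; i >= 0; base /= groupSizes[i], i--) {
        selection[i] = base % groupSizes[i];
      }
      // each printed array picks one path per dimension, covering every combination exactly once
      System.out.println(Arrays.toString(selection));
    }
  }
}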


[24/50] [abbrv] lens git commit: LENS-881 : Remove hive-metastore jar from lens-ship-jars

Posted by sh...@apache.org.
LENS-881 : Remove hive-metastore jar from lens-ship-jars


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/5d79ecfd
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/5d79ecfd
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/5d79ecfd

Branch: refs/heads/LENS-581
Commit: 5d79ecfdbce74bd5b1f77911f200af1b7d76a01a
Parents: b71be2d
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Thu Nov 26 10:25:10 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Thu Nov 26 10:25:10 2015 +0530

----------------------------------------------------------------------
 lens-ship-jars/pom.xml | 6 ------
 1 file changed, 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/5d79ecfd/lens-ship-jars/pom.xml
----------------------------------------------------------------------
diff --git a/lens-ship-jars/pom.xml b/lens-ship-jars/pom.xml
index 6268445..b150980 100644
--- a/lens-ship-jars/pom.xml
+++ b/lens-ship-jars/pom.xml
@@ -52,11 +52,6 @@
       <groupId>net.sf.opencsv</groupId>
       <artifactId>opencsv</artifactId>
     </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-metastore</artifactId>
-      <scope>compile</scope>
-    </dependency>
   </dependencies>
 
   <build>
@@ -72,7 +67,6 @@
               <include>org.apache.lens:lens-storage-db</include>
               <include>org.apache.lens:lens-cube</include>
               <include>org.apache.lens:lens-query-lib</include>
-              <include>org.apache.hive:hive-metastore</include>
               <include>net.sf.opencsv:opencsv</include>
             </includes>
           </artifactSet>


[44/50] [abbrv] lens git commit: LENS-903 : No candidate dim available exception should contain only brief error - added missing file

Posted by sh...@apache.org.
LENS-903 : No candidate dim available exception should contain only brief error - added missing file


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/d6aeecc7
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/d6aeecc7
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/d6aeecc7

Branch: refs/heads/LENS-581
Commit: d6aeecc7306d9cd37dacd53caa0dfd842ca48bdc
Parents: b84cb2c
Author: Sushil Mohanty <su...@apache.org>
Authored: Thu Dec 17 18:22:26 2015 +0530
Committer: Sushil Mohanty <su...@apache.org>
Committed: Thu Dec 17 18:22:26 2015 +0530

----------------------------------------------------------------------
 .../error/NoCandidateDimAvailableException.java | 47 ++++++++++++++++++++
 1 file changed, 47 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/d6aeecc7/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateDimAvailableException.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateDimAvailableException.java b/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateDimAvailableException.java
new file mode 100644
index 0000000..ef76dc6
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateDimAvailableException.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.error;
+
+import org.apache.lens.cube.metadata.CubeDimensionTable;
+import org.apache.lens.cube.parse.PruneCauses;
+import org.apache.lens.server.api.error.LensException;
+
+public class NoCandidateDimAvailableException extends LensException {
+
+  private final PruneCauses<CubeDimensionTable> briefAndDetailedError;
+
+  public NoCandidateDimAvailableException(PruneCauses<CubeDimensionTable> briefAndDetailedError) {
+    super(LensCubeErrorCode.NO_CANDIDATE_DIM_AVAILABLE.getLensErrorInfo(), briefAndDetailedError.getBriefCause());
+    this.briefAndDetailedError = briefAndDetailedError;
+  }
+
+  public PruneCauses.BriefAndDetailedError getJsonMessage() {
+    return briefAndDetailedError.toJsonObject();
+  }
+
+  @Override
+  public int compareTo(LensException e) {
+    //Compare the max CandidateTablePruneCode coming from different instances.
+    if (e instanceof NoCandidateDimAvailableException) {
+      return briefAndDetailedError.getMaxCause().compareTo(
+              ((NoCandidateDimAvailableException) e).briefAndDetailedError.getMaxCause());
+    }
+    return super.compareTo(e);
+  }
+}
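
The compareTo override above lets callers rank competing failures and surface only
the most severe one. Below is a tiny standalone sketch of the same pattern;
RankedException and its integer severity are hypothetical stand-ins for the Lens
exception hierarchy and CandidateTablePruneCode, not actual Lens classes.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

class RankedException extends Exception implements Comparable<RankedException> {
  private final int severity; // stands in for the ordinal of the max prune cause

  RankedException(String message, int severity) {
    super(message);
    this.severity = severity;
  }

  @Override
  public int compareTo(RankedException other) {
    return Integer.compare(severity, other.severity);
  }
}

public class MostSevereDemo {
  public static void main(String[] args) {
    List<RankedException> failures = Arrays.asList(
        new RankedException("columns missing", 1),
        new RankedException("no candidate storages", 3),
        new RankedException("denormalized field unreachable", 2));
    // keep only the most severe cause, the way the server picks the error to report
    System.out.println(Collections.max(failures).getMessage());
  }
}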


[41/50] [abbrv] lens git commit: LENS-887 : Add exception handling over event process threads and increase pool size for QueryEndNotifier and ResultFormatter

Posted by sh...@apache.org.
LENS-887 : Add exception handling over event process threads and increase pool size for QueryEndNotifier and ResultFormatter


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/73f92430
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/73f92430
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/73f92430

Branch: refs/heads/LENS-581
Commit: 73f92430c70664cf5b8c63ec9b174a4a1b27d2ad
Parents: 36166a2
Author: Puneet Gupta <pu...@gmail.com>
Authored: Tue Dec 15 18:22:40 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Dec 15 18:22:40 2015 +0530

----------------------------------------------------------------------
 .../server/api/events/AsyncEventListener.java   | 50 +++++++++-----
 .../apache/lens/server/EventServiceImpl.java    |  9 ++-
 .../lens/server/query/QueryEndNotifier.java     | 72 +++++++++++---------
 .../lens/server/query/ResultFormatter.java      |  5 ++
 .../lens/server/query/TestEventService.java     | 45 ++++++++++++
 5 files changed, 131 insertions(+), 50 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/73f92430/lens-server-api/src/main/java/org/apache/lens/server/api/events/AsyncEventListener.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/events/AsyncEventListener.java b/lens-server-api/src/main/java/org/apache/lens/server/api/events/AsyncEventListener.java
index 547c008..84728e5 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/events/AsyncEventListener.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/events/AsyncEventListener.java
@@ -22,12 +22,18 @@ import java.util.concurrent.*;
 
 import org.apache.lens.server.api.error.LensException;
 
+import org.apache.commons.lang3.concurrent.BasicThreadFactory;
+
+import lombok.AccessLevel;
+import lombok.Getter;
+import lombok.extern.slf4j.Slf4j;
 /**
  * Event listeners should implement this class if they wish to process events asynchronously. This should be used when
  * event processing can block, or is computationally intensive.
  *
  * @param <T> the generic type
  */
+@Slf4j
 public abstract class AsyncEventListener<T extends LensEvent> implements LensEventListener<T> {
 
   /**
@@ -41,49 +47,57 @@ public abstract class AsyncEventListener<T extends LensEvent> implements LensEve
   protected final BlockingQueue<Runnable> eventQueue;
 
   /**
+   * Name of this Asynchronous Event Listener. Will be used for logging and to name the threads in thread pool that
+   * allow asynchronous handling of events. If required, subclasses can override the <code>getName</code> method to
+   * provide a more appropriate name.
+   *
+   * Default value is the simple class name (for example QueryEndNotifier, ResultFormatter, etc.).
+   */
+  @Getter(AccessLevel.PROTECTED)
+  private final String name = this.getClass().getSimpleName();
+
+  /**
    * Create a single threaded event listener with an unbounded queue, with daemon threads.
    */
   public AsyncEventListener() {
-    this(1);
+    this(1, 1);
   }
 
   /**
    * Create a event listener with poolSize threads with an unbounded queue and daemon threads.
    *
    * @param poolSize the pool size
+   * @param maxPoolSize the max pool size
    */
-  public AsyncEventListener(int poolSize) {
-    this(poolSize, -1, 10, true);
+  public AsyncEventListener(int poolSize, int maxPoolSize) {
+    this(poolSize, maxPoolSize, -1, 10, true);
   }
 
   /**
   * Create an asynchronous event listener which uses a thread pool to process events.
    *
    * @param poolSize       size of the event processing pool
+   * @param maxPoolSize    the max pool size
    * @param maxQueueSize   max size of the event queue, if this is non positive, then the queue is unbounded
    * @param timeOutSeconds time out in seconds when an idle thread is destroyed
    * @param isDaemon       if the threads used to process should be daemon threads,
    *                       if false, then implementation should call stop()
    *                       to stop the thread pool
    */
-  public AsyncEventListener(int poolSize, int maxQueueSize, long timeOutSeconds, final boolean isDaemon) {
+  public AsyncEventListener(int poolSize, int maxPoolSize, int maxQueueSize, long timeOutSeconds,
+      final boolean isDaemon) {
     if (maxQueueSize <= 0) {
       eventQueue = new LinkedBlockingQueue<Runnable>();
     } else {
       eventQueue = new ArrayBlockingQueue<Runnable>(maxQueueSize);
     }
 
-    processor = new ThreadPoolExecutor(poolSize, poolSize, timeOutSeconds, TimeUnit.SECONDS, eventQueue,
-      new ThreadFactory() {
-        @Override
-        public Thread newThread(Runnable runnable) {
-          Thread th = new Thread(runnable);
-          th.setName("event_processor_thread");
-          th.setDaemon(isDaemon);
-          return th;
-        }
-      });
-    processor.allowCoreThreadTimeOut(true);
+    ThreadFactory factory = new BasicThreadFactory.Builder()
+      .namingPattern(getName()+"_AsyncThread-%d")
+      .daemon(isDaemon)
+      .priority(Thread.NORM_PRIORITY)
+      .build();
+    processor = new ThreadPoolExecutor(poolSize, maxPoolSize, timeOutSeconds, TimeUnit.SECONDS, eventQueue, factory);
   }
 
   /**
@@ -98,7 +112,11 @@ public abstract class AsyncEventListener<T extends LensEvent> implements LensEve
       processor.execute(new Runnable() {
         @Override
         public void run() {
-          process(event);
+          try {
+            process(event);
+          } catch (Throwable e) {
+            log.error("{} Failed to process event {}", getName(), event, e);
+          }
         }
       });
     } catch (RejectedExecutionException rejected) {
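
The same two ideas in isolation: named pool threads via BasicThreadFactory, and a
catch-all so one failing event cannot kill a worker thread. This is a minimal,
runnable sketch assuming only commons-lang3 on the classpath; the listener name and
pool sizes are made up for the example.

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import org.apache.commons.lang3.concurrent.BasicThreadFactory;

public class NamedPoolSketch {
  public static void main(String[] args) throws InterruptedException {
    ThreadPoolExecutor pool = new ThreadPoolExecutor(2, 5, 10, TimeUnit.SECONDS,
        new LinkedBlockingQueue<Runnable>(),
        new BasicThreadFactory.Builder()
            .namingPattern("DemoListener_AsyncThread-%d") // mirrors getName() + "_AsyncThread-%d"
            .daemon(true)
            .priority(Thread.NORM_PRIORITY)
            .build());
    pool.execute(new Runnable() {
      @Override
      public void run() {
        try {
          throw new RuntimeException("simulated event failure");
        } catch (Throwable t) {
          // swallow and log; the worker thread survives for the next event
          System.err.println(Thread.currentThread().getName() + " failed: " + t.getMessage());
        }
      }
    });
    pool.shutdown();
    pool.awaitTermination(5, TimeUnit.SECONDS);
  }
}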

http://git-wip-us.apache.org/repos/asf/lens/blob/73f92430/lens-server/src/main/java/org/apache/lens/server/EventServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/EventServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/EventServiceImpl.java
index a276828..369885d 100644
--- a/lens-server/src/main/java/org/apache/lens/server/EventServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/EventServiceImpl.java
@@ -21,6 +21,7 @@ package org.apache.lens.server;
 import java.util.*;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
+import java.util.concurrent.ThreadFactory;
 
 import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.error.LensException;
@@ -29,6 +30,7 @@ import org.apache.lens.server.api.events.LensEventListener;
 import org.apache.lens.server.api.events.LensEventService;
 import org.apache.lens.server.api.health.HealthStatus;
 
+import org.apache.commons.lang3.concurrent.BasicThreadFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hive.service.AbstractService;
 
@@ -64,8 +66,13 @@ public class EventServiceImpl extends AbstractService implements LensEventServic
   @Override
   public synchronized void init(HiveConf hiveConf) {
     int numProcs = Runtime.getRuntime().availableProcessors();
+    ThreadFactory factory = new BasicThreadFactory.Builder()
+      .namingPattern("Event_Service_Thread-%d")
+      .daemon(false)
+      .priority(Thread.NORM_PRIORITY)
+      .build();
     eventHandlerPool = Executors.newFixedThreadPool(hiveConf.getInt(LensConfConstants.EVENT_SERVICE_THREAD_POOL_SIZE,
-      numProcs));
+      numProcs), factory);
     super.init(hiveConf);
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/73f92430/lens-server/src/main/java/org/apache/lens/server/query/QueryEndNotifier.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryEndNotifier.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryEndNotifier.java
index 110624a..ca00b4d 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryEndNotifier.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryEndNotifier.java
@@ -77,12 +77,17 @@ public class QueryEndNotifier extends AsyncEventListener<QueryEnded> {
 
   private final LogSegregationContext logSegregationContext;
 
+  /** QueryEndNotifier core and max pool size */
+  private static final int CORE_POOL_SIZE = 2;
+  private static final int MAX_POOL_SIZE = 5;
+
   /** Instantiates a new query end notifier.
    *
    * @param queryService the query service
    * @param hiveConf     the hive conf */
   public QueryEndNotifier(QueryExecutionServiceImpl queryService, HiveConf hiveConf,
     @NonNull final LogSegregationContext logSegregationContext) {
+    super(CORE_POOL_SIZE, MAX_POOL_SIZE);
     this.queryService = queryService;
     HiveConf conf = hiveConf;
     from = conf.get(MAIL_FROM_ADDRESS);
@@ -113,23 +118,30 @@ public class QueryEndNotifier extends AsyncEventListener<QueryEnded> {
 
     boolean whetherMailNotify = Boolean.parseBoolean(queryContext.getConf().get(QUERY_MAIL_NOTIFY,
       WHETHER_MAIL_NOTIFY_DEFAULT));
-
     if (!whetherMailNotify) {
       return;
     }
 
-    String queryName = queryContext.getQueryName();
-    String mailSubject = "Query " + (StringUtils.isBlank(queryName) ? "" : (queryName + " "))
-      + queryContext.getStatus().getStatus() + ": " + event.getQueryHandle();
+    try {
+      //Create and Send EMAIL
+      String queryName = queryContext.getQueryName();
+      String mailSubject = "Query " + (StringUtils.isBlank(queryName) ? "" : (queryName + " "))
+        + queryContext.getStatus().getStatus() + ": " + event.getQueryHandle();
 
-    String mailMessage = createMailMessage(queryContext);
+      String mailMessage = createMailMessage(queryContext);
 
-    String to = queryContext.getSubmittedUser() + "@" + queryService.getServerDomain();
+      String to = queryContext.getSubmittedUser() + "@" + queryService.getServerDomain();
 
-    String cc = queryContext.getConf().get(QUERY_RESULT_EMAIL_CC, QUERY_RESULT_DEFAULT_EMAIL_CC);
+      String cc = queryContext.getConf().get(QUERY_RESULT_EMAIL_CC, QUERY_RESULT_DEFAULT_EMAIL_CC);
 
-    log.info("Sending completion email for query handle: {}", event.getQueryHandle());
-    sendMail(host, port, new Email(from, to, cc, mailSubject, mailMessage), mailSmtpTimeout, mailSmtpConnectionTimeout);
+      log.info("Sending completion email for query handle: {}", event.getQueryHandle());
+      sendMail(host, port, new Email(from, to, cc, mailSubject, mailMessage), mailSmtpTimeout,
+          mailSmtpConnectionTimeout);
+    } catch (Exception e) {
+      MetricsService metricsService = LensServices.get().getService(MetricsService.NAME);
+      metricsService.incrCounter(QueryEndNotifier.class, EMAIL_ERROR_COUNTER);
+      log.error("Error sending query end email", e);
+    }
   }
 
   /** Creates the mail message.
@@ -184,38 +196,32 @@ public class QueryEndNotifier extends AsyncEventListener<QueryEnded> {
    * @param mailSmtpTimeout           the mail smtp timeout
    * @param mailSmtpConnectionTimeout the mail smtp connection timeout */
   public static void sendMail(String host, String port,
-    Email email, int mailSmtpTimeout, int mailSmtpConnectionTimeout) {
+    Email email, int mailSmtpTimeout, int mailSmtpConnectionTimeout) throws Exception {
     Properties props = System.getProperties();
     props.put("mail.smtp.host", host);
     props.put("mail.smtp.port", port);
     props.put("mail.smtp.timeout", mailSmtpTimeout);
     props.put("mail.smtp.connectiontimeout", mailSmtpConnectionTimeout);
     Session session = Session.getDefaultInstance(props, null);
-    try {
-      MimeMessage message = new MimeMessage(session);
-      message.setFrom(new InternetAddress(email.getFrom()));
-      for (String recipient : email.getTo().trim().split("\\s*,\\s*")) {
-        message.addRecipients(Message.RecipientType.TO, InternetAddress.parse(recipient));
-      }
-      if (email.getCc() != null && email.getCc().length() > 0) {
-        for (String recipient : email.getCc().trim().split("\\s*,\\s*")) {
-          message.addRecipients(Message.RecipientType.CC, InternetAddress.parse(recipient));
-        }
+    MimeMessage message = new MimeMessage(session);
+    message.setFrom(new InternetAddress(email.getFrom()));
+    for (String recipient : email.getTo().trim().split("\\s*,\\s*")) {
+      message.addRecipients(Message.RecipientType.TO, InternetAddress.parse(recipient));
+    }
+    if (email.getCc() != null && email.getCc().length() > 0) {
+      for (String recipient : email.getCc().trim().split("\\s*,\\s*")) {
+        message.addRecipients(Message.RecipientType.CC, InternetAddress.parse(recipient));
       }
-      message.setSubject(email.getSubject());
-      message.setSentDate(new Date());
+    }
+    message.setSubject(email.getSubject());
+    message.setSentDate(new Date());
 
-      MimeBodyPart messagePart = new MimeBodyPart();
-      messagePart.setText(email.getMessage());
-      Multipart multipart = new MimeMultipart();
+    MimeBodyPart messagePart = new MimeBodyPart();
+    messagePart.setText(email.getMessage());
+    Multipart multipart = new MimeMultipart();
 
-      multipart.addBodyPart(messagePart);
-      message.setContent(multipart);
-      Transport.send(message);
-    } catch (Exception e) {
-      MetricsService metricsService = LensServices.get().getService(MetricsService.NAME);
-      metricsService.incrCounter(QueryEndNotifier.class, EMAIL_ERROR_COUNTER);
-      log.error("Error sending query end email", e);
-    }
+    multipart.addBodyPart(messagePart);
+    message.setContent(multipart);
+    Transport.send(message);
   }
 }
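
For reference, a bare-bones sketch of the JavaMail flow that sendMail wraps. It
assumes javax.mail on the classpath; the host, port and addresses are placeholders,
not Lens defaults. As in the patch, any failure now propagates to the caller instead
of being swallowed here.

import java.util.Date;
import java.util.Properties;

import javax.mail.Message;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;

public class MailSketch {
  public static void main(String[] args) throws Exception {
    Properties props = System.getProperties();
    props.put("mail.smtp.host", "smtp.example.com");
    props.put("mail.smtp.port", "25");
    props.put("mail.smtp.timeout", 30000);           // socket read timeout, ms
    props.put("mail.smtp.connectiontimeout", 15000); // connect timeout, ms
    Session session = Session.getDefaultInstance(props, null);

    MimeMessage message = new MimeMessage(session);
    message.setFrom(new InternetAddress("lens@example.com"));
    message.addRecipients(Message.RecipientType.TO, InternetAddress.parse("user@example.com"));
    message.setSubject("Query FINISHED: <handle>");
    message.setSentDate(new Date());
    message.setText("Result available at ...");
    Transport.send(message); // throws MessagingException on failure
  }
}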

http://git-wip-us.apache.org/repos/asf/lens/blob/73f92430/lens-server/src/main/java/org/apache/lens/server/query/ResultFormatter.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/ResultFormatter.java b/lens-server/src/main/java/org/apache/lens/server/query/ResultFormatter.java
index f568b17..9955278 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/ResultFormatter.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/ResultFormatter.java
@@ -46,6 +46,10 @@ public class ResultFormatter extends AsyncEventListener<QueryExecuted> {
   /** The query service. */
   QueryExecutionServiceImpl queryService;
 
+  /** ResultFormatter core and max pool size */
+  private static final int CORE_POOL_SIZE = 5;
+  private static final int MAX_POOL_SIZE = 10;
+
   private final LogSegregationContext logSegregationContext;
 
   /**
@@ -54,6 +58,7 @@ public class ResultFormatter extends AsyncEventListener<QueryExecuted> {
    * @param queryService the query service
    */
   public ResultFormatter(QueryExecutionServiceImpl queryService, @NonNull LogSegregationContext logSegregationContext) {
+    super(CORE_POOL_SIZE, MAX_POOL_SIZE);
     this.queryService = queryService;
     this.logSegregationContext = logSegregationContext;
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/73f92430/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java
index 702a529..a2ca17f 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java
@@ -20,6 +20,9 @@ package org.apache.lens.server.query;
 
 import static org.testng.Assert.*;
 
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
 import java.util.UUID;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
@@ -503,4 +506,46 @@ public class TestEventService {
 
   }
 
+  @Test
+  public void testAsyncEventListenerPoolThreads() {
+    AsyncEventListener<QuerySuccess> asyncListener = new DummyAsyncEventListener();
+    for (int i = 0; i < 10; i++) {
+      try {
+        // A pool thread is created each time an event is submitted, until the core pool size is reached,
+        // which is 5 for this test case. @see org.apache.lens.server.api.events.AsyncEventListener.processor
+        asyncListener.onEvent(null);
+      } catch (LensException e) {
+        fail("Unexpected exception: " + e.getMessage());
+      }
+    }
+
+    // Verify the core pool threads after the events have been fired
+    ThreadGroup currentTG = Thread.currentThread().getThreadGroup();
+    int count = currentTG.activeCount();
+    Thread[] threads = new Thread[count];
+    currentTG.enumerate(threads);
+    Set<String> asyncThreadNames = new HashSet<String>();
+    for (Thread t : threads) {
+      if (t.getName().contains("DummyAsyncEventListener_AsyncThread")) {
+        asyncThreadNames.add(t.getName());
+      }
+    }
+    assertTrue(asyncThreadNames.containsAll(Arrays.asList(
+      "DummyAsyncEventListener_AsyncThread-1",
+      "DummyAsyncEventListener_AsyncThread-2",
+      "DummyAsyncEventListener_AsyncThread-3",
+      "DummyAsyncEventListener_AsyncThread-4",
+      "DummyAsyncEventListener_AsyncThread-5")));
+  }
+
+  private static class DummyAsyncEventListener extends AsyncEventListener<QuerySuccess> {
+    public DummyAsyncEventListener() {
+      super(5, 10); // core pool size = 5 and max pool size = 10
+    }
+    @Override
+    public void process(QuerySuccess event) {
+      throw new RuntimeException("Simulated Exception");
+    }
+  }
+
 }

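The expectation of exactly five pool threads above follows from standard java.util.concurrent.ThreadPoolExecutor semantics, which AsyncEventListener presumably builds on: a new worker is created per submitted task until the core pool size is reached, after which tasks queue rather than growing the pool toward the maximum (as long as the queue has capacity). A self-contained illustration, independent of Lens:

    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.concurrent.ThreadPoolExecutor;
    import java.util.concurrent.TimeUnit;

    public class CorePoolDemo {
      public static void main(String[] args) {
        ThreadPoolExecutor pool = new ThreadPoolExecutor(
            5, 10, 60L, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());
        for (int i = 0; i < 10; i++) {
          pool.execute(new Runnable() {
            public void run() {
              try {
                Thread.sleep(100); // simulate work
              } catch (InterruptedException ignored) {
              }
            }
          });
        }
        // Prints 5: one worker per task up to corePoolSize; with an unbounded
        // queue, the remaining tasks queue instead of growing the pool to 10.
        System.out.println(pool.getPoolSize());
        pool.shutdown();
      }
    }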

[27/50] [abbrv] lens git commit: LENS-123 : Adds ability to load different instances of same driver class

Posted by sh...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
index 7201e0d..ffd2d42 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
@@ -47,6 +47,7 @@ import org.apache.lens.driver.hive.HiveDriver;
 import org.apache.lens.server.BaseLensService;
 import org.apache.lens.server.LensServerConf;
 import org.apache.lens.server.LensServices;
+import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.driver.*;
 import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.error.LensMultiCauseException;
@@ -72,7 +73,8 @@ import org.apache.lens.server.util.FairPriorityBlockingQueue;
 import org.apache.lens.server.util.UtilityMethods;
 
 import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -86,6 +88,7 @@ import org.slf4j.LoggerFactory;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
+
 import lombok.*;
 import lombok.extern.slf4j.Slf4j;
 
@@ -337,25 +340,9 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
    * @throws LensException the lens exception
    */
   private void loadDriversAndSelector() throws LensException {
-    Class<?>[] driverClasses = conf.getClasses(DRIVER_CLASSES);
-    if (driverClasses != null) {
-      for (Class<?> driverClass : driverClasses) {
-        try {
-          LensDriver driver = (LensDriver) driverClass.newInstance();
-          driver.configure(LensServerConf.getConfForDrivers());
-          if (driver instanceof HiveDriver) {
-            driver.registerDriverEventListener(driverEventListener);
-          }
-          drivers.put(driverClass.getName(), driver);
-          log.info("Driver for {} is loaded", driverClass);
-        } catch (Exception e) {
-          log.warn("Could not load the driver:{}", driverClass, e);
-          throw new LensException("Could not load driver " + driverClass, e);
-        }
-      }
-    } else {
-      throw new LensException("No drivers specified");
-    }
+    //Load all configured Drivers
+    loadDrivers();
+    //Load configured Driver Selector
     try {
       Class<? extends DriverSelector> driverSelectorClass = conf.getClass(DRIVER_SELECTOR_CLASS,
         MinQueryCostSelector.class,
@@ -369,6 +356,87 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
     }
   }
 
+  /**
+   * Loads drivers for the driver types configured in lens-site.xml.
+   *
+   * Each driver's resources (<driver_type>-site.xml and other files) should be present
+   * under the directory conf/drivers/<driver-type>/<driver-name>.
+   * Example: conf/drivers/hive/h1, conf/drivers/hive/h2, conf/drivers/jdbc/mysql1, conf/drivers/jdbc/vertica1
+   *
+   * @throws LensException if no driver types are configured or no drivers could be loaded
+   */
+  private void loadDrivers() throws LensException {
+    Collection<String> driverTypes = conf.getStringCollection(DRIVER_TYPES_AND_CLASSES);
+    if (driverTypes.isEmpty()) {
+      throw new LensException("No drivers configured");
+    }
+    File driversBaseDir = new File(System.getProperty(LensConfConstants.CONFIG_LOCATION,
+        LensConfConstants.DEFAULT_CONFIG_LOCATION), LensConfConstants.DRIVERS_BASE_DIR);
+    if (!driversBaseDir.isDirectory()) {
+      throw new LensException("No drivers found at location " + driversBaseDir.getAbsolutePath());
+    }
+    for (String driverType : driverTypes) {
+      if (StringUtils.isBlank(driverType)) {
+        throw new LensException("Driver type Configuration not specified correctly. Encountered blank driver type");
+      }
+      String[] driverTypeAndClass = StringUtils.split(driverType.trim(), ':');
+      if (driverTypeAndClass.length != 2) {
+        throw new LensException("Driver type Configuration not specified correctly : " + driverType);
+      }
+      loadDriversForType(driverTypeAndClass[0], driverTypeAndClass[1], driversBaseDir);
+    }
+    if (drivers.isEmpty()) {
+      throw new LensException("No drivers loaded. Please check the drivers in: " + driversBaseDir);
+    }
+  }
+  /**
+   * Loads all drivers of a particular type.
+   *
+   * @param driverType type of the driver (hive, jdbc, el, etc.)
+   * @param driverTypeClassName driver class name
+   * @param driversBaseDir path of the drivers directory under which all driver-related resources are available
+   * @throws LensException if the driver type class cannot be loaded or a driver fails to initialize
+   */
+  private void loadDriversForType(String driverType, String driverTypeClassName, File driversBaseDir)
+    throws LensException {
+    File driverTypeBaseDir = new File(driversBaseDir, driverType);
+    File[] driverPaths = driverTypeBaseDir.listFiles();
+    if (!driverTypeBaseDir.isDirectory() || driverPaths == null || driverPaths.length == 0) {
+      // The deployment may not have drivers of this type. Log and ignore.
+      log.warn("No drivers of type {} found in {}.", driverType, driverTypeBaseDir.getAbsolutePath());
+      return;
+    }
+    Class driverTypeClass = null;
+    try {
+      driverTypeClass = conf.getClassByName(driverTypeClassName);
+    } catch (Exception e) {
+      log.error("Could not load the driver type class {}", driverTypeClassName, e);
+      throw new LensException("Could not load Driver type class " + driverTypeClassName);
+    }
+    LensDriver driver = null;
+    String driverName = null;
+    for (File driverPath : driverPaths) {
+      try {
+        if (!driverPath.isDirectory()) {
+          log.warn("Ignoring resource {} while loading drivers. A driver directory was expected instead",
+              driverPath.getAbsolutePath());
+          continue;
+        }
+        driverName = driverPath.getName();
+        driver = (LensDriver) driverTypeClass.newInstance();
+        driver.configure(LensServerConf.getConfForDrivers(), driverType, driverName);
+        // Register listener for all drivers. Drivers can choose to ignore this registration. As of now only Hive
+        // Driver supports driver event listeners.
+        driver.registerDriverEventListener(driverEventListener);
+        drivers.put(driver.getFullyQualifiedName(), driver);
+        log.info("Driver {} for type {} is loaded", driverPath.getName(), driverType);
+      } catch (Exception e) {
+        log.error("Could not load driver {} of type {}", driverPath.getName(), driverType, e);
+        throw new LensException("Could not load driver "+driverPath.getName()+ " of type "+ driverType);
+      }
+    }
+  }
+
   private MetricsService getMetrics() {
     if (metricsService == null) {
       metricsService = LensServices.get().getService(MetricsService.NAME);
@@ -2088,7 +2156,7 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
         long querySubmitTime = context.getSubmissionTime();
         if ((filterByStatus && status != context.getStatus().getStatus())
           || (filterByQueryName && !context.getQueryName().toLowerCase().contains(queryName))
-          || (filterByDriver && !context.getSelectedDriver().getClass().getName().equalsIgnoreCase(driver))
+          || (filterByDriver && !context.getSelectedDriver().getFullyQualifiedName().equalsIgnoreCase(driver))
           || (!"all".equalsIgnoreCase(userName) && !userName.equalsIgnoreCase(context.getSubmittedUser()))
           || (!(fromDate <= querySubmitTime && querySubmitTime <= toDate))) {
           itr.remove();
@@ -2305,21 +2373,25 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
     // Restore drivers
     synchronized (drivers) {
       int numDrivers = in.readInt();
+      String driverQualifiedName;
+      String driverClsName;
       for (int i = 0; i < numDrivers; i++) {
-        String driverClsName = in.readUTF();
-        LensDriver driver = drivers.get(driverClsName);
+        driverQualifiedName = in.readUTF();
+        driverClsName = in.readUTF();
+        LensDriver driver = drivers.get(driverQualifiedName);
         if (driver == null) {
           // this driver is removed in the current server restart
           // we will create an instance and read its state still.
           try {
             Class<? extends LensDriver> driverCls = (Class<? extends LensDriver>) Class.forName(driverClsName);
             driver = (LensDriver) driverCls.newInstance();
-            driver.configure(conf);
+            String[] driverTypeAndName = StringUtils.split(driverQualifiedName, '/');
+            driver.configure(conf, driverTypeAndName[0], driverTypeAndName[1]);
           } catch (Exception e) {
-            log.error("Could not instantiate driver:{}", driverClsName, e);
+            log.error("Could not instantiate driver:{} represented by class {}", driverQualifiedName, driverClsName, e);
             throw new IOException(e);
           }
-          log.info("Driver state for {} will be ignored", driverClsName);
+          log.info("Driver state for {} will be ignored", driverQualifiedName);
         }
         driver.readExternal(in);
       }
@@ -2342,8 +2414,8 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
         // set the selected driver if available, if not available for the cases of queued queries,
         // query service will do the selection from existing drivers and update
         if (driverAvailable) {
-          String clsName = in.readUTF();
-          ctx.getDriverContext().setSelectedDriver(drivers.get(clsName));
+          String selectedDriverQualifiedName = in.readUTF();
+          ctx.getDriverContext().setSelectedDriver(drivers.get(selectedDriverQualifiedName));
           ctx.setDriverQuery(ctx.getSelectedDriver(), ctx.getSelectedDriverQuery());
         }
         allQueries.put(ctx.getQueryHandle(), ctx);
@@ -2373,6 +2445,7 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
           break;
         case CLOSED:
           allQueries.remove(ctx.getQueryHandle());
+          log.info("Removed closed query from all Queries:"+ctx.getQueryHandle());
         }
       }
       queuedQueries.addAll(allRestoredQueuedQueries);
@@ -2391,8 +2464,11 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
     // persist all drivers
     synchronized (drivers) {
       out.writeInt(drivers.size());
-      for (LensDriver driver : drivers.values()) {
+      LensDriver driver = null;
+      for (Map.Entry<String, LensDriver> driverEntry : drivers.entrySet()) {
+        driver = driverEntry.getValue();
         synchronized (driver) {
+          out.writeUTF(driverEntry.getKey());
           out.writeUTF(driver.getClass().getName());
           driver.writeExternal(out);
         }
@@ -2407,7 +2483,7 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
           boolean isDriverAvailable = (ctx.getSelectedDriver() != null);
           out.writeBoolean(isDriverAvailable);
           if (isDriverAvailable) {
-            out.writeUTF(ctx.getSelectedDriver().getClass().getName());
+            out.writeUTF(ctx.getSelectedDriver().getFullyQualifiedName());
           }
         }
       }
@@ -2616,19 +2692,19 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
       List<ResourceEntry> resources = session.getLensSessionPersistInfo().getResources();
       if (resources != null && !resources.isEmpty()) {
         for (ResourceEntry resource : resources) {
-          log.info("Restoring resource {} for session {}", resource, lensSession);
+          log.info("{} Restoring resource {} for session {}", hiveDriver, resource, lensSession);
           String command = "add " + resource.getType().toLowerCase() + " " + resource.getLocation();
           try {
             // Execute add resource query in blocking mode
             hiveDriver.execute(createResourceQuery(command, sessionHandle, hiveDriver));
             resource.restoredResource();
-            log.info("Restored resource {} for session {}", resource, lensSession);
+            log.info("{} Restored resource {} for session {}", hiveDriver, resource, lensSession);
           } catch (Exception exc) {
-            log.error("Unable to add resource {} for session {}", resource, lensSession, exc);
+            log.error("{} Unable to add resource {} for session {}", hiveDriver, resource, lensSession, exc);
           }
         }
       } else {
-        log.info("No resources to restore for session {}", lensSession);
+        log.info("{} No resources to restore for session {}", hiveDriver, lensSession);
       }
     } catch (Exception e) {
       log.warn(
@@ -2730,7 +2806,7 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
 
     String command = "add " + res.getType().toLowerCase() + " " + uri;
     driver.execute(createResourceQuery(command, sessionHandle, driver));
-    log.info("Added resource to hive driver for session {} cmd: {}", sessionIdentifier, command);
+    log.info("Added resource to hive driver {} for session {} cmd: {}", driver, sessionIdentifier, command);
   }
 
   private boolean removeFromLaunchedQueries(final QueryContext finishedQuery) {

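Taken together, loadDrivers() and loadDriversForType() above expect an on-disk layout like the following under the configured CONFIG_LOCATION (the h1/h2/mysql1 names are the illustrative examples from the javadoc; each leaf directory holds that instance's <driver_type>-site.xml and related resources):

    conf/
      drivers/
        hive/
          h1/hivedriver-site.xml
          h2/hivedriver-site.xml
        jdbc/
          mysql1/jdbcdriver-site.xml

A driver loaded from conf/drivers/hive/h1 is then registered under the fully qualified name hive/h1, which is also what writeExternal() persists and readExternal() matches against on restart.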
http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java b/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java
index 6c464fb..abec2b3 100644
--- a/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java
+++ b/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java
@@ -368,7 +368,7 @@ public final class RewriteUtil {
       log.warn("Driver : {}  Skipped for the query rewriting due to ", driver, e);
       ctx.setDriverRewriteError(driver, e);
       failureCause = new StringBuilder(" Driver :")
-          .append(driver.getClass().getName())
+          .append(driver.getFullyQualifiedName())
           .append(" Cause :" + e.getLocalizedMessage())
           .toString();
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java b/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java
index 9d8f198..cc62d92 100644
--- a/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.io.ObjectInput;
 import java.io.ObjectOutput;
 import java.util.*;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import javax.ws.rs.NotFoundException;
 
@@ -435,13 +436,11 @@ public class LensSessionImpl extends HiveSessionImpl {
     final String location;
     // For tests
     /** The restore count. */
-    @Getter
-    transient int restoreCount;
+    transient AtomicInteger restoreCount = new AtomicInteger();
 
     /** Set of databases for which this resource has been added */
     final transient Set<String> databases = new HashSet<String>();
 
-
     /**
      * Instantiates a new resource entry.
      *
@@ -468,7 +467,15 @@ public class LensSessionImpl extends HiveSessionImpl {
      * Restored resource.
      */
     public void restoredResource() {
-      restoreCount++;
+      restoreCount.incrementAndGet();
+    }
+
+    /**
+     * Returns the value of restoreCount for the resource.
+     * @return the current restore count
+     */
+    public int getRestoreCount() {
+      return restoreCount.get();
     }
 
     /*

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/main/resources/lensserver-default.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/main/resources/lensserver-default.xml b/lens-server/src/main/resources/lensserver-default.xml
index 5f268cb..39b72dc 100644
--- a/lens-server/src/main/resources/lensserver-default.xml
+++ b/lens-server/src/main/resources/lensserver-default.xml
@@ -24,7 +24,7 @@
 <configuration>
   <property>
     <name>lens.server.drivers</name>
-    <value>org.apache.lens.driver.hive.HiveDriver</value>
+    <value>hive:org.apache.lens.driver.hive.HiveDriver</value>
     <description>Drivers enabled for this lens server instance</description>
   </property>
 

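Since lens.server.drivers is parsed as a comma-separated list of type:class pairs (see loadDrivers() above), a deployment enabling both Hive and JDBC drivers could plausibly set it as follows; the JDBC class name comes from the Lens JDBC driver module and is shown here for illustration:

    <property>
      <name>lens.server.drivers</name>
      <value>hive:org.apache.lens.driver.hive.HiveDriver,jdbc:org.apache.lens.driver.jdbc.JDBCDriver</value>
      <description>Drivers enabled for this lens server instance</description>
    </property>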
http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java b/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
index 8ba9353..8f52ddd 100644
--- a/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
+++ b/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
@@ -33,6 +33,7 @@ import java.util.concurrent.ConcurrentLinkedQueue;
 import javax.ws.rs.core.UriBuilder;
 
 import org.apache.lens.driver.hive.TestRemoteHiveDriver;
+import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.metrics.LensMetricsUtil;
 import org.apache.lens.server.api.metrics.MetricsService;
 import org.apache.lens.server.model.LogSegregationContext;
@@ -48,6 +49,7 @@ import org.testng.annotations.AfterSuite;
 import org.testng.annotations.BeforeSuite;
 
 import com.google.common.collect.Lists;
+
 import lombok.extern.slf4j.Slf4j;
 
 /**
@@ -120,6 +122,7 @@ public abstract class LensJerseyTest extends JerseyTest {
   public void startAll() throws Exception {
     log.info("Before suite");
     System.setProperty("lens.log.dir", "target/");
+    System.setProperty(LensConfConstants.CONFIG_LOCATION, "target/test-classes/");
     TestRemoteHiveDriver.createHS2Service();
     System.out.println("Remote hive server started!");
     HiveConf hiveConf = new HiveConf();

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java b/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
index 7b6c560..877200f 100644
--- a/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
+++ b/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
@@ -70,6 +70,11 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
   /** The data file. */
   private File dataFile;
 
+  /**
+   * Number of valid Hive drivers that can execute queries in this test class.
+   */
+  private static final int NO_OF_HIVE_DRIVERS = 2;
+
   /*
    * (non-Javadoc)
    *
@@ -347,7 +352,10 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
     // Now we can expect that session resources have been added back exactly once
     for (int i = 0; i < sessionResources.size(); i++) {
       LensSessionImpl.ResourceEntry resourceEntry = sessionResources.get(i);
-      assertEquals(resourceEntry.getRestoreCount(), 1 + restoreCounts[i],
+      // The restore count can vary based on how many Hive drivers were able to execute the estimate on the query
+      // successfully after the Hive server restart.
+      Assert.assertTrue((resourceEntry.getRestoreCount() > restoreCounts[i]
+          && resourceEntry.getRestoreCount() <= restoreCounts[i] + NO_OF_HIVE_DRIVERS),
           "Restore test failed for " + resourceEntry + " pre count=" + restoreCounts[i] + " post count=" + resourceEntry
               .getRestoreCount());
       log.info("@@ Latest count {}={}", resourceEntry, resourceEntry.getRestoreCount());

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java
index 1dab35e..702a529 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestEventService.java
@@ -30,6 +30,7 @@ import org.apache.lens.api.query.QueryStatus;
 import org.apache.lens.server.EventServiceImpl;
 import org.apache.lens.server.LensServerConf;
 import org.apache.lens.server.LensServices;
+import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.events.AsyncEventListener;
 import org.apache.lens.server.api.events.LensEvent;
@@ -275,6 +276,7 @@ public class TestEventService {
    */
   @BeforeTest
   public void setup() throws Exception {
+    System.setProperty(LensConfConstants.CONFIG_LOCATION, "target/test-classes/");
     LensServices.get().init(LensServerConf.getHiveConf());
     LensServices.get().start();
     service = LensServices.get().getService(LensEventService.NAME);

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/test/java/org/apache/lens/server/query/TestLensDAO.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestLensDAO.java b/lens-server/src/test/java/org/apache/lens/server/query/TestLensDAO.java
index bc1463f..01e846a 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestLensDAO.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestLensDAO.java
@@ -141,7 +141,7 @@ public class TestLensDAO extends LensJerseyTest {
 
     System.out.println("@@ State = " + queryContext.getStatus().getStatus().name());
     List<QueryHandle> daoTestQueryHandles = service.lensServerDao.findFinishedQueries(finishedLensQuery.getStatus(),
-      queryContext.getSubmittedUser(), queryContext.getSelectedDriver().getClass().getName(), "daotestquery1", -1L,
+        queryContext.getSubmittedUser(), queryContext.getSelectedDriver().getFullyQualifiedName(), "daotestquery1", -1L,
       Long.MAX_VALUE);
     Assert.assertEquals(daoTestQueryHandles.size(), 1);
     Assert.assertEquals(daoTestQueryHandles.get(0).getHandleId().toString(), finishedHandle);

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/test/java/org/apache/lens/server/query/TestQueryConstraints.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryConstraints.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryConstraints.java
index eb94c89..ab42a3d 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryConstraints.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryConstraints.java
@@ -19,7 +19,6 @@
 package org.apache.lens.server.query;
 
 import static org.apache.lens.server.api.LensConfConstants.QUERY_METRIC_UNIQUE_ID_CONF_KEY;
-import static org.apache.lens.server.api.util.LensUtil.getImplementations;
 
 import static org.testng.Assert.*;
 
@@ -39,7 +38,6 @@ import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.LensServerAPITestUtil;
 import org.apache.lens.server.api.driver.DriverSelector;
 import org.apache.lens.server.api.driver.LensDriver;
-import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.metrics.MetricsService;
 import org.apache.lens.server.api.query.AbstractQueryContext;
 import org.apache.lens.server.api.query.QueryExecutionService;
@@ -52,13 +50,16 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.glassfish.jersey.client.ClientConfig;
 import org.glassfish.jersey.media.multipart.MultiPartFeature;
 import org.glassfish.jersey.test.TestProperties;
+
 import org.testng.annotations.AfterMethod;
 import org.testng.annotations.AfterTest;
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
 import com.beust.jcommander.internal.Lists;
+
 import com.google.common.base.Optional;
+
 import lombok.extern.slf4j.Slf4j;
 
 /**
@@ -69,52 +70,6 @@ import lombok.extern.slf4j.Slf4j;
 public class TestQueryConstraints extends LensJerseyTest {
   private HiveConf serverConf;
 
-  public static class MockHiveDriverBase extends HiveDriver {
-
-    private final Configuration customConf;
-
-    /**
-     * Instantiates a new hive driver.
-     *
-     * @throws LensException the lens exception
-     */
-    public MockHiveDriverBase() throws LensException {
-      customConf = new Configuration();
-      customConf.setInt("driver.max.concurrent.launched.queries", 2);
-      customConf.set(HiveDriver.QUERY_LAUNCHING_CONSTRAINT_FACTORIES_KEY,
-        "org.apache.lens.server.api.query.constraint.MaxConcurrentDriverQueriesConstraintFactory");
-    }
-
-    @Override
-    public void configure(Configuration conf) throws LensException {
-      super.configure(conf);
-      queryConstraints = getImplementations(HiveDriver.QUERY_LAUNCHING_CONSTRAINT_FACTORIES_KEY, customConf);
-    }
-  }
-
-  public static class HiveDriver1 extends MockHiveDriverBase {
-
-    /**
-     * Instantiates a new hive driver.
-     *
-     * @throws LensException the lens exception
-     */
-    public HiveDriver1() throws LensException {
-
-    }
-  }
-
-  public static class HiveDriver2 extends MockHiveDriverBase {
-
-    /**
-     * Instantiates a new hive driver.
-     *
-     * @throws LensException the lens exception
-     */
-    public HiveDriver2() throws LensException {
-    }
-  }
-
   public static class RoundRobinSelector implements DriverSelector {
     int counter = 0;
 
@@ -158,8 +113,8 @@ public class TestQueryConstraints extends LensJerseyTest {
   public HiveConf getServerConf() {
     if (serverConf == null) {
       serverConf = new HiveConf(super.getServerConf());
-      serverConf.set(LensConfConstants.DRIVER_CLASSES,
-        HiveDriver1.class.getName() + "," + HiveDriver2.class.getName());
+      // Lets test only mockHive. updating lens server conf for same
+      serverConf.set(LensConfConstants.DRIVER_TYPES_AND_CLASSES, "mockHive:" + HiveDriver.class.getName());
       serverConf.set("lens.server.driver.selector.class", RoundRobinSelector.class.getName());
       LensServerConf.getConfForDrivers().addResource(serverConf);
     }
@@ -265,11 +220,6 @@ public class TestQueryConstraints extends LensJerseyTest {
     QueryExecutionServiceImpl.QueryCount count = queryService.getQueryCountSnapshot();
     assertTrue(count.running <= 4, System.currentTimeMillis() + " " + count.running + " running queries: "
       + queryService.getLaunchedQueries());
-    if (count.running == 4) {
-      assertEquals(count.queued, 0);
-    } else {
-      assertEquals(count.waiting, 0);
-    }
   }
 
   private QueryHandle launchQuery() {

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index c8a1cc6..f6693aa 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -222,6 +222,18 @@ public class TestQueryService extends LensJerseyTest {
     assertEquals(rs.getStatus(), 400);
   }
 
+  @Test
+  public void testLoadingMultipleDrivers() {
+    Collection<LensDriver> drivers = queryService.getDrivers();
+    assertEquals(drivers.size(), 4);
+    Set<String> driverNames = new HashSet<String>(drivers.size());
+    for (LensDriver driver : drivers) {
+      assertEquals(driver.getConf().get("lens.driver.test.drivername"), driver.getFullyQualifiedName());
+      driverNames.add(driver.getFullyQualifiedName());
+    }
+    assertTrue(driverNames.containsAll(Arrays.asList("hive/hive1", "hive/hive2", "jdbc/jdbc1", "mock/fail1")));
+  }
+
   /**
    * Test rewrite failure in execute operation.
    *
@@ -464,8 +476,8 @@ public class TestQueryService extends LensJerseyTest {
       .get(LensPreparedQuery.class);
     assertTrue(ctx.getUserQuery().equalsIgnoreCase("select ID from " + TEST_TABLE));
     assertTrue(ctx.getDriverQuery().equalsIgnoreCase("select ID from " + TEST_TABLE));
-    assertEquals(ctx.getSelectedDriverClassName(),
-      org.apache.lens.driver.hive.HiveDriver.class.getCanonicalName());
+    // Both drivers hive/hive1 and hive/hive2 are capable of handling the query, as they point to the same Hive server
+    assertTrue(ctx.getSelectedDriverName().equals("hive/hive1") || ctx.getSelectedDriverName().equals("hive/hive2"));
     assertNull(ctx.getConf().getProperties().get("my.property"));
 
     // Update conf for prepared query
@@ -539,8 +551,8 @@ public class TestQueryService extends LensJerseyTest {
       .request().get(LensPreparedQuery.class);
     assertTrue(ctx.getUserQuery().equalsIgnoreCase("select ID from " + TEST_TABLE));
     assertTrue(ctx.getDriverQuery().equalsIgnoreCase("select ID from " + TEST_TABLE));
-    assertEquals(ctx.getSelectedDriverClassName(),
-      org.apache.lens.driver.hive.HiveDriver.class.getCanonicalName());
+    // Both drivers hive/hive1 and hive/hive2 are capable of handling the query, as they point to the same Hive server
+    assertTrue(ctx.getSelectedDriverName().equals("hive/hive1") || ctx.getSelectedDriverName().equals("hive/hive2"));
     assertNull(ctx.getConf().getProperties().get("my.property"));
 
     // Update conf for prepared query
@@ -1275,6 +1287,9 @@ public class TestQueryService extends LensJerseyTest {
         if (driver instanceof HiveDriver) {
           addedToHiveDriver =
             ((HiveDriver) driver).areDBResourcesAddedForSession(sessionHandle.getPublicId().toString(), DB_WITH_JARS);
+          if (addedToHiveDriver) {
+            break; // There are now two Hive drivers, both pointing to the same Hive server, so break after the first success
+          }
         }
       }
       assertTrue(addedToHiveDriver);
@@ -1396,12 +1411,15 @@ public class TestQueryService extends LensJerseyTest {
 
     assertTrue(reg.getGauges().keySet().containsAll(Arrays.asList(
         "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-DRIVER_SELECTION",
-        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-HiveDriver-CUBE_REWRITE",
-        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-HiveDriver-DRIVER_ESTIMATE",
-        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-HiveDriver-RewriteUtil-rewriteQuery",
-        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-JDBCDriver-CUBE_REWRITE",
-        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-JDBCDriver-DRIVER_ESTIMATE",
-        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-JDBCDriver-RewriteUtil-rewriteQuery",
+        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive1-CUBE_REWRITE",
+        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive1-DRIVER_ESTIMATE",
+        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive1-RewriteUtil-rewriteQuery",
+        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive2-CUBE_REWRITE",
+        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive2-DRIVER_ESTIMATE",
+        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-hive/hive2-RewriteUtil-rewriteQuery",
+        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-jdbc/jdbc1-CUBE_REWRITE",
+        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-jdbc/jdbc1-DRIVER_ESTIMATE",
+        "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-jdbc/jdbc1-RewriteUtil-rewriteQuery",
         "lens.MethodMetricGauge.TestQueryService-testEstimateGauges-PARALLEL_ESTIMATE")),
       reg.getGauges().keySet().toString());
   }

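The assertions above depend on getFullyQualifiedName() returning the driver's type and name joined by a slash (hive/hive1, jdbc/jdbc1, and so on). A hedged sketch of what such an implementation presumably looks like inside a driver; only configure(conf, type, name) and getFullyQualifiedName() appear in this diff, so the field names and body are guesses:

    private String driverType;  // e.g. "hive"
    private String driverName;  // e.g. "hive1"

    @Override
    public void configure(Configuration conf, String driverType, String driverName) throws LensException {
      this.driverType = driverType;
      this.driverName = driverName;
      // ... load this instance's <driverType>driver-site.xml resources ...
    }

    @Override
    public String getFullyQualifiedName() {
      return driverType + "/" + driverName; // e.g. "hive/hive1"
    }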
http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java b/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java
index 2827b96..202db82 100644
--- a/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java
+++ b/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java
@@ -177,7 +177,7 @@ public class TestRewriting {
     MockDriver driver = new MockDriver();
     LensConf lensConf = new LensConf();
     Configuration conf = new Configuration();
-    driver.configure(conf);
+    driver.configure(conf, null, null);
     drivers.add(driver);
 
     CubeQueryRewriter mockWriter = getMockedRewriter();
@@ -190,7 +190,7 @@ public class TestRewriting {
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
     conf.set(LensConfConstants.QUERY_METRIC_UNIQUE_ID_CONF_KEY, TestRewriting.class.getSimpleName());
-    driver.configure(conf);
+    driver.configure(conf, null, null);
     String q2 = "cube select name from table";
     Assert.assertTrue(RewriteUtil.isCubeQuery(q2));
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
@@ -201,8 +201,8 @@ public class TestRewriting {
     MetricRegistry reg = LensMetricsRegistry.getStaticRegistry();
 
     Assert.assertTrue(reg.getGauges().keySet().containsAll(Arrays.asList(
-      "lens.MethodMetricGauge.TestRewriting-MockDriver-RewriteUtil-rewriteQuery",
-      "lens.MethodMetricGauge.TestRewriting-MockDriver-1-RewriteUtil-rewriteQuery-toHQL")));
+      "lens.MethodMetricGauge.TestRewriting-"+driver.getFullyQualifiedName()+"-RewriteUtil-rewriteQuery",
+      "lens.MethodMetricGauge.TestRewriting-"+driver.getFullyQualifiedName()+"-1-RewriteUtil-rewriteQuery-toHQL")));
     conf.unset(LensConfConstants.QUERY_METRIC_UNIQUE_ID_CONF_KEY);
 
     q2 = "insert overwrite directory 'target/rewrite' cube select name from table";
@@ -290,9 +290,10 @@ public class TestRewriting {
     runRewrites(RewriteUtil.rewriteQuery(ctx));
     reg = LensMetricsRegistry.getStaticRegistry();
     Assert.assertTrue(reg.getGauges().keySet().containsAll(Arrays.asList(
-      "lens.MethodMetricGauge.TestRewriting-multiple-MockDriver-1-RewriteUtil-rewriteQuery-toHQL",
-      "lens.MethodMetricGauge.TestRewriting-multiple-MockDriver-2-RewriteUtil-rewriteQuery-toHQL",
-      "lens.MethodMetricGauge.TestRewriting-multiple-MockDriver-RewriteUtil-rewriteQuery")));
+      "lens.MethodMetricGauge.TestRewriting-"+driver.getFullyQualifiedName()+"-1-RewriteUtil-rewriteQuery-toHQL",
+      "lens.MethodMetricGauge.TestRewriting-multiple-"+driver.getFullyQualifiedName()
+        +"-2-RewriteUtil-rewriteQuery-toHQL",
+      "lens.MethodMetricGauge.TestRewriting-multiple-"+driver.getFullyQualifiedName()+"-RewriteUtil-rewriteQuery")));
     conf.unset(LensConfConstants.QUERY_METRIC_UNIQUE_ID_CONF_KEY);
 
     q2 = "select * from (cube select name from table) a full outer join"
@@ -418,7 +419,7 @@ public class TestRewriting {
 
     // failing query for second driver
     MockDriver driver2 = new MockDriver();
-    driver2.configure(conf);
+    driver2.configure(conf, null, null);
     drivers.add(driver2);
 
     Assert.assertEquals(drivers.size(), 2);

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/test/resources/drivers/hive/hive1/hivedriver-site.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/test/resources/drivers/hive/hive1/hivedriver-site.xml b/lens-server/src/test/resources/drivers/hive/hive1/hivedriver-site.xml
new file mode 100644
index 0000000..6362473
--- /dev/null
+++ b/lens-server/src/test/resources/drivers/hive/hive1/hivedriver-site.xml
@@ -0,0 +1,85 @@
+<?xml version="1.0"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+
+  <property>
+    <name>lens.driver.hive.connection.class</name>
+    <value>org.apache.lens.driver.hive.RemoteThriftConnection</value>
+    <description>The connection class from HiveDriver to HiveServer.</description>
+  </property>
+
+  <property>
+    <name>hive.metastore.local</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>hive.metastore.warehouse.dir</name>
+    <value>${project.build.directory}/hive/warehouse</value>
+  </property>
+
+  <property>
+    <name>javax.jdo.option.ConnectionURL</name>
+    <value>jdbc:derby:;databaseName=target/metastore_db;create=true</value>
+    <description>JDBC connect string for a JDBC metastore</description>
+  </property>
+
+  <property>
+    <name>hive.lock.manager</name>
+    <value>org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.bind.host</name>
+    <value>localhost</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.port</name>
+    <value>12345</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.client.retry.limit</name>
+    <value>3</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.client.connect.retry.limit</name>
+    <value>3</value>
+  </property>
+
+  <property>
+    <name>lens.driver.test.key</name>
+    <value>set</value>
+  </property>
+
+  <property>
+    <name>lens.driver.test.drivername</name>
+    <value>hive/hive1</value>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/test/resources/drivers/hive/hive2/hivedriver-site.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/test/resources/drivers/hive/hive2/hivedriver-site.xml b/lens-server/src/test/resources/drivers/hive/hive2/hivedriver-site.xml
new file mode 100644
index 0000000..d5be96e
--- /dev/null
+++ b/lens-server/src/test/resources/drivers/hive/hive2/hivedriver-site.xml
@@ -0,0 +1,85 @@
+<?xml version="1.0"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+
+  <property>
+    <name>lens.driver.hive.connection.class</name>
+    <value>org.apache.lens.driver.hive.RemoteThriftConnection</value>
+    <description>The connection class from HiveDriver to HiveServer.</description>
+  </property>
+
+  <property>
+    <name>hive.metastore.local</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>hive.metastore.warehouse.dir</name>
+    <value>${project.build.directory}/hive/warehouse</value>
+  </property>
+
+  <property>
+    <name>javax.jdo.option.ConnectionURL</name>
+    <value>jdbc:derby:;databaseName=target/metastore_db;create=true</value>
+    <description>JDBC connect string for a JDBC metastore</description>
+  </property>
+
+  <property>
+    <name>hive.lock.manager</name>
+    <value>org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.bind.host</name>
+    <value>localhost</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.port</name>
+    <value>12345</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.client.retry.limit</name>
+    <value>3</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.client.connect.retry.limit</name>
+    <value>3</value>
+  </property>
+
+  <property>
+    <name>lens.driver.test.key</name>
+    <value>set</value>
+  </property>
+
+  <property>
+    <name>lens.driver.test.drivername</name>
+    <value>hive/hive2</value>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/test/resources/drivers/jdbc/jdbc1/jdbcdriver-site.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/test/resources/drivers/jdbc/jdbc1/jdbcdriver-site.xml b/lens-server/src/test/resources/drivers/jdbc/jdbc1/jdbcdriver-site.xml
new file mode 100644
index 0000000..9ed0c87
--- /dev/null
+++ b/lens-server/src/test/resources/drivers/jdbc/jdbc1/jdbcdriver-site.xml
@@ -0,0 +1,58 @@
+<?xml version="1.0"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+  <property>
+    <name>lens.driver.jdbc.driver.class</name>
+    <value>org.hsqldb.jdbcDriver</value>
+  </property>
+  <property>
+    <name>lens.driver.jdbc.db.uri</name>
+    <value>jdbc:hsqldb:./target/db-storage.db;MODE=MYSQL</value>
+  </property>
+  <property>
+    <name>lens.driver.jdbc.db.user</name>
+    <value>SA</value>
+  </property>
+  <property>
+    <name>lens.cube.query.driver.supported.storages</name>
+    <value>mydb</value>
+    <final>true</final>
+  </property>
+  <property>
+    <name>lens.driver.jdbc.query.rewriter</name>
+    <value>org.apache.lens.driver.jdbc.ColumnarSQLRewriter</value>
+  </property>
+  <property>
+    <name>lens.driver.jdbc.explain.keyword</name>
+    <value>explain plan for</value>
+  </property>
+  <property>
+    <name>lens.driver.test.key</name>
+    <value>set</value>
+  </property>
+  <property>
+    <name>lens.driver.test.drivername</name>
+    <value>jdbc/jdbc1</value>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/test/resources/drivers/mock/fail1/failing-query-driver-site.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/test/resources/drivers/mock/fail1/failing-query-driver-site.xml b/lens-server/src/test/resources/drivers/mock/fail1/failing-query-driver-site.xml
new file mode 100644
index 0000000..b836282
--- /dev/null
+++ b/lens-server/src/test/resources/drivers/mock/fail1/failing-query-driver-site.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~ http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+  <property>
+    <name>lens.driver.test.key</name>
+    <value>set</value>
+  </property>
+
+  <property>
+    <name>lens.driver.test.drivername</name>
+    <value>mock/fail1</value>
+  </property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/test/resources/drivers/mockHive/mockHive1/hivedriver-site.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/test/resources/drivers/mockHive/mockHive1/hivedriver-site.xml b/lens-server/src/test/resources/drivers/mockHive/mockHive1/hivedriver-site.xml
new file mode 100644
index 0000000..723e9a1
--- /dev/null
+++ b/lens-server/src/test/resources/drivers/mockHive/mockHive1/hivedriver-site.xml
@@ -0,0 +1,95 @@
+<?xml version="1.0"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+
+  <property>
+    <name>lens.driver.hive.connection.class</name>
+    <value>org.apache.lens.driver.hive.RemoteThriftConnection</value>
+    <description>The connection class from HiveDriver to HiveServer.</description>
+  </property>
+
+  <property>
+    <name>hive.metastore.local</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>hive.metastore.warehouse.dir</name>
+    <value>${project.build.directory}/hive/warehouse</value>
+  </property>
+
+  <property>
+    <name>javax.jdo.option.ConnectionURL</name>
+    <value>jdbc:derby:;databaseName=target/metastore_db;create=true</value>
+    <description>JDBC connect string for a JDBC metastore</description>
+  </property>
+
+  <property>
+    <name>hive.lock.manager</name>
+    <value>org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.bind.host</name>
+    <value>localhost</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.port</name>
+    <value>12345</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.client.retry.limit</name>
+    <value>3</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.client.connect.retry.limit</name>
+    <value>3</value>
+  </property>
+
+  <property>
+    <name>lens.driver.test.key</name>
+    <value>set</value>
+  </property>
+
+  <property>
+    <name>lens.driver.test.drivername</name>
+    <value>mockHive/mockHive1</value>
+  </property>
+
+  <property>
+    <name>driver.max.concurrent.launched.queries</name>
+    <value>2</value>
+  </property>
+
+  <property>
+    <name>lens.driver.hive.query.launching.constraint.factories</name>
+    <value>org.apache.lens.server.api.query.constraint.MaxConcurrentDriverQueriesConstraintFactory</value>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/test/resources/drivers/mockHive/mockHive2/hivedriver-site.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/test/resources/drivers/mockHive/mockHive2/hivedriver-site.xml b/lens-server/src/test/resources/drivers/mockHive/mockHive2/hivedriver-site.xml
new file mode 100644
index 0000000..50f82da
--- /dev/null
+++ b/lens-server/src/test/resources/drivers/mockHive/mockHive2/hivedriver-site.xml
@@ -0,0 +1,95 @@
+<?xml version="1.0"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+
+  <property>
+    <name>lens.driver.hive.connection.class</name>
+    <value>org.apache.lens.driver.hive.RemoteThriftConnection</value>
+    <description>The connection class from HiveDriver to HiveServer.</description>
+  </property>
+
+  <property>
+    <name>hive.metastore.local</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>hive.metastore.warehouse.dir</name>
+    <value>${project.build.directory}/hive/warehouse</value>
+  </property>
+
+  <property>
+    <name>javax.jdo.option.ConnectionURL</name>
+    <value>jdbc:derby:;databaseName=target/metastore_db;create=true</value>
+    <description>JDBC connect string for a JDBC metastore</description>
+  </property>
+
+  <property>
+    <name>hive.lock.manager</name>
+    <value>org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.bind.host</name>
+    <value>localhost</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.port</name>
+    <value>12345</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.client.retry.limit</name>
+    <value>3</value>
+  </property>
+
+  <property>
+    <name>hive.server2.thrift.client.connect.retry.limit</name>
+    <value>3</value>
+  </property>
+
+  <property>
+    <name>lens.driver.test.key</name>
+    <value>set</value>
+  </property>
+
+  <property>
+    <name>lens.driver.test.drivername</name>
+    <value>mockHive/mockHive2</value>
+  </property>
+
+  <property>
+    <name>driver.max.concurrent.launched.queries</name>
+    <value>2</value>
+  </property>
+
+  <property>
+    <name>lens.driver.hive.query.launching.constraint.factories</name>
+    <value>org.apache.lens.server.api.query.constraint.MaxConcurrentDriverQueriesConstraintFactory</value>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/test/resources/failing-query-driver-site.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/test/resources/failing-query-driver-site.xml b/lens-server/src/test/resources/failing-query-driver-site.xml
deleted file mode 100644
index fee022d..0000000
--- a/lens-server/src/test/resources/failing-query-driver-site.xml
+++ /dev/null
@@ -1,27 +0,0 @@
-<?xml version="1.0"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~ http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing,
-  ~ software distributed under the License is distributed on an
-  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  ~ KIND, either express or implied.  See the License for the
-  ~ specific language governing permissions and limitations
-  ~ under the License.
-  -->
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<configuration>
-  <property>
-    <name>lens.driver.test.key</name>
-    <value>set</value>
-  </property>
-</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/test/resources/hivedriver-site.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/test/resources/hivedriver-site.xml b/lens-server/src/test/resources/hivedriver-site.xml
deleted file mode 100644
index f2aed88..0000000
--- a/lens-server/src/test/resources/hivedriver-site.xml
+++ /dev/null
@@ -1,80 +0,0 @@
-<?xml version="1.0"?>
-<!--
-
-    Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
-
--->
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<configuration>
-
-  <property>
-    <name>lens.driver.hive.connection.class</name>
-    <value>org.apache.lens.driver.hive.RemoteThriftConnection</value>
-    <description>The connection class from HiveDriver to HiveServer.</description>
-  </property>
-
-  <property>
-     
-    <name>hive.metastore.local</name>
-     
-    <value>true</value>
-  </property>
-
-  <property>
-    <name>hive.metastore.warehouse.dir</name>
-    <value>${project.build.directory}/hive/warehouse</value>
-  </property>
-
-  <property>
-    <name>javax.jdo.option.ConnectionURL</name>
-    <value>jdbc:derby:;databaseName=target/metastore_db;create=true</value>
-    <description>JDBC connect string for a JDBC metastore</description>
-  </property>
-
-  <property>
-    <name>hive.lock.manager</name>
-    <value>org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager</value>
-  </property>
-
-  <property>
-    <name>hive.server2.thrift.bind.host</name>
-    <value>localhost</value>
-  </property>
-
-  <property>
-    <name>hive.server2.thrift.port</name>
-    <value>12345</value>
-  </property>
-
-  <property>
-    <name>hive.server2.thrift.client.retry.limit</name>
-    <value>3</value>
-  </property>
-
-  <property>
-    <name>hive.server2.thrift.client.connect.retry.limit</name>
-    <value>3</value>
-  </property>
-
-  <property>
-    <name>lens.driver.test.key</name>
-    <value>set</value>
-  </property>
-
-</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/test/resources/jdbcdriver-site.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/test/resources/jdbcdriver-site.xml b/lens-server/src/test/resources/jdbcdriver-site.xml
deleted file mode 100644
index 1b14f54..0000000
--- a/lens-server/src/test/resources/jdbcdriver-site.xml
+++ /dev/null
@@ -1,55 +0,0 @@
-<?xml version="1.0"?>
-<!--
-
-    Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
-
--->
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<configuration>
-  <property>
-    <name>lens.driver.jdbc.driver.class</name>
-    <value>org.hsqldb.jdbcDriver</value>
-  </property>
-  <property>
-    <name>lens.driver.jdbc.db.uri</name>
-    <value>jdbc:hsqldb:./target/db-storage.db;MODE=MYSQL</value>
-  </property>
-  <property>
-    <name>lens.driver.jdbc.db.user</name>
-    <value>SA</value>
-  </property>
-  <property>
-    <name>lens.cube.query.driver.supported.storages</name>
-    <value>mydb</value>
-    <final>true</final>
-  </property>
-  <property>
-    <name>lens.driver.jdbc.query.rewriter</name>
-    <value>org.apache.lens.driver.jdbc.ColumnarSQLRewriter</value>
-  </property>
-  <property>
-    <name>lens.driver.jdbc.explain.keyword</name>
-    <value>explain plan for</value>
-  </property>
-  <property>
-    <name>lens.driver.test.key</name>
-    <value>set</value>
-  </property>
-
-</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/lens-server/src/test/resources/lens-site.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/test/resources/lens-site.xml b/lens-server/src/test/resources/lens-site.xml
index cc887ef..9cb4a6f 100644
--- a/lens-server/src/test/resources/lens-site.xml
+++ b/lens-server/src/test/resources/lens-site.xml
@@ -24,8 +24,8 @@
 <configuration>
   <property>
     <name>lens.server.drivers</name>
-    <value>org.apache.lens.driver.hive.HiveDriver,org.apache.lens.driver.jdbc.JDBCDriver,
-      org.apache.lens.server.common.FailingQueryDriver</value>
+    <value>hive:org.apache.lens.driver.hive.HiveDriver,jdbc:org.apache.lens.driver.jdbc.JDBCDriver,
+      mock:org.apache.lens.server.common.FailingQueryDriver</value> <!--$LENS_CONF/drivers-->
   </property>
 
   <property>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/src/site/apt/admin/config-server.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/admin/config-server.apt b/src/site/apt/admin/config-server.apt
index 141f2b3..fea2336 100644
--- a/src/site/apt/admin/config-server.apt
+++ b/src/site/apt/admin/config-server.apt
@@ -31,10 +31,39 @@ Configuring lens server
 
 * Driver configuration
 
-  The supported drivers are configured through classnames in this release. The configuration 
-  for each driver can be specified in their site.xml file. For example, HiveDriver's
-  configuration should be specified in hivedriver-site.xml. Supported drivers in this version
-  are org.apache.lens.driver.hive.HiveDriver, org.apache.lens.driver.jdbc.JDBCDriver
+  The supported drivers are configured through a type and a classname in this release. The property
+  lens.server.drivers in the server configuration specifies the driver types supported by the system and
+  their corresponding driver implementations.
+
+  The system can have multiple drivers of the same type; for example, MySQL and Postgres can be two drivers of
+  type JDBC. The configuration for each driver is specified in its site.xml file, which should be
+  present under the folder <Server_Conf_location>/drivers/<driver-type>/<driver-name>.
+  For example, the Hive driver's configuration should be specified in
+  <LensServer_Install_Location>/conf/drivers/hive/hive1/hivedriver-site.xml, where hive1 is the driver name.
+  A driver is identified by its fully qualified name, which includes the driver type and the driver name. For the
+  example discussed above, the fully qualified name would be "hive/hive1".
+  
+  The organization of drivers in the system is illustrated below:
+
++---+
+
+  ├── conf
+      └── drivers
+          ├── hive
+          │   ├── hive1
+          │   │   └── hivedriver-site.xml
+          │   └── hive2
+          │       └── hivedriver-site.xml
+          └── jdbc
+              ├── jdbc1
+              │   └── jdbcdriver-site.xml
+              └── jdbc2
+                  └── jdbcdriver-site.xml
+
++---+
+
+  Supported drivers in this version are 
+  org.apache.lens.driver.hive.HiveDriver, org.apache.lens.driver.jdbc.JDBCDriver
   and org.apache.lens.driver.es.ESDriver.
   The configuration parameters and their description are explained in their respective docs listed below
   HiveDriver - {{{./hivedriver-config.html} here}}
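As a rough illustration of the new format (a sketch only, not the Lens server's actual loading code), entries in lens.server.drivers of the form type:classname can be split into a type-to-implementation map:

  import java.util.HashMap;
  import java.util.Map;

  public class DriverListSketch {
    /** Splits "hive:org.apache...HiveDriver,jdbc:org.apache...JDBCDriver" into pairs. */
    public static Map<String, String> parse(String driversProperty) {
      Map<String, String> driversByType = new HashMap<String, String>();
      for (String entry : driversProperty.split(",")) {
        String[] parts = entry.trim().split(":", 2); // type, then implementation class
        driversByType.put(parts[0], parts[1]);
      }
      return driversByType;
    }
  }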

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/src/site/apt/admin/config.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/admin/config.apt b/src/site/apt/admin/config.apt
index 88c1489..bcf4b7d 100644
--- a/src/site/apt/admin/config.apt
+++ b/src/site/apt/admin/config.apt
@@ -65,7 +65,7 @@ Lens server configuration
 *--+--+---+--+
 |19|lens.server.driver.selector.class|org.apache.lens.server.api.driver.MinQueryCostSelector|Class for selecting best driver given the query context|
 *--+--+---+--+
-|20|lens.server.drivers|org.apache.lens.driver.hive.HiveDriver|Drivers enabled for this lens server instance|
+|20|lens.server.drivers|hive:org.apache.lens.driver.hive.HiveDriver|Drivers enabled for this lens server instance|
 *--+--+---+--+
 |21|lens.server.enable.console.metrics|false|Enable metrics to be reported on console|
 *--+--+---+--+

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/src/site/apt/lenshome/install-and-run.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/lenshome/install-and-run.apt b/src/site/apt/lenshome/install-and-run.apt
index 9eadc5c..961d926 100644
--- a/src/site/apt/lenshome/install-and-run.apt
+++ b/src/site/apt/lenshome/install-and-run.apt
@@ -98,14 +98,24 @@ Installing and Running Lens
   │   ├── lens-config.sh
   │   └── lens-ctl
   ├── conf
-  │   ├── hivedriver-site.xml
-  │   ├── jdbcdriver-site.xml
+  │   ├── drivers
+  │   │   ├── hive
+  │   │   │   └── hive1
+  │   │   │       └── hivedriver-site.xml
+  │   │   └── jdbc
+  │   │       └── jdbc1
+  │   │           └── jdbcdriver-site.xml
   │   ├── lens-env.sh
   │   ├── lens-site.xml
   │   └── logback.xml
   ├── conf-pseudo-distr
-  │   ├── hivedriver-site.xml
-  │   ├── jdbcdriver-site.xml
+  │   ├── drivers
+  │   │   ├── hive
+  │   │   │   └── hive1
+  │   │   │       └── hivedriver-site.xml
+  │   │   └── jdbc
+  │   │       └── jdbc1
+  │   │           └── jdbcdriver-site.xml
   │   ├── lens-env.sh
   │   ├── lens-site.xml
   │   └── logback.xml

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/tools/conf-pseudo-distr/server/drivers/hive/hive1/hivedriver-site.xml
----------------------------------------------------------------------
diff --git a/tools/conf-pseudo-distr/server/drivers/hive/hive1/hivedriver-site.xml b/tools/conf-pseudo-distr/server/drivers/hive/hive1/hivedriver-site.xml
new file mode 100644
index 0000000..4804356
--- /dev/null
+++ b/tools/conf-pseudo-distr/server/drivers/hive/hive1/hivedriver-site.xml
@@ -0,0 +1,57 @@
+<?xml version="1.0"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+<property>
+  <name>lens.driver.hive.connection.class</name>
+  <value>org.apache.lens.driver.hive.RemoteThriftConnection</value>
+  <description>The connection class from HiveDriver to HiveServer. The default is
+   an embedded connection which does not require a remote hive server.
+   For connecting to a hiveserver end point, remote connection should be used. 
+  The possible values are org.apache.lens.driver.hive.EmbeddedThriftConnection
+   and org.apache.lens.driver.hive.RemoteThriftConnection. </description>
+</property>
+
+<property>
+  <name>hive.server2.thrift.bind.host</name>
+  <value>localhost</value>
+  <description>The host on which hive server is running</description>
+</property>
+
+<property>
+  <name>hive.server2.thrift.port</name>
+  <value>10000</value>
+  <description>The port on which hive server is running</description>
+</property>
+
+<!-- Adding in supported storages by hive driver -->
+<property>
+  <name>lens.cube.query.driver.supported.storages</name>
+  <value>local,cluster</value>
+  <final>true</final>
+</property>
+<property>
+  <name>hive.aux.jars.path</name>
+  <value>file:///opt/lens/lens-ml-dist/target/apache-lens-2.5.0-beta-SNAPSHOT-ml/lib/lens-ml-lib-2.5.0-beta-SNAPSHOT.jar,file:///usr/local/spark-1.3.0-bin-hadoop2.4/lib/spark-assembly-1.3.0-hadoop2.4.0.jar</value>
+</property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/tools/conf-pseudo-distr/server/drivers/jdbc/jdbc1/jdbcdriver-site.xml
----------------------------------------------------------------------
diff --git a/tools/conf-pseudo-distr/server/drivers/jdbc/jdbc1/jdbcdriver-site.xml b/tools/conf-pseudo-distr/server/drivers/jdbc/jdbc1/jdbcdriver-site.xml
new file mode 100644
index 0000000..7fad125
--- /dev/null
+++ b/tools/conf-pseudo-distr/server/drivers/jdbc/jdbc1/jdbcdriver-site.xml
@@ -0,0 +1,50 @@
+<?xml version="1.0"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+  <property>
+    <name>lens.driver.jdbc.driver.class</name>
+    <value>org.hsqldb.jdbcDriver</value>
+  </property>
+  <property>
+    <name>lens.driver.jdbc.db.uri</name>
+    <value>jdbc:hsqldb:/tmp/db-storage.db;MODE=MYSQL;readonly=true</value>
+  </property>
+  <property>
+    <name>lens.driver.jdbc.db.user</name>
+    <value>SA</value>
+  </property>
+  <property>
+    <name>lens.cube.query.driver.supported.storages</name>
+    <value>mydb</value>
+    <final>true</final>
+  </property>
+  <property>
+    <name>lens.driver.jdbc.query.rewriter</name>
+    <value>org.apache.lens.driver.jdbc.ColumnarSQLRewriter</value>
+  </property>
+  <property>
+    <name>lens.driver.jdbc.explain.keyword</name>
+    <value>explain plan for </value>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/tools/conf-pseudo-distr/server/hivedriver-site.xml
----------------------------------------------------------------------
diff --git a/tools/conf-pseudo-distr/server/hivedriver-site.xml b/tools/conf-pseudo-distr/server/hivedriver-site.xml
deleted file mode 100644
index 4804356..0000000
--- a/tools/conf-pseudo-distr/server/hivedriver-site.xml
+++ /dev/null
@@ -1,57 +0,0 @@
-<?xml version="1.0"?>
-<!--
-
-    Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
-
--->
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<configuration>
-<property>
-  <name>lens.driver.hive.connection.class</name>
-  <value>org.apache.lens.driver.hive.RemoteThriftConnection</value>
-  <description>The connection class from HiveDriver to HiveServer. The default is
-   an embedded connection which does not require a remote hive server.
-   For connecting to a hiveserver end point, remote connection should be used. 
-  The possible values are org.apache.lens.driver.hive.EmbeddedThriftConnection
-   and org.apache.lens.driver.hive.RemoteThriftConnection. </description>
-</property>
-
-<property>
-  <name>hive.server2.thrift.bind.host</name>
-  <value>localhost</value>
-  <description>The host on which hive server is running</description>
-</property>
-
-<property>
-  <name>hive.server2.thrift.port</name>
-  <value>10000</value>
-  <description>The port on which hive server is running</description>
-</property>
-
-<!-- Adding in supported storages by hive driver -->
-<property>
-  <name>lens.cube.query.driver.supported.storages</name>
-  <value>local,cluster</value>
-  <final>true</final>
-</property>
-<property>
-  <name>hive.aux.jars.path</name>
-  <value>file:///opt/lens/lens-ml-dist/target/apache-lens-2.5.0-beta-SNAPSHOT-ml/lib/lens-ml-lib-2.5.0-beta-SNAPSHOT.jar,file:///usr/local/spark-1.3.0-bin-hadoop2.4/lib/spark-assembly-1.3.0-hadoop2.4.0.jar</value>
-</property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/tools/conf-pseudo-distr/server/jdbcdriver-site.xml
----------------------------------------------------------------------
diff --git a/tools/conf-pseudo-distr/server/jdbcdriver-site.xml b/tools/conf-pseudo-distr/server/jdbcdriver-site.xml
deleted file mode 100644
index 37540dd..0000000
--- a/tools/conf-pseudo-distr/server/jdbcdriver-site.xml
+++ /dev/null
@@ -1,50 +0,0 @@
-<?xml version="1.0"?>
-<!--
-
-    Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
-
--->
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<configuration>
-  <property>
-    <name>lens.driver.jdbc.driver.class</name>
-    <value>org.hsqldb.jdbcDriver</value>
-  </property>
-  <property>
-    <name>lens.driver.jdbc.db.uri</name>
-    <value>jdbc:hsqldb:/tmp/db-storage.db;MODE=MYSQL;readonly=true</value>
-  </property>
-  <property>
-    <name>lens.driver.jdbc.db.user</name>
-    <value>SA</value>
-  </property>
-  <property>
-    <name>lens.cube.query.driver.supported.storages</name>
-    <value>mydb</value>
-    <final>true</final>
-  </property>
-  <property>
-    <name>lens.driver.jdbc.query.rewriter</name>
-    <value>org.apache.lens.driver.jdbc.ColumnarSQLRewriter</value>
-  </property>
-  <property>
-    <name>lens.driver.jdbc.explain.keyword</name>
-    <value>explain plan for </value>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/tools/conf-pseudo-distr/server/lens-site.xml
----------------------------------------------------------------------
diff --git a/tools/conf-pseudo-distr/server/lens-site.xml b/tools/conf-pseudo-distr/server/lens-site.xml
index f43d07e..dfb9d21 100644
--- a/tools/conf-pseudo-distr/server/lens-site.xml
+++ b/tools/conf-pseudo-distr/server/lens-site.xml
@@ -31,7 +31,7 @@
 
 <property>
   <name>lens.server.drivers</name>
-  <value>org.apache.lens.driver.hive.HiveDriver,org.apache.lens.driver.jdbc.JDBCDriver</value>
+  <value>hive:org.apache.lens.driver.hive.HiveDriver,jdbc:org.apache.lens.driver.jdbc.JDBCDriver</value>
 </property>
 
 <property>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/tools/conf/server/drivers/hive/hive1/hivedriver-site.xml
----------------------------------------------------------------------
diff --git a/tools/conf/server/drivers/hive/hive1/hivedriver-site.xml b/tools/conf/server/drivers/hive/hive1/hivedriver-site.xml
new file mode 100644
index 0000000..2e8e7fa
--- /dev/null
+++ b/tools/conf/server/drivers/hive/hive1/hivedriver-site.xml
@@ -0,0 +1,41 @@
+<?xml version="1.0"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+<property>
+  <name>hive.metastore.warehouse.dir</name>
+  <value>/tmp/hive/warehouse</value>
+</property>
+
+<property>
+  <name>hive.lock.manager</name>
+  <value>org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager</value>
+</property>
+
+<!-- Adding in supported storages by hive driver -->
+<property>
+  <name>lens.cube.query.driver.supported.storages</name>
+  <value>local,cluster</value>
+  <final>true</final>
+</property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/tools/conf/server/drivers/jdbc/jdbc1/jdbcdriver-site.xml
----------------------------------------------------------------------
diff --git a/tools/conf/server/drivers/jdbc/jdbc1/jdbcdriver-site.xml b/tools/conf/server/drivers/jdbc/jdbc1/jdbcdriver-site.xml
new file mode 100644
index 0000000..37540dd
--- /dev/null
+++ b/tools/conf/server/drivers/jdbc/jdbc1/jdbcdriver-site.xml
@@ -0,0 +1,50 @@
+<?xml version="1.0"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<configuration>
+  <property>
+    <name>lens.driver.jdbc.driver.class</name>
+    <value>org.hsqldb.jdbcDriver</value>
+  </property>
+  <property>
+    <name>lens.driver.jdbc.db.uri</name>
+    <value>jdbc:hsqldb:/tmp/db-storage.db;MODE=MYSQL;readonly=true</value>
+  </property>
+  <property>
+    <name>lens.driver.jdbc.db.user</name>
+    <value>SA</value>
+  </property>
+  <property>
+    <name>lens.cube.query.driver.supported.storages</name>
+    <value>mydb</value>
+    <final>true</final>
+  </property>
+  <property>
+    <name>lens.driver.jdbc.query.rewriter</name>
+    <value>org.apache.lens.driver.jdbc.ColumnarSQLRewriter</value>
+  </property>
+  <property>
+    <name>lens.driver.jdbc.explain.keyword</name>
+    <value>explain plan for </value>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/tools/conf/server/hivedriver-site.xml
----------------------------------------------------------------------
diff --git a/tools/conf/server/hivedriver-site.xml b/tools/conf/server/hivedriver-site.xml
deleted file mode 100644
index 2e8e7fa..0000000
--- a/tools/conf/server/hivedriver-site.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0"?>
-<!--
-
-    Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
-
--->
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<configuration>
-<property>
-  <name>hive.metastore.warehouse.dir</name>
-  <value>/tmp/hive/warehouse</value>
-</property>
-
-<property>
-  <name>hive.lock.manager</name>
-  <value>org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager</value>
-</property>
-
-<!-- Adding in supported storages by hive driver -->
-<property>
-  <name>lens.cube.query.driver.supported.storages</name>
-  <value>local,cluster</value>
-  <final>true</final>
-</property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/tools/conf/server/jdbcdriver-site.xml
----------------------------------------------------------------------
diff --git a/tools/conf/server/jdbcdriver-site.xml b/tools/conf/server/jdbcdriver-site.xml
deleted file mode 100644
index 37540dd..0000000
--- a/tools/conf/server/jdbcdriver-site.xml
+++ /dev/null
@@ -1,50 +0,0 @@
-<?xml version="1.0"?>
-<!--
-
-    Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
-
--->
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<configuration>
-  <property>
-    <name>lens.driver.jdbc.driver.class</name>
-    <value>org.hsqldb.jdbcDriver</value>
-  </property>
-  <property>
-    <name>lens.driver.jdbc.db.uri</name>
-    <value>jdbc:hsqldb:/tmp/db-storage.db;MODE=MYSQL;readonly=true</value>
-  </property>
-  <property>
-    <name>lens.driver.jdbc.db.user</name>
-    <value>SA</value>
-  </property>
-  <property>
-    <name>lens.cube.query.driver.supported.storages</name>
-    <value>mydb</value>
-    <final>true</final>
-  </property>
-  <property>
-    <name>lens.driver.jdbc.query.rewriter</name>
-    <value>org.apache.lens.driver.jdbc.ColumnarSQLRewriter</value>
-  </property>
-  <property>
-    <name>lens.driver.jdbc.explain.keyword</name>
-    <value>explain plan for </value>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/114dab34/tools/conf/server/lens-site.xml
----------------------------------------------------------------------
diff --git a/tools/conf/server/lens-site.xml b/tools/conf/server/lens-site.xml
index 2b12b83..0803da1 100644
--- a/tools/conf/server/lens-site.xml
+++ b/tools/conf/server/lens-site.xml
@@ -24,7 +24,7 @@
 <configuration>
 <property>
   <name>lens.server.drivers</name>
-  <value>org.apache.lens.driver.hive.HiveDriver,org.apache.lens.driver.jdbc.JDBCDriver</value>
+  <value>hive:org.apache.lens.driver.hive.HiveDriver,jdbc:org.apache.lens.driver.jdbc.JDBCDriver</value>
 </property>
 
 <property>


[25/50] [abbrv] lens git commit: LENS-883 : httpresultset api should return 404 when resultset file does not exist

Posted by sh...@apache.org.
LENS-883 : httpresultset api should return 404 when resultset file does not exist


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/942f071f
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/942f071f
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/942f071f

Branch: refs/heads/LENS-581
Commit: 942f071f9925b1265a67a1f7b2ab437127485a09
Parents: 5d79ecf
Author: Deepak Barr <de...@gmail.com>
Authored: Thu Nov 26 13:51:14 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Thu Nov 26 13:51:14 2015 +0530

----------------------------------------------------------------------
 .../apache/lens/server/query/QueryExecutionServiceImpl.java  | 8 ++++++++
 1 file changed, 8 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/942f071f/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
index 941b1e5..7201e0d 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
@@ -2485,6 +2485,14 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
       throw new NotFoundException("http result not available");
     }
     final Path resultPath = new Path(resultSet.getOutputPath());
+    try {
+      FileSystem fs = resultPath.getFileSystem(conf);
+      if (!fs.exists(resultPath)) {
+        throw new NotFoundException("Result file does not exist!");
+      }
+    } catch (IOException e) {
+      throw new LensException(e);
+    }
     final QueryContext ctx = getQueryContext(sessionHandle, queryHandle);
     String resultFSReadUrl = conf.get(RESULT_FS_READ_URL);
     if (resultFSReadUrl != null) {
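For illustration, a client-side sketch of the new behavior follows (the server URL is a placeholder and the JAX-RS client usage is an assumption, not code from this commit): fetching the httpresultset of a query whose result file has been deleted now yields an HTTP 404 instead of a server-side failure.

  import javax.ws.rs.client.Client;
  import javax.ws.rs.client.ClientBuilder;
  import javax.ws.rs.core.Response;

  public class HttpResultSetCheck {
    public static void main(String[] args) {
      // args[0] = query handle, args[1] = session id; base URL is hypothetical.
      Client client = ClientBuilder.newClient();
      Response r = client.target("http://localhost:9999/lensapi")
          .path("queryapi/queries/" + args[0] + "/httpresultset")
          .queryParam("sessionid", args[1])
          .request().get();
      System.out.println(r.getStatus() == 404 ? "result file gone" : "status " + r.getStatus());
    }
  }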


[49/50] [abbrv] lens git commit: LENS-907 : Subsequent calls to metastore API to fetch native tables throws error after the first call

Posted by sh...@apache.org.
LENS-907 : Subsequent calls to metastore API to fetch native tables throws error after the first call


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/4d7c8e4d
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/4d7c8e4d
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/4d7c8e4d

Branch: refs/heads/LENS-581
Commit: 4d7c8e4db3e9aa81da042921a31dfd670982ce38
Parents: 7a89db1
Author: Deepak Barr <de...@apache.org>
Authored: Thu Dec 24 17:14:31 2015 +0530
Committer: Deepak Kumar Barr <de...@apache.org>
Committed: Thu Dec 24 17:14:31 2015 +0530

----------------------------------------------------------------------
 .../apache/lens/server/metastore/CubeMetastoreServiceImpl.java   | 4 ----
 1 file changed, 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/4d7c8e4d/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
index 66ed938..cf49a13 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
@@ -1212,10 +1212,6 @@ public class CubeMetastoreServiceImpl extends BaseLensService implements CubeMet
       return result;
     } catch (Exception e) {
       throw new LensException("Error getting native tables from DB", e);
-    } finally {
-      if (null != msc) {
-        msc.close();
-      }
     }
   }
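The removed finally block closed the metastore client after every call; because the client instance is reused across calls, the first invocation poisoned it for every later one, which is the symptom in the commit summary. A self-contained sketch of the failure pattern (hypothetical names, not Hive's actual client classes):

  public class CachedClientSketch {
    static class Client {
      private boolean closed;
      void query() {
        if (closed) {
          throw new IllegalStateException("client already closed");
        }
      }
      void close() {
        closed = true;
      }
    }

    private Client cached;

    private Client get() {
      if (cached == null) {
        cached = new Client();
      }
      return cached; // the same instance is handed out on every call
    }

    public static void main(String[] args) {
      CachedClientSketch s = new CachedClientSketch();
      s.get().query();   // first call succeeds
      s.get().close();   // the removed finally block did the equivalent of this
      s.get().query();   // throws: the cached instance is already closed
    }
  }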
 


[02/50] [abbrv] lens git commit: LENS-862: Update HQLParser.printAST() to print Char Position In Line

Posted by sh...@apache.org.
LENS-862: Update HQLParser.printAST() to print Char Position In Line


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/be36b630
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/be36b630
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/be36b630

Branch: refs/heads/LENS-581
Commit: be36b63034d8ba62ed491e92a53fa40fd0896e4e
Parents: ffc9987
Author: Puneet Gupta <pu...@gmail.com>
Authored: Mon Nov 16 11:39:04 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Mon Nov 16 11:39:04 2015 +0530

----------------------------------------------------------------------
 lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/be36b630/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
index ea9badd..16e1aa3 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
@@ -194,7 +194,7 @@ public final class HQLParser {
     }
 
     System.out.print(node.getText() + " [" + tokenMapping.get(node.getToken().getType()) + "]");
-    System.out.print(" (l" + level + "c" + child + ")");
+    System.out.print(" (l" + level + "c" + child + "p" + node.getCharPositionInLine() +")");
 
     if (node.getChildCount() > 0) {
       System.out.println(" {");


[23/50] [abbrv] lens git commit: LENS-869 : Fix DefaultEstimatedQueryCollection.remove method

Posted by sh...@apache.org.
LENS-869 : Fix DefaultEstimatedQueryCollection.remove method


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/b71be2dc
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/b71be2dc
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/b71be2dc

Branch: refs/heads/LENS-581
Commit: b71be2dc906688b6f283bb16afa778d19fc1c0bf
Parents: 8704956
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Wed Nov 25 18:39:07 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Wed Nov 25 18:39:07 2015 +0530

----------------------------------------------------------------------
 .../lens/cli/TestLensNativeTableCommands.java   |   6 +-
 .../org/apache/lens/driver/hive/HiveDriver.java |   6 +-
 .../lens/server/api/LensServerAPITestUtil.java  |  47 +++
 .../api/query/TestAbstractQueryContext.java     |   2 +-
 .../lens/server/api/util/TestLensUtil.java      |  24 --
 .../org/apache/lens/server/LensServerConf.java  |   1 -
 .../server/query/QueryExecutionServiceImpl.java |  18 +-
 .../DefaultEstimatedQueryCollection.java        |   4 +-
 .../query/collect/DefaultQueryCollection.java   |   2 +
 .../ThreadSafeEstimatedQueryCollection.java     |  13 +-
 .../org/apache/lens/server/LensJerseyTest.java  |   8 +-
 .../apache/lens/server/LensServerTestUtil.java  | 298 +++++++++++++++++++
 .../org/apache/lens/server/LensTestUtil.java    | 298 -------------------
 .../org/apache/lens/server/TestServerMode.java  |   2 +-
 .../apache/lens/server/TestServerRestart.java   |  14 +-
 .../server/metastore/TestMetastoreService.java  |   4 +-
 .../metrics/TestResourceMethodMetrics.java      |   8 +-
 .../lens/server/query/TestQueryConstraints.java | 285 ++++++++++++++++++
 .../server/query/TestQueryEndEmailNotifier.java |  10 +-
 .../lens/server/query/TestQueryService.java     |  53 ++--
 .../lens/server/query/TestResultFormatting.java |  38 ++-
 .../session/TestDatabaseResourceService.java    |   4 +-
 .../server/session/TestSessionClassLoaders.java |   4 +-
 23 files changed, 746 insertions(+), 403 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-cli/src/test/java/org/apache/lens/cli/TestLensNativeTableCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/TestLensNativeTableCommands.java b/lens-cli/src/test/java/org/apache/lens/cli/TestLensNativeTableCommands.java
index 40a473a..d453803 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/TestLensNativeTableCommands.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/TestLensNativeTableCommands.java
@@ -20,7 +20,7 @@ package org.apache.lens.cli;
 
 import org.apache.lens.cli.commands.LensNativeTableCommands;
 import org.apache.lens.client.LensClient;
-import org.apache.lens.server.LensTestUtil;
+import org.apache.lens.server.LensServerTestUtil;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -50,7 +50,7 @@ public class TestLensNativeTableCommands extends LensCliApplicationTest {
       LOG.debug("Starting to test nativetable commands");
       String tblList = command.showNativeTables();
       Assert.assertFalse(tblList.contains("test_native_table_command"));
-      LensTestUtil.createHiveTable("test_native_table_command");
+      LensServerTestUtil.createHiveTable("test_native_table_command");
       tblList = command.showNativeTables();
       Assert.assertTrue(tblList.contains("test_native_table_command"));
 
@@ -61,7 +61,7 @@ public class TestLensNativeTableCommands extends LensCliApplicationTest {
       Assert.assertTrue(desc.contains("MANAGED_TABLE"));
       Assert.assertTrue(desc.contains("test.hive.table.prop"));
     } finally {
-      LensTestUtil.dropHiveTable("test_native_table_command");
+      LensServerTestUtil.dropHiveTable("test_native_table_command");
 
     }
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
index c96ef20..19c4793 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
@@ -95,7 +95,7 @@ public class HiveDriver implements LensDriver {
   public static final String HS2_PRIORITY_DEFAULT_RANGES = "VERY_HIGH,7.0,HIGH,30.0,NORMAL,90,LOW";
   public static final String SESSION_KEY_DELIMITER = ".";
 
-  private static final String QUERY_LAUNCHIG_CONSTRAINT_FACTORIES_KEY
+  public static final String QUERY_LAUNCHING_CONSTRAINT_FACTORIES_KEY
     = "lens.driver.hive.query.launching.constraint.factories";
 
   private static final String WAITING_QUERIES_SELECTION_POLICY_FACTORIES_KEY
@@ -143,7 +143,7 @@ public class HiveDriver implements LensDriver {
   private DriverQueryHook queryHook;
 
   @Getter
-  private ImmutableSet<QueryLaunchingConstraint> queryConstraints;
+  protected ImmutableSet<QueryLaunchingConstraint> queryConstraints;
   private ImmutableSet<WaitingQueriesSelectionPolicy> selectionPolicies;
 
   private String sessionDbKey(String sessionHandle, String database) {
@@ -364,7 +364,7 @@ public class HiveDriver implements LensDriver {
     } catch (InstantiationException | IllegalAccessException e) {
       throw new LensException("Can't instantiate driver query hook for hivedriver with given class", e);
     }
-    queryConstraints = getImplementations(QUERY_LAUNCHIG_CONSTRAINT_FACTORIES_KEY, driverConf);
+    queryConstraints = getImplementations(QUERY_LAUNCHING_CONSTRAINT_FACTORIES_KEY, driverConf);
     selectionPolicies = getImplementations(WAITING_QUERIES_SELECTION_POLICY_FACTORIES_KEY, driverConf);
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server-api/src/test/java/org/apache/lens/server/api/LensServerAPITestUtil.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/test/java/org/apache/lens/server/api/LensServerAPITestUtil.java b/lens-server-api/src/test/java/org/apache/lens/server/api/LensServerAPITestUtil.java
new file mode 100644
index 0000000..84139a4
--- /dev/null
+++ b/lens-server-api/src/test/java/org/apache/lens/server/api/LensServerAPITestUtil.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.server.api;
+
+import org.apache.lens.api.LensConf;
+
+import org.apache.hadoop.conf.Configuration;
+
+public class LensServerAPITestUtil {
+  private LensServerAPITestUtil() {
+
+  }
+
+  public static Configuration getConfiguration(Object... args) {
+    Configuration conf = new Configuration(false);
+    assert (args.length % 2 == 0);
+    for (int i = 0; i < args.length; i += 2) {
+      conf.set(args[i].toString(), args[i + 1].toString());
+    }
+    return conf;
+  }
+
+  public static LensConf getLensConf(Object... args) {
+    assert (args.length % 2 == 0);
+    LensConf conf = new LensConf();
+    for (int i = 0; i < args.length; i += 2) {
+      conf.addProperty(args[i], args[i + 1]);
+    }
+    return conf;
+  }
+}
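A brief usage sketch of the relocated helper (illustrative): keys and values alternate in the varargs and are converted with toString(), so tests can build a Configuration in one expression.

  import org.apache.hadoop.conf.Configuration;
  import org.apache.lens.server.api.LensServerAPITestUtil;

  public class UtilUsageSketch {
    public static void main(String[] args) {
      // Alternating key/value pairs, per the helper's contract.
      Configuration conf = LensServerAPITestUtil.getConfiguration(
          "lens.driver.test.key", "set",
          "driver.max.concurrent.launched.queries", 2);
      System.out.println(conf.get("driver.max.concurrent.launched.queries")); // prints 2
    }
  }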

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java b/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java
index e41f2f4..02b652e 100644
--- a/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java
+++ b/lens-server-api/src/test/java/org/apache/lens/server/api/query/TestAbstractQueryContext.java
@@ -20,7 +20,7 @@ package org.apache.lens.server.api.query;
 
 import static org.apache.lens.api.Priority.HIGH;
 import static org.apache.lens.server.api.LensConfConstants.*;
-import static org.apache.lens.server.api.util.TestLensUtil.getConfiguration;
+import static org.apache.lens.server.api.LensServerAPITestUtil.getConfiguration;
 
 import static org.testng.Assert.*;
 

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server-api/src/test/java/org/apache/lens/server/api/util/TestLensUtil.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/test/java/org/apache/lens/server/api/util/TestLensUtil.java b/lens-server-api/src/test/java/org/apache/lens/server/api/util/TestLensUtil.java
index a6acb7d..caee12f 100644
--- a/lens-server-api/src/test/java/org/apache/lens/server/api/util/TestLensUtil.java
+++ b/lens-server-api/src/test/java/org/apache/lens/server/api/util/TestLensUtil.java
@@ -18,14 +18,8 @@
  */
 package org.apache.lens.server.api.util;
 
-import static org.testng.Assert.assertEquals;
-
 import java.io.IOException;
 
-import org.apache.lens.api.LensConf;
-
-import org.apache.hadoop.conf.Configuration;
-
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
@@ -61,22 +55,4 @@ public class TestLensUtil {
     }
     Assert.assertEquals(LensUtil.getCauseMessage(th), "run time exception");
   }
-
-  public static Configuration getConfiguration(Object... args) {
-    Configuration conf = new Configuration();
-    Assert.assertEquals(args.length % 2, 0, "Odd number of arguments not supported");
-    for (int i = 0; i < args.length; i += 2) {
-      conf.set(args[i].toString(), args[i + 1].toString());
-    }
-    return conf;
-  }
-
-  public static LensConf getLensConf(Object... args) {
-    assertEquals(args.length % 2, 0);
-    LensConf conf = new LensConf();
-    for (int i = 0; i < args.length; i += 2) {
-      conf.addProperty(args[i], args[i + 1]);
-    }
-    return conf;
-  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server/src/main/java/org/apache/lens/server/LensServerConf.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/LensServerConf.java b/lens-server/src/main/java/org/apache/lens/server/LensServerConf.java
index 6db720d..e977ebd 100644
--- a/lens-server/src/main/java/org/apache/lens/server/LensServerConf.java
+++ b/lens-server/src/main/java/org/apache/lens/server/LensServerConf.java
@@ -52,7 +52,6 @@ public final class LensServerConf {
           OVERRIDING_CONF_FOR_DRIVER.set(prop.getKey(), prop.getValue());
         }
       }
-
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
index fdc8bfd..941b1e5 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
@@ -86,9 +86,7 @@ import org.slf4j.LoggerFactory;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
-import lombok.Getter;
-import lombok.NonNull;
-import lombok.ToString;
+import lombok.*;
 import lombok.extern.slf4j.Slf4j;
 
 /**
@@ -141,6 +139,7 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
   /**
    * The launched queries.
    */
+  @Getter
   private EstimatedQueryCollection launchedQueries;
 
   /**
@@ -2569,6 +2568,19 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
     return finishedQueries.size();
   }
 
+  @Data
+  @AllArgsConstructor
+  public static class QueryCount {
+    long running, queued, waiting;
+  }
+
+  public QueryCount getQueryCountSnapshot() {
+    removalFromLaunchedQueriesLock.lock();
+    QueryCount count = new QueryCount(getRunningQueriesCount(), getQueuedQueriesCount(), getWaitingQueriesCount());
+    removalFromLaunchedQueriesLock.unlock();
+    return count;
+  }
+
   /**
    * Handle driver session start.
    *
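The new getQueryCountSnapshot() above holds the launched-queries removal lock while reading all three counters, so they form a mutually consistent view; otherwise a query moving from queued to running between reads could be counted twice or missed. A generic sketch of the pattern (illustrative only, with a try/finally for safety):

  import java.util.concurrent.locks.Lock;
  import java.util.concurrent.locks.ReentrantLock;

  public class SnapshotSketch {
    private final Lock lock = new ReentrantLock();
    private long running, queued, waiting;

    /** Returns {running, queued, waiting} read as one consistent unit. */
    public long[] snapshot() {
      lock.lock();
      try {
        // Writers that move a query between states take the same lock,
        // so no query can be double-counted or missed mid-transition.
        return new long[] {running, queued, waiting};
      } finally {
        lock.unlock();
      }
    }
  }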

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server/src/main/java/org/apache/lens/server/query/collect/DefaultEstimatedQueryCollection.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/collect/DefaultEstimatedQueryCollection.java b/lens-server/src/main/java/org/apache/lens/server/query/collect/DefaultEstimatedQueryCollection.java
index 908b86f..4e5aa68 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/collect/DefaultEstimatedQueryCollection.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/collect/DefaultEstimatedQueryCollection.java
@@ -37,6 +37,7 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Sets;
 import lombok.NonNull;
+import lombok.ToString;
 import lombok.extern.slf4j.Slf4j;
 
 /**
@@ -48,6 +49,7 @@ import lombok.extern.slf4j.Slf4j;
  *
  */
 @Slf4j
+@ToString
 public class DefaultEstimatedQueryCollection implements EstimatedQueryCollection {
 
   private final QueryCollection queries;
@@ -109,7 +111,7 @@ public class DefaultEstimatedQueryCollection implements EstimatedQueryCollection
    */
   @Override
   public boolean remove(QueryContext query) {
-    this.queriesByDriver.remove(query.getSelectedDriver());
+    this.queriesByDriver.remove(query.getSelectedDriver(), query);
     return this.queries.remove(query);
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server/src/main/java/org/apache/lens/server/query/collect/DefaultQueryCollection.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/collect/DefaultQueryCollection.java b/lens-server/src/main/java/org/apache/lens/server/query/collect/DefaultQueryCollection.java
index 844237a..6f4230b 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/collect/DefaultQueryCollection.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/collect/DefaultQueryCollection.java
@@ -27,6 +27,7 @@ import org.apache.commons.collections.map.MultiValueMap;
 
 import com.google.common.collect.Sets;
 import lombok.NonNull;
+import lombok.ToString;
 import lombok.extern.slf4j.Slf4j;
 
 /**
@@ -36,6 +37,7 @@ import lombok.extern.slf4j.Slf4j;
  * @see QueryCollection
  */
 @Slf4j
+@ToString
 public class DefaultQueryCollection implements QueryCollection {
 
   private final Set<QueryContext> queries;

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server/src/main/java/org/apache/lens/server/query/collect/ThreadSafeEstimatedQueryCollection.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/collect/ThreadSafeEstimatedQueryCollection.java b/lens-server/src/main/java/org/apache/lens/server/query/collect/ThreadSafeEstimatedQueryCollection.java
index ca24a8b..0e68395 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/collect/ThreadSafeEstimatedQueryCollection.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/collect/ThreadSafeEstimatedQueryCollection.java
@@ -50,22 +50,22 @@ public class ThreadSafeEstimatedQueryCollection implements EstimatedQueryCollect
   }
 
   @Override
-  public synchronized  QueryCost getTotalQueryCost(String user) {
+  public synchronized QueryCost getTotalQueryCost(String user) {
     return this.estimatedQueries.getTotalQueryCost(user);
   }
 
   @Override
-  public synchronized  Set<QueryContext> getQueries() {
+  public synchronized Set<QueryContext> getQueries() {
     return this.estimatedQueries.getQueries();
   }
 
   @Override
-  public synchronized  Set<QueryContext> getQueries(String user) {
+  public synchronized Set<QueryContext> getQueries(String user) {
     return this.estimatedQueries.getQueries(user);
   }
 
   @Override
-  public synchronized  int getQueriesCount() {
+  public synchronized int getQueriesCount() {
     return this.estimatedQueries.getQueriesCount();
   }
 
@@ -93,4 +93,9 @@ public class ThreadSafeEstimatedQueryCollection implements EstimatedQueryCollect
   public synchronized boolean removeAll(Set<QueryContext> queries) {
     return this.estimatedQueries.removeAll(queries);
   }
+
+  @Override
+  public synchronized String toString() {
+    return getClass().getSimpleName() + "(estimatedQueries=" + this.estimatedQueries + ")";
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java b/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
index 3dad050..8ba9353 100644
--- a/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
+++ b/lens-server/src/test/java/org/apache/lens/server/LensJerseyTest.java
@@ -18,6 +18,10 @@
  */
 package org.apache.lens.server;
 
+import static org.apache.lens.server.LensServerTestUtil.DB_WITH_JARS;
+import static org.apache.lens.server.LensServerTestUtil.DB_WITH_JARS_2;
+import static org.apache.lens.server.LensServerTestUtil.createTestDatabaseResources;
+
 import static org.testng.Assert.*;
 
 import java.io.IOException;
@@ -123,10 +127,10 @@ public abstract class LensJerseyTest extends JerseyTest {
     hiveConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_CONNECTION_RETRY_LIMIT, 3);
     hiveConf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_RETRY_LIMIT, 3);
 
-    LensTestUtil.createTestDatabaseResources(new String[]{LensTestUtil.DB_WITH_JARS, LensTestUtil.DB_WITH_JARS_2},
+    createTestDatabaseResources(new String[]{DB_WITH_JARS, DB_WITH_JARS_2},
       hiveConf);
 
-    LensServices.get().init(LensServerConf.getHiveConf());
+    LensServices.get().init(getServerConf());
     LensServices.get().start();
 
     // Check if mock service is started

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server/src/test/java/org/apache/lens/server/LensServerTestUtil.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/LensServerTestUtil.java b/lens-server/src/test/java/org/apache/lens/server/LensServerTestUtil.java
new file mode 100644
index 0000000..57bedee
--- /dev/null
+++ b/lens-server/src/test/java/org/apache/lens/server/LensServerTestUtil.java
@@ -0,0 +1,298 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.server;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertTrue;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.*;
+
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.GenericType;
+import javax.ws.rs.core.MediaType;
+
+import org.apache.lens.api.LensConf;
+import org.apache.lens.api.LensSessionHandle;
+import org.apache.lens.api.query.LensQuery;
+import org.apache.lens.api.query.QueryHandle;
+import org.apache.lens.api.query.QueryStatus;
+import org.apache.lens.api.result.LensAPIResult;
+import org.apache.lens.server.api.LensConfConstants;
+
+import org.apache.commons.io.FileUtils;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+import org.glassfish.jersey.media.multipart.FormDataBodyPart;
+import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
+import org.glassfish.jersey.media.multipart.FormDataMultiPart;
+
+import lombok.extern.slf4j.Slf4j;
+
+/**
+ * The Class LensServerTestUtil.
+ */
+@Slf4j
+public final class LensServerTestUtil {
+
+  public static final String DB_WITH_JARS = "test_db_static_jars";
+  public static final String DB_WITH_JARS_2 = "test_db_static_jars_2";
+  private LensServerTestUtil() {
+
+  }
+
+  /**
+   * Creates the table.
+   *
+   * @param tblName       the tbl name
+   * @param parent        the parent
+   * @param lensSessionId the lens session id
+   * @param schemaStr     the schema string, with surrounding parentheses.
+   * @throws InterruptedException the interrupted exception
+   */
+  public static void createTable(String tblName, WebTarget parent, LensSessionHandle lensSessionId, String schemaStr)
+    throws InterruptedException {
+    LensConf conf = new LensConf();
+    conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
+    final WebTarget target = parent.path("queryapi/queries");
+
+    final FormDataMultiPart mp = new FormDataMultiPart();
+    String createTable = "CREATE TABLE IF NOT EXISTS " + tblName + schemaStr;
+
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
+      MediaType.APPLICATION_XML_TYPE));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), createTable));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
+      MediaType.APPLICATION_XML_TYPE));
+
+    final QueryHandle handle = target.request()
+        .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+            new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
+    // wait till the query finishes
+    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
+      .get(LensQuery.class);
+    QueryStatus stat = ctx.getStatus();
+    while (!stat.finished()) {
+      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
+      stat = ctx.getStatus();
+      Thread.sleep(1000);
+    }
+    final String debugHelpMsg = "Query Handle: " + ctx.getQueryHandleString();
+    assertEquals(ctx.getStatus().getStatus(), QueryStatus.Status.SUCCESSFUL, debugHelpMsg);
+    assertTrue(ctx.getSubmissionTime() > 0, debugHelpMsg);
+    assertTrue(ctx.getLaunchTime() > 0, debugHelpMsg);
+    assertTrue(ctx.getDriverStartTime() > 0, debugHelpMsg);
+    assertTrue(ctx.getDriverFinishTime() > 0, debugHelpMsg);
+    assertTrue(ctx.getFinishTime() > 0, debugHelpMsg);
+  }
+
+  public static void createTable(String tblName, WebTarget parent, LensSessionHandle lensSessionId)
+    throws InterruptedException {
+    createTable(tblName, parent, lensSessionId, "(ID INT, IDSTR STRING)");
+  }
+
+  public static void loadData(String tblName, final String testDataFile, WebTarget parent,
+      LensSessionHandle lensSessionId) throws InterruptedException {
+    LensConf conf = new LensConf();
+    conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
+    final WebTarget target = parent.path("queryapi/queries");
+
+    final FormDataMultiPart mp = new FormDataMultiPart();
+    String dataLoad = "LOAD DATA LOCAL INPATH '" + testDataFile + "' OVERWRITE INTO TABLE " + tblName;
+
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
+        MediaType.APPLICATION_XML_TYPE));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), dataLoad));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
+        MediaType.APPLICATION_XML_TYPE));
+
+    final QueryHandle handle = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+        new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
+
+    // wait till the query finishes
+    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
+        .get(LensQuery.class);
+    QueryStatus stat = ctx.getStatus();
+    while (!stat.finished()) {
+      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
+      stat = ctx.getStatus();
+      Thread.sleep(1000);
+    }
+    assertEquals(ctx.getStatus().getStatus(), QueryStatus.Status.SUCCESSFUL);
+  }
+  /**
+   * Load data from a classpath resource.
+   *
+   * @param tblName       the tbl name
+   * @param testDataFile  the test data file
+   * @param parent        the parent
+   * @param lensSessionId the lens session id
+   * @throws InterruptedException the interrupted exception
+   */
+  public static void loadDataFromClasspath(String tblName, final String testDataFile, WebTarget parent,
+      LensSessionHandle lensSessionId) throws InterruptedException {
+
+    String absolutePath = LensServerTestUtil.class.getClassLoader().getResource(testDataFile).getPath();
+    loadData(tblName, absolutePath, parent, lensSessionId);
+  }
+
+  /**
+   * Drop table.
+   *
+   * @param tblName       the tbl name
+   * @param parent        the parent
+   * @param lensSessionId the lens session id
+   * @throws InterruptedException the interrupted exception
+   */
+  public static void dropTable(String tblName, WebTarget parent, LensSessionHandle lensSessionId)
+    throws InterruptedException {
+    LensConf conf = new LensConf();
+    conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
+    dropTableWithConf(tblName, parent, lensSessionId, conf);
+  }
+
+  /**
+   * Drop table with conf passed.
+   *
+   * @param tblName       the tbl name
+   * @param parent        the parent
+   * @param lensSessionId the lens session id
+   * @param conf          the query conf
+   *
+   * @throws InterruptedException
+   */
+  public static void dropTableWithConf(String tblName, WebTarget parent, LensSessionHandle lensSessionId,
+    LensConf conf) throws InterruptedException {
+    final WebTarget target = parent.path("queryapi/queries");
+
+    final FormDataMultiPart mp = new FormDataMultiPart();
+    String createTable = "DROP TABLE IF EXISTS " + tblName;
+
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
+      MediaType.APPLICATION_XML_TYPE));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), createTable));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
+      MediaType.APPLICATION_XML_TYPE));
+
+    final QueryHandle handle = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
+        new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
+
+    // wait till the query finishes
+    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
+      .get(LensQuery.class);
+    QueryStatus stat = ctx.getStatus();
+    while (!stat.finished()) {
+      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
+      stat = ctx.getStatus();
+      Thread.sleep(1000);
+    }
+    assertEquals(ctx.getStatus().getStatus(), QueryStatus.Status.SUCCESSFUL);
+  }
+
+  /**
+   * Creates the hive table.
+   *
+   * @param tableName the table name
+   * @throws HiveException the hive exception
+   */
+  public static void createHiveTable(String tableName) throws HiveException {
+    List<FieldSchema> columns = new ArrayList<FieldSchema>();
+    columns.add(new FieldSchema("col1", "string", ""));
+    List<FieldSchema> partCols = new ArrayList<FieldSchema>();
+    partCols.add(new FieldSchema("pcol1", "string", ""));
+    Map<String, String> params = new HashMap<String, String>();
+    params.put("test.hive.table.prop", "tvalue");
+    Table tbl = Hive.get().newTable(tableName);
+    tbl.setTableType(TableType.MANAGED_TABLE);
+    tbl.getTTable().getSd().setCols(columns);
+    tbl.setPartCols(partCols);
+    tbl.getTTable().getParameters().putAll(params);
+    Hive.get().createTable(tbl);
+  }
+
+  /**
+   * Drop hive table.
+   *
+   * @param tableName the table name
+   * @throws HiveException the hive exception
+   */
+  public static void dropHiveTable(String tableName) throws HiveException {
+    Hive.get().dropTable(tableName);
+  }
+
+  public static void createTestDatabaseResources(String[] testDatabases, HiveConf conf) throws Exception {
+    File srcJarDir = new File("target/testjars/");
+    if (!srcJarDir.exists()) {
+      // nothing to setup
+      return;
+    }
+    File resDir = new File("target/resources");
+    if (!resDir.exists()) {
+      resDir.mkdir();
+    }
+
+    // Create databases and resource dirs
+    Hive hive = Hive.get(conf);
+    File testJarFile = new File("target/testjars/test.jar");
+    File serdeJarFile = new File("target/testjars/serde.jar");
+    for (String db : testDatabases) {
+      Database database = new Database();
+      database.setName(db);
+      hive.createDatabase(database, true);
+      File dbDir = new File(resDir, db);
+      if (!dbDir.exists()) {
+        dbDir.mkdir();
+      }
+      // Add a jar in the directory
+      try {
+
+        String[] jarOrder = {
+          "x_" + db + ".jar",
+          "y_" + db + ".jar",
+          "z_" + db + ".jar",
+          "serde.jar",
+        };
+
+        // Jar order is z, y, x; file listing order is x, y, z.
+        // We explicitly specify the jar order via the jar_order file.
+        FileUtils.writeLines(new File(dbDir, "jar_order"), Arrays.asList(jarOrder[2], jarOrder[1],
+          jarOrder[0], jarOrder[3]));
+
+        FileUtils.copyFile(testJarFile, new File(dbDir, jarOrder[0]));
+        FileUtils.copyFile(testJarFile, new File(dbDir, jarOrder[1]));
+        FileUtils.copyFile(testJarFile, new File(dbDir, jarOrder[2]));
+        FileUtils.copyFile(serdeJarFile, new File(dbDir, jarOrder[3]));
+      } catch (FileNotFoundException fnf) {
+        log.error("File not found.", fnf);
+      }
+    }
+  }
+}
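
A typical flow through these helpers inside a LensJerseyTest subclass, as the query
tests below do (table and data-file names are illustrative):

    LensSessionHandle session = queryService.openSession("foo", "bar",
      new HashMap<String, String>());
    LensServerTestUtil.createTable("demo_table", target(), session);
    LensServerTestUtil.loadDataFromClasspath("demo_table",
      TestResourceFile.TEST_DATA2_FILE.getValue(), target(), session);
    try {
      // ... execute queries against demo_table via the REST API ...
    } finally {
      LensServerTestUtil.dropTable("demo_table", target(), session);
      queryService.closeSession(session);
    }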

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server/src/test/java/org/apache/lens/server/LensTestUtil.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/LensTestUtil.java b/lens-server/src/test/java/org/apache/lens/server/LensTestUtil.java
deleted file mode 100644
index 62e9954..0000000
--- a/lens-server/src/test/java/org/apache/lens/server/LensTestUtil.java
+++ /dev/null
@@ -1,298 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.server;
-
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertTrue;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.*;
-
-import javax.ws.rs.client.Entity;
-import javax.ws.rs.client.WebTarget;
-import javax.ws.rs.core.GenericType;
-import javax.ws.rs.core.MediaType;
-
-import org.apache.lens.api.LensConf;
-import org.apache.lens.api.LensSessionHandle;
-import org.apache.lens.api.query.LensQuery;
-import org.apache.lens.api.query.QueryHandle;
-import org.apache.lens.api.query.QueryStatus;
-import org.apache.lens.api.result.LensAPIResult;
-import org.apache.lens.server.api.LensConfConstants;
-
-import org.apache.commons.io.FileUtils;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.TableType;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.Table;
-
-import org.glassfish.jersey.media.multipart.FormDataBodyPart;
-import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
-import org.glassfish.jersey.media.multipart.FormDataMultiPart;
-
-import lombok.extern.slf4j.Slf4j;
-
-/**
- * The Class LensTestUtil.
- */
-@Slf4j
-public final class LensTestUtil {
-
-  public static final String DB_WITH_JARS = "test_db_static_jars";
-  public static final String DB_WITH_JARS_2 = "test_db_static_jars_2";
-  private LensTestUtil() {
-
-  }
-
-  /**
-   * Creates the table.
-   *
-   * @param tblName       the tbl name
-   * @param parent        the parent
-   * @param lensSessionId the lens session id
-   * @param schemaStr     the schema string, with surrounding parenthesis.
-   * @throws InterruptedException the interrupted exception
-   */
-  public static void createTable(String tblName, WebTarget parent, LensSessionHandle lensSessionId, String schemaStr)
-    throws InterruptedException {
-    LensConf conf = new LensConf();
-    conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
-    final WebTarget target = parent.path("queryapi/queries");
-
-    final FormDataMultiPart mp = new FormDataMultiPart();
-    String createTable = "CREATE TABLE IF NOT EXISTS " + tblName + schemaStr;
-
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), createTable));
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-      MediaType.APPLICATION_XML_TYPE));
-
-    final QueryHandle handle = target.request()
-        .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
-            new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
-    // wait till the query finishes
-    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
-      .get(LensQuery.class);
-    QueryStatus stat = ctx.getStatus();
-    while (!stat.finished()) {
-      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
-      stat = ctx.getStatus();
-      Thread.sleep(1000);
-    }
-    final String debugHelpMsg = "Query Handle:"+ctx.getQueryHandleString();
-    assertEquals(ctx.getStatus().getStatus(), QueryStatus.Status.SUCCESSFUL, debugHelpMsg);
-    assertTrue(ctx.getSubmissionTime() > 0, debugHelpMsg);
-    assertTrue(ctx.getLaunchTime() > 0, debugHelpMsg);
-    assertTrue(ctx.getDriverStartTime() > 0, debugHelpMsg);
-    assertTrue(ctx.getDriverFinishTime() > 0, debugHelpMsg);
-    assertTrue(ctx.getFinishTime() > 0, debugHelpMsg);
-  }
-
-  public static void createTable(String tblName, WebTarget parent, LensSessionHandle lensSessionId)
-    throws InterruptedException {
-    createTable(tblName, parent, lensSessionId, "(ID INT, IDSTR STRING)");
-  }
-
-  public static void loadData(String tblName, final String testDataFile, WebTarget parent,
-      LensSessionHandle lensSessionId) throws InterruptedException {
-    LensConf conf = new LensConf();
-    conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
-    final WebTarget target = parent.path("queryapi/queries");
-
-    final FormDataMultiPart mp = new FormDataMultiPart();
-    String dataLoad = "LOAD DATA LOCAL INPATH '" + testDataFile + "' OVERWRITE INTO TABLE " + tblName;
-
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-        MediaType.APPLICATION_XML_TYPE));
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), dataLoad));
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-        MediaType.APPLICATION_XML_TYPE));
-
-    final QueryHandle handle = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
-        new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
-
-    // wait till the query finishes
-    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
-        .get(LensQuery.class);
-    QueryStatus stat = ctx.getStatus();
-    while (!stat.finished()) {
-      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
-      stat = ctx.getStatus();
-      Thread.sleep(1000);
-    }
-    assertEquals(ctx.getStatus().getStatus(), QueryStatus.Status.SUCCESSFUL);
-  }
-  /**
-   * Load data.
-   *
-   * @param tblName        the tbl name
-   * @param testDataFile the test data file
-   * @param parent         the parent
-   * @param lensSessionId  the lens session id
-   * @throws InterruptedException the interrupted exception
-   */
-  public static void loadDataFromClasspath(String tblName, final String testDataFile, WebTarget parent,
-      LensSessionHandle lensSessionId) throws InterruptedException {
-
-    String absolutePath = LensTestUtil.class.getClassLoader().getResource(testDataFile).getPath();
-    loadData(tblName, absolutePath, parent, lensSessionId);
-  }
-
-  /**
-   * Drop table.
-   *
-   * @param tblName       the tbl name
-   * @param parent        the parent
-   * @param lensSessionId the lens session id
-   * @throws InterruptedException the interrupted exception
-   */
-  public static void dropTable(String tblName, WebTarget parent, LensSessionHandle lensSessionId)
-    throws InterruptedException {
-    LensConf conf = new LensConf();
-    conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
-    dropTableWithConf(tblName, parent, lensSessionId, conf);
-  }
-
-  /**
-   * Drop table with conf passed.
-   *
-   * @param tblName       the tbl name
-   * @param parent        the parent
-   * @param lensSessionId the lens session id
-   * @param conf          the query conf
-   *
-   * @throws InterruptedException
-   */
-  public static void dropTableWithConf(String tblName, WebTarget parent, LensSessionHandle lensSessionId,
-    LensConf conf) throws InterruptedException {
-    final WebTarget target = parent.path("queryapi/queries");
-
-    final FormDataMultiPart mp = new FormDataMultiPart();
-    String createTable = "DROP TABLE IF EXISTS " + tblName;
-
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
-      MediaType.APPLICATION_XML_TYPE));
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), createTable));
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
-    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
-      MediaType.APPLICATION_XML_TYPE));
-
-    final QueryHandle handle = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
-        new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
-
-    // wait till the query finishes
-    LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
-      .get(LensQuery.class);
-    QueryStatus stat = ctx.getStatus();
-    while (!stat.finished()) {
-      ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request().get(LensQuery.class);
-      stat = ctx.getStatus();
-      Thread.sleep(1000);
-    }
-    assertEquals(ctx.getStatus().getStatus(), QueryStatus.Status.SUCCESSFUL);
-  }
-
-  /**
-   * Creates the hive table.
-   *
-   * @param tableName the table name
-   * @throws HiveException the hive exception
-   */
-  public static void createHiveTable(String tableName) throws HiveException {
-    List<FieldSchema> columns = new ArrayList<FieldSchema>();
-    columns.add(new FieldSchema("col1", "string", ""));
-    List<FieldSchema> partCols = new ArrayList<FieldSchema>();
-    partCols.add(new FieldSchema("pcol1", "string", ""));
-    Map<String, String> params = new HashMap<String, String>();
-    params.put("test.hive.table.prop", "tvalue");
-    Table tbl = Hive.get().newTable(tableName);
-    tbl.setTableType(TableType.MANAGED_TABLE);
-    tbl.getTTable().getSd().setCols(columns);
-    tbl.setPartCols(partCols);
-    tbl.getTTable().getParameters().putAll(params);
-    Hive.get().createTable(tbl);
-  }
-
-  /**
-   * Drop hive table.
-   *
-   * @param tableName the table name
-   * @throws HiveException the hive exception
-   */
-  public static void dropHiveTable(String tableName) throws HiveException {
-    Hive.get().dropTable(tableName);
-  }
-
-  public static void createTestDatabaseResources(String[] testDatabases, HiveConf conf) throws Exception {
-    File srcJarDir = new File("target/testjars/");
-    if (!srcJarDir.exists()) {
-      // nothing to setup
-      return;
-    }
-    File resDir = new File("target/resources");
-    if (!resDir.exists()) {
-      resDir.mkdir();
-    }
-
-    // Create databases and resource dirs
-    Hive hive = Hive.get(conf);
-    File testJarFile = new File("target/testjars/test.jar");
-    File serdeJarFile = new File("target/testjars/serde.jar");
-    for (String db : testDatabases) {
-      Database database = new Database();
-      database.setName(db);
-      hive.createDatabase(database, true);
-      File dbDir = new File(resDir, db);
-      if (!dbDir.exists()) {
-        dbDir.mkdir();
-      }
-      // Add a jar in the directory
-      try {
-
-        String[] jarOrder = {
-          "x_" + db + ".jar",
-          "y_" + db + ".jar",
-          "z_" + db + ".jar",
-          "serde.jar",
-        };
-
-        // Jar order is -> z, y, x, File listing order is x, y, z
-        // We are explicitly specifying jar order
-        FileUtils.writeLines(new File(dbDir, "jar_order"), Arrays.asList(jarOrder[2], jarOrder[1],
-          jarOrder[0], jarOrder[3]));
-
-        FileUtils.copyFile(testJarFile, new File(dbDir, jarOrder[0]));
-        FileUtils.copyFile(testJarFile, new File(dbDir, jarOrder[1]));
-        FileUtils.copyFile(testJarFile, new File(dbDir, jarOrder[2]));
-        FileUtils.copyFile(serdeJarFile, new File(dbDir, jarOrder[3]));
-      } catch (FileNotFoundException fnf) {
-        log.error("File not found.", fnf);
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server/src/test/java/org/apache/lens/server/TestServerMode.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/TestServerMode.java b/lens-server/src/test/java/org/apache/lens/server/TestServerMode.java
index def5683..75f21e1 100644
--- a/lens-server/src/test/java/org/apache/lens/server/TestServerMode.java
+++ b/lens-server/src/test/java/org/apache/lens/server/TestServerMode.java
@@ -61,7 +61,7 @@ public class TestServerMode extends LensAllApplicationJerseyTest {
   @BeforeTest
   public void setUp() throws Exception {
     super.setUp();
-    LensTestUtil.createTable("test_table", target(), RestAPITestUtil.openFooBarSession(target()));
+    LensServerTestUtil.createTable("test_table", target(), RestAPITestUtil.openFooBarSession(target()));
   }
 
   /*

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java b/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
index 49d62f9..7b6c560 100644
--- a/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
+++ b/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
@@ -18,6 +18,8 @@
  */
 package org.apache.lens.server;
 
+import static org.apache.lens.server.LensServerTestUtil.createTable;
+import static org.apache.lens.server.LensServerTestUtil.loadData;
 import static org.apache.lens.server.common.RestAPITestUtil.execute;
 
 import static org.testng.Assert.assertEquals;
@@ -135,8 +137,8 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
     createRestartTestDataFile();
 
     // Create a test table
-    LensTestUtil.createTable("test_server_restart", target(), lensSessionId);
-    LensTestUtil.loadData("test_server_restart", TestResourceFile.TEST_DATA_FILE.getValue(), target(), lensSessionId);
+    createTable("test_server_restart", target(), lensSessionId);
+    loadData("test_server_restart", TestResourceFile.TEST_DATA_FILE.getValue(), target(), lensSessionId);
     log.info("Loaded data");
 
     // test post execute op
@@ -211,7 +213,7 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
       }
     }
     log.info("End server restart test");
-    LensTestUtil.dropTable("test_server_restart", target(), lensSessionId);
+    LensServerTestUtil.dropTable("test_server_restart", target(), lensSessionId);
     queryService.closeSession(lensSessionId);
   }
 
@@ -242,8 +244,8 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
     log.info("@@ Added resource {}", dataFile.toURI());
 
     // Create a test table
-    LensTestUtil.createTable("test_hive_server_restart", target(), lensSessionId);
-    LensTestUtil.loadData("test_hive_server_restart", TestResourceFile.TEST_DATA_FILE.getValue(), target(),
+    createTable("test_hive_server_restart", target(), lensSessionId);
+    loadData("test_hive_server_restart", TestResourceFile.TEST_DATA_FILE.getValue(), target(),
       lensSessionId);
     log.info("Loaded data");
 
@@ -354,7 +356,7 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
     // "Expected to be successful " + handle);
 
     log.info("End hive server restart test");
-    LensTestUtil.dropTable("test_hive_server_restart", target(), lensSessionId);
+    LensServerTestUtil.dropTable("test_hive_server_restart", target(), lensSessionId);
     queryService.closeSession(lensSessionId);
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
index 3bc692e..e0c0923 100644
--- a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
@@ -48,8 +48,8 @@ import org.apache.lens.api.result.LensAPIResult;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.ExprColumn.ExprSpec;
 import org.apache.lens.server.LensJerseyTest;
+import org.apache.lens.server.LensServerTestUtil;
 import org.apache.lens.server.LensServices;
-import org.apache.lens.server.LensTestUtil;
 import org.apache.lens.server.api.metastore.CubeMetastoreService;
 import org.apache.lens.server.api.util.LensUtil;
 
@@ -2418,7 +2418,7 @@ public class TestMetastoreService extends LensJerseyTest {
       // create hive table
       String tableName = "test_simple_table";
       SessionState.get().setCurrentDatabase(DB);
-      LensTestUtil.createHiveTable(tableName);
+      LensServerTestUtil.createHiveTable(tableName);
 
       WebTarget target = target().path("metastore").path("nativetables");
       // get all native tables

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server/src/test/java/org/apache/lens/server/metrics/TestResourceMethodMetrics.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/metrics/TestResourceMethodMetrics.java b/lens-server/src/test/java/org/apache/lens/server/metrics/TestResourceMethodMetrics.java
index 94e8069..515ac13 100644
--- a/lens-server/src/test/java/org/apache/lens/server/metrics/TestResourceMethodMetrics.java
+++ b/lens-server/src/test/java/org/apache/lens/server/metrics/TestResourceMethodMetrics.java
@@ -41,8 +41,8 @@ import org.apache.lens.api.query.QueryHandle;
 import org.apache.lens.api.result.LensAPIResult;
 import org.apache.lens.server.LensAllApplicationJerseyTest;
 import org.apache.lens.server.LensApplication;
+import org.apache.lens.server.LensServerTestUtil;
 import org.apache.lens.server.LensServices;
-import org.apache.lens.server.LensTestUtil;
 import org.apache.lens.server.api.metastore.CubeMetastoreService;
 import org.apache.lens.server.api.metrics.MethodMetrics;
 import org.apache.lens.server.api.metrics.MetricsService;
@@ -87,16 +87,16 @@ public class TestResourceMethodMetrics extends LensAllApplicationJerseyTest {
   }
 
   private void createTable(String tblName) throws InterruptedException {
-    LensTestUtil.createTable(tblName, target(), lensSessionId);
+    LensServerTestUtil.createTable(tblName, target(), lensSessionId);
   }
 
   private void loadData(String tblName, final String testDataFile) throws InterruptedException {
-    LensTestUtil.loadDataFromClasspath(tblName, testDataFile, target(), lensSessionId);
+    LensServerTestUtil.loadDataFromClasspath(tblName, testDataFile, target(), lensSessionId);
   }
 
   @AfterTest
   public void tearDown() throws Exception {
-    LensTestUtil.dropTable(TestQueryService.TEST_TABLE, target(), lensSessionId);
+    LensServerTestUtil.dropTable(TestQueryService.TEST_TABLE, target(), lensSessionId);
     metastoreService.closeSession(lensSessionId);
     super.tearDown();
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server/src/test/java/org/apache/lens/server/query/TestQueryConstraints.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryConstraints.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryConstraints.java
new file mode 100644
index 0000000..eb94c89
--- /dev/null
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryConstraints.java
@@ -0,0 +1,285 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.server.query;
+
+import static org.apache.lens.server.api.LensConfConstants.QUERY_METRIC_UNIQUE_ID_CONF_KEY;
+import static org.apache.lens.server.api.util.LensUtil.getImplementations;
+
+import static org.testng.Assert.*;
+
+import java.util.*;
+
+import javax.ws.rs.core.Application;
+
+import org.apache.lens.api.LensSessionHandle;
+import org.apache.lens.api.jaxb.LensJAXBContextResolver;
+import org.apache.lens.api.query.QueryHandle;
+import org.apache.lens.driver.hive.HiveDriver;
+import org.apache.lens.server.LensJerseyTest;
+import org.apache.lens.server.LensServerConf;
+import org.apache.lens.server.LensServerTestUtil;
+import org.apache.lens.server.LensServices;
+import org.apache.lens.server.api.LensConfConstants;
+import org.apache.lens.server.api.LensServerAPITestUtil;
+import org.apache.lens.server.api.driver.DriverSelector;
+import org.apache.lens.server.api.driver.LensDriver;
+import org.apache.lens.server.api.error.LensException;
+import org.apache.lens.server.api.metrics.MetricsService;
+import org.apache.lens.server.api.query.AbstractQueryContext;
+import org.apache.lens.server.api.query.QueryExecutionService;
+import org.apache.lens.server.common.RestAPITestUtil;
+import org.apache.lens.server.common.TestResourceFile;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+import org.glassfish.jersey.client.ClientConfig;
+import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.glassfish.jersey.test.TestProperties;
+import org.testng.annotations.AfterMethod;
+import org.testng.annotations.AfterTest;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Test;
+
+import com.beust.jcommander.internal.Lists;
+import com.google.common.base.Optional;
+import lombok.extern.slf4j.Slf4j;
+
+/**
+ * The Class TestQueryConstraints.
+ */
+@Slf4j
+@Test(groups = "two-working-drivers", dependsOnGroups = "filter-test")
+public class TestQueryConstraints extends LensJerseyTest {
+  private HiveConf serverConf;
+
+  public static class MockHiveDriverBase extends HiveDriver {
+
+    private final Configuration customConf;
+
+    /**
+     * Instantiates a new mock hive driver base.
+     *
+     * @throws LensException the lens exception
+     */
+    public MockHiveDriverBase() throws LensException {
+      customConf = new Configuration();
+      customConf.setInt("driver.max.concurrent.launched.queries", 2);
+      customConf.set(HiveDriver.QUERY_LAUNCHING_CONSTRAINT_FACTORIES_KEY,
+        "org.apache.lens.server.api.query.constraint.MaxConcurrentDriverQueriesConstraintFactory");
+    }
+
+    @Override
+    public void configure(Configuration conf) throws LensException {
+      super.configure(conf);
+      queryConstraints = getImplementations(HiveDriver.QUERY_LAUNCHING_CONSTRAINT_FACTORIES_KEY, customConf);
+    }
+  }
+
+  public static class HiveDriver1 extends MockHiveDriverBase {
+
+    /**
+     * Instantiates a new hive driver.
+     *
+     * @throws LensException the lens exception
+     */
+    public HiveDriver1() throws LensException {
+
+    }
+  }
+
+  public static class HiveDriver2 extends MockHiveDriverBase {
+
+    /**
+     * Instantiates a new hive driver.
+     *
+     * @throws LensException the lens exception
+     */
+    public HiveDriver2() throws LensException {
+    }
+  }
+
+  public static class RoundRobinSelector implements DriverSelector {
+    int counter = 0;
+
+    @Override
+    public LensDriver select(AbstractQueryContext ctx, Configuration conf) {
+      final Collection<LensDriver> drivers = ctx.getDriverContext().getDriversWithValidQueryCost();
+      LensDriver driver = drivers.toArray(new LensDriver[drivers.size()])[counter];
+      counter = (counter + 1) % 2;
+      return driver;
+    }
+  }
+
+  /** The query service. */
+  QueryExecutionServiceImpl queryService;
+
+  /** The metrics svc. */
+  MetricsService metricsSvc;
+
+  /** The lens session id. */
+  LensSessionHandle lensSessionId;
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.glassfish.jersey.test.JerseyTest#setUp()
+   */
+  @BeforeTest
+  public void setUp() throws Exception {
+    super.setUp();
+    queryService = LensServices.get().getService(QueryExecutionService.NAME);
+    metricsSvc = LensServices.get().getService(MetricsService.NAME);
+    Map<String, String> sessionConf = new HashMap<>();
+    sessionConf.put("test.session.key", "svalue");
+    // The "@localhost" suffix should be stripped from the user name automatically.
+    lensSessionId = queryService.openSession("foo@localhost", "bar", sessionConf);
+    createTable(TEST_TABLE);
+    loadData(TEST_TABLE, TestResourceFile.TEST_DATA2_FILE.getValue());
+  }
+
+  @Override
+  public HiveConf getServerConf() {
+    if (serverConf == null) {
+      serverConf = new HiveConf(super.getServerConf());
+      serverConf.set(LensConfConstants.DRIVER_CLASSES,
+        HiveDriver1.class.getName() + "," + HiveDriver2.class.getName());
+      serverConf.set("lens.server.driver.selector.class", RoundRobinSelector.class.getName());
+      LensServerConf.getConfForDrivers().addResource(serverConf);
+    }
+    return serverConf;
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.glassfish.jersey.test.JerseyTest#tearDown()
+   */
+  @AfterTest
+  public void tearDown() throws Exception {
+    dropTable(TEST_TABLE);
+    queryService.closeSession(lensSessionId);
+    for (LensDriver driver : queryService.getDrivers()) {
+      if (driver instanceof HiveDriver) {
+        assertFalse(((HiveDriver) driver).hasLensSession(lensSessionId));
+      }
+    }
+    super.tearDown();
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.glassfish.jersey.test.JerseyTest#configure()
+   */
+  @Override
+  protected Application configure() {
+    enable(TestProperties.LOG_TRAFFIC);
+    enable(TestProperties.DUMP_ENTITY);
+    return new TestQueryService.QueryServiceTestApp();
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see org.glassfish.jersey.test.JerseyTest#configureClient(org.glassfish.jersey.client.ClientConfig)
+   */
+  @Override
+  protected void configureClient(ClientConfig config) {
+    config.register(MultiPartFeature.class);
+    config.register(LensJAXBContextResolver.class);
+  }
+
+  /** The test table. */
+  public static final String TEST_TABLE = "TEST_TABLE";
+
+  /**
+   * Creates the table.
+   *
+   * @param tblName the tbl name
+   * @throws InterruptedException the interrupted exception
+   */
+  private void createTable(String tblName) throws InterruptedException {
+    LensServerTestUtil.createTable(tblName, target(), lensSessionId);
+  }
+
+  /**
+   * Load data.
+   *
+   * @param tblName      the tbl name
+   * @param testDataFile the test data file
+   * @throws InterruptedException the interrupted exception
+   */
+  private void loadData(String tblName, final String testDataFile) throws InterruptedException {
+    LensServerTestUtil.loadDataFromClasspath(tblName, testDataFile, target(), lensSessionId);
+  }
+
+  /**
+   * Drop table.
+   *
+   * @param tblName the tbl name
+   * @throws InterruptedException the interrupted exception
+   */
+  private void dropTable(String tblName) throws InterruptedException {
+    LensServerTestUtil.dropTable(tblName, target(), lensSessionId);
+  }
+
+  @Test
+  public void testThrottling() throws InterruptedException {
+    List<QueryHandle> handles = Lists.newArrayList();
+    for (int j = 0; j < 5; j++) {
+      for (int i = 0; i < 10; i++) {
+        handles.add(launchQuery());
+        assertValidity();
+      }
+      // No harm in sleeping; the queries will take time anyway.
+      Thread.sleep(1000);
+    }
+    for (QueryHandle handle : handles) {
+      RestAPITestUtil.waitForQueryToFinish(target(), lensSessionId, handle);
+      assertValidity();
+    }
+    for (QueryHandle handle : handles) {
+      RestAPITestUtil.getLensQueryResult(target(), lensSessionId, handle);
+      assertValidity();
+    }
+  }
+
+  private void assertValidity() {
+    QueryExecutionServiceImpl.QueryCount count = queryService.getQueryCountSnapshot();
+    assertTrue(count.running <= 4, System.currentTimeMillis() + " " + count.running + " running queries: "
+      + queryService.getLaunchedQueries());
+    if (count.running == 4) {
+      assertEquals(count.queued, 0);
+    } else {
+      assertEquals(count.waiting, 0);
+    }
+  }
+
+  private QueryHandle launchQuery() {
+    return RestAPITestUtil.executeAndGetHandle(target(), Optional.of(lensSessionId),
+      Optional.of("select ID from " + TEST_TABLE),
+      Optional.of(LensServerAPITestUtil.getLensConf(QUERY_METRIC_UNIQUE_ID_CONF_KEY, UUID.randomUUID())));
+  }
+
+  @AfterMethod
+  private void waitForPurge() throws InterruptedException {
+    waitForPurge(0, queryService.finishedQueries);
+  }
+}
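
The bound checked in assertValidity() follows from the test setup: each mock driver
caps concurrently launched queries at 2 (driver.max.concurrent.launched.queries), and
the round-robin selector spreads submissions across the two drivers, so at most
2 drivers x 2 queries = 4 can be running at once; once 4 are running, any further
submissions must wait in the queue.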

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server/src/test/java/org/apache/lens/server/query/TestQueryEndEmailNotifier.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryEndEmailNotifier.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryEndEmailNotifier.java
index ec6fd86..4ac42b2 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryEndEmailNotifier.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryEndEmailNotifier.java
@@ -18,7 +18,7 @@
  */
 package org.apache.lens.server.query;
 
-import static org.apache.lens.server.api.util.TestLensUtil.getLensConf;
+import static org.apache.lens.server.api.LensServerAPITestUtil.getLensConf;
 import static org.apache.lens.server.common.RestAPITestUtil.*;
 
 import static org.testng.Assert.assertEquals;
@@ -41,8 +41,8 @@ import org.apache.lens.api.query.QueryResult;
 import org.apache.lens.api.query.QueryStatus.Status;
 import org.apache.lens.api.result.LensAPIResult;
 import org.apache.lens.server.LensJerseyTest;
+import org.apache.lens.server.LensServerTestUtil;
 import org.apache.lens.server.LensServices;
-import org.apache.lens.server.LensTestUtil;
 import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.query.QueryExecutionService;
 import org.apache.lens.server.common.TestResourceFile;
@@ -144,7 +144,7 @@ public class TestQueryEndEmailNotifier extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    */
   private void createTable(String tblName) throws InterruptedException {
-    LensTestUtil.createTable(tblName, target(), lensSessionId);
+    LensServerTestUtil.createTable(tblName, target(), lensSessionId);
   }
 
   /**
@@ -155,7 +155,7 @@ public class TestQueryEndEmailNotifier extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    */
   private void loadData(String tblName, final String testDataFile) throws InterruptedException {
-    LensTestUtil.loadDataFromClasspath(tblName, testDataFile, target(), lensSessionId);
+    LensServerTestUtil.loadDataFromClasspath(tblName, testDataFile, target(), lensSessionId);
   }
 
   /**
@@ -165,7 +165,7 @@ public class TestQueryEndEmailNotifier extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    */
   private void dropTable(String tblName) throws InterruptedException {
-    LensTestUtil.dropTable(tblName, target(), lensSessionId);
+    LensServerTestUtil.dropTable(tblName, target(), lensSessionId);
   }
 
   private QueryHandle launchAndWaitForQuery(LensConf conf, String query, Status expectedStatus)

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index c37b0ed..c8a1cc6 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -20,7 +20,9 @@ package org.apache.lens.server.query;
 
 import static javax.ws.rs.core.Response.Status.*;
 
-import static org.apache.lens.server.api.util.TestLensUtil.getLensConf;
+import static org.apache.lens.server.LensServerTestUtil.DB_WITH_JARS;
+import static org.apache.lens.server.LensServerTestUtil.DB_WITH_JARS_2;
+import static org.apache.lens.server.api.LensServerAPITestUtil.getLensConf;
 import static org.apache.lens.server.common.RestAPITestUtil.*;
 
 import static org.testng.Assert.*;
@@ -48,8 +50,8 @@ import org.apache.lens.api.result.QueryCostTO;
 import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.driver.hive.HiveDriver;
 import org.apache.lens.server.LensJerseyTest;
+import org.apache.lens.server.LensServerTestUtil;
 import org.apache.lens.server.LensServices;
-import org.apache.lens.server.LensTestUtil;
 import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.driver.LensDriver;
 import org.apache.lens.server.api.error.LensException;
@@ -183,7 +185,7 @@ public class TestQueryService extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    */
   private void createTable(String tblName) throws InterruptedException {
-    LensTestUtil.createTable(tblName, target(), lensSessionId);
+    LensServerTestUtil.createTable(tblName, target(), lensSessionId);
   }
 
   /**
@@ -194,7 +196,7 @@ public class TestQueryService extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    */
   private void loadData(String tblName, final String testDataFile) throws InterruptedException {
-    LensTestUtil.loadDataFromClasspath(tblName, testDataFile, target(), lensSessionId);
+    LensServerTestUtil.loadDataFromClasspath(tblName, testDataFile, target(), lensSessionId);
   }
 
   /**
@@ -204,7 +206,7 @@ public class TestQueryService extends LensJerseyTest {
    * @throws InterruptedException the interrupted exception
    */
   private void dropTable(String tblName) throws InterruptedException {
-    LensTestUtil.dropTable(tblName, target(), lensSessionId);
+    LensServerTestUtil.dropTable(tblName, target(), lensSessionId);
   }
 
   // test get a random query, should return 400
@@ -1248,23 +1250,23 @@ public class TestQueryService extends LensJerseyTest {
 
     // Open session with a DB which has static jars
     LensSessionHandle sessionHandle =
-      sessionService.openSession("foo@localhost", "bar", LensTestUtil.DB_WITH_JARS, new HashMap<String, String>());
+      sessionService.openSession("foo@localhost", "bar", DB_WITH_JARS, new HashMap<String, String>());
 
     // Add a jar in the session
     File testJarFile = new File("target/testjars/test2.jar");
     sessionService.addResourceToAllServices(sessionHandle, "jar", "file://" + testJarFile.getAbsolutePath());
 
-    log.info("@@@ Opened session " + sessionHandle.getPublicId() + " with database " + LensTestUtil.DB_WITH_JARS);
+    log.info("@@@ Opened session " + sessionHandle.getPublicId() + " with database " + DB_WITH_JARS);
     LensSessionImpl session = sessionService.getSession(sessionHandle);
 
     // Jars should be pending until query is run
-    assertEquals(session.getPendingSessionResourcesForDatabase(LensTestUtil.DB_WITH_JARS).size(), 1);
-    assertEquals(session.getPendingSessionResourcesForDatabase(LensTestUtil.DB_WITH_JARS_2).size(), 1);
+    assertEquals(session.getPendingSessionResourcesForDatabase(DB_WITH_JARS).size(), 1);
+    assertEquals(session.getPendingSessionResourcesForDatabase(DB_WITH_JARS_2).size(), 1);
 
     final String tableInDBWithJars = "testHiveDriverGetsDBJars";
     try {
       // First execute query on the session with db should load jars from DB
-      LensTestUtil.createTable(tableInDBWithJars, target(), sessionHandle, "(ID INT, IDSTR STRING) "
+      LensServerTestUtil.createTable(tableInDBWithJars, target(), sessionHandle, "(ID INT, IDSTR STRING) "
         + "ROW FORMAT SERDE \"DatabaseJarSerde\"");
 
       boolean addedToHiveDriver = false;
@@ -1272,37 +1274,36 @@ public class TestQueryService extends LensJerseyTest {
       for (LensDriver driver : queryService.getDrivers()) {
         if (driver instanceof HiveDriver) {
           addedToHiveDriver =
-            ((HiveDriver) driver).areDBResourcesAddedForSession(sessionHandle.getPublicId().toString(),
-              LensTestUtil.DB_WITH_JARS);
+            ((HiveDriver) driver).areDBResourcesAddedForSession(sessionHandle.getPublicId().toString(), DB_WITH_JARS);
         }
       }
       assertTrue(addedToHiveDriver);
 
       // Switch database
       log.info("@@@# database switch test");
-      session.setCurrentDatabase(LensTestUtil.DB_WITH_JARS_2);
-      LensTestUtil.createTable(tableInDBWithJars + "_2", target(), sessionHandle, "(ID INT, IDSTR STRING) "
+      session.setCurrentDatabase(DB_WITH_JARS_2);
+      LensServerTestUtil.createTable(tableInDBWithJars + "_2", target(), sessionHandle, "(ID INT, IDSTR STRING) "
         + "ROW FORMAT SERDE \"DatabaseJarSerde\"");
 
       // All db jars should have been added
-      assertTrue(session.getDBResources(LensTestUtil.DB_WITH_JARS_2).isEmpty());
-      assertTrue(session.getDBResources(LensTestUtil.DB_WITH_JARS).isEmpty());
+      assertTrue(session.getDBResources(DB_WITH_JARS_2).isEmpty());
+      assertTrue(session.getDBResources(DB_WITH_JARS).isEmpty());
 
       // All session resources must have been added to both DBs
       assertFalse(session.getLensSessionPersistInfo().getResources().isEmpty());
       for (LensSessionImpl.ResourceEntry resource : session.getLensSessionPersistInfo().getResources()) {
-        assertTrue(resource.isAddedToDatabase(LensTestUtil.DB_WITH_JARS_2));
-        assertTrue(resource.isAddedToDatabase(LensTestUtil.DB_WITH_JARS));
+        assertTrue(resource.isAddedToDatabase(DB_WITH_JARS_2));
+        assertTrue(resource.isAddedToDatabase(DB_WITH_JARS));
       }
 
-      assertTrue(session.getPendingSessionResourcesForDatabase(LensTestUtil.DB_WITH_JARS).isEmpty());
-      assertTrue(session.getPendingSessionResourcesForDatabase(LensTestUtil.DB_WITH_JARS_2).isEmpty());
+      assertTrue(session.getPendingSessionResourcesForDatabase(DB_WITH_JARS).isEmpty());
+      assertTrue(session.getPendingSessionResourcesForDatabase(DB_WITH_JARS_2).isEmpty());
 
     } finally {
       log.info("@@@ TEST_OVER");
       try {
-        LensTestUtil.dropTable(tableInDBWithJars, target(), sessionHandle);
-        LensTestUtil.dropTable(tableInDBWithJars + "_2", target(), sessionHandle);
+        LensServerTestUtil.dropTable(tableInDBWithJars, target(), sessionHandle);
+        LensServerTestUtil.dropTable(tableInDBWithJars + "_2", target(), sessionHandle);
       } catch (Throwable th) {
         log.error("Exception while dropping table.", th);
       }
@@ -1361,13 +1362,13 @@ public class TestQueryService extends LensJerseyTest {
     LensConf conf = new LensConf();
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "true");
     String tblName = "testNonSelectQueriesWithPersistResult";
-    LensTestUtil.dropTableWithConf(tblName, target(), lensSessionId, conf);
+    LensServerTestUtil.dropTableWithConf(tblName, target(), lensSessionId, conf);
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_SET, "true");
-    LensTestUtil.dropTableWithConf(tblName, target(), lensSessionId, conf);
+    LensServerTestUtil.dropTableWithConf(tblName, target(), lensSessionId, conf);
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, "false");
-    LensTestUtil.dropTableWithConf(tblName, target(), lensSessionId, conf);
+    LensServerTestUtil.dropTableWithConf(tblName, target(), lensSessionId, conf);
     conf.addProperty(LensConfConstants.QUERY_PERSISTENT_RESULT_SET, "false");
-    LensTestUtil.dropTableWithConf(tblName, target(), lensSessionId, conf);
+    LensServerTestUtil.dropTableWithConf(tblName, target(), lensSessionId, conf);
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java b/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java
index 987129c..30d1e34 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java
@@ -18,7 +18,9 @@
  */
 package org.apache.lens.server.query;
 
-import static org.testng.Assert.assertTrue;
+import static org.apache.lens.server.LensServerTestUtil.*;
+
+import static org.testng.Assert.*;
 
 import java.io.IOException;
 import java.util.HashMap;
@@ -38,17 +40,23 @@ import org.apache.lens.api.query.QueryStatus.Status;
 import org.apache.lens.api.result.LensAPIResult;
 import org.apache.lens.server.LensJerseyTest;
 import org.apache.lens.server.LensServices;
-import org.apache.lens.server.LensTestUtil;
 import org.apache.lens.server.api.LensConfConstants;
-import org.apache.lens.server.api.query.*;
+import org.apache.lens.server.api.query.InMemoryOutputFormatter;
+import org.apache.lens.server.api.query.PersistedOutputFormatter;
+import org.apache.lens.server.api.query.QueryContext;
+import org.apache.lens.server.api.query.QueryExecutionService;
 import org.apache.lens.server.common.TestResourceFile;
 
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 
 import org.glassfish.jersey.client.ClientConfig;
-import org.glassfish.jersey.media.multipart.*;
-import org.testng.Assert;
-import org.testng.annotations.*;
+import org.glassfish.jersey.media.multipart.FormDataBodyPart;
+import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
+import org.glassfish.jersey.media.multipart.FormDataMultiPart;
+import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.testng.annotations.AfterTest;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Test;
 
 import lombok.extern.slf4j.Slf4j;
 
@@ -75,9 +83,9 @@ public class TestResultFormatting extends LensJerseyTest {
     super.setUp();
     queryService = LensServices.get().getService(QueryExecutionService.NAME);
     lensSessionId = queryService.openSession("foo", "bar", new HashMap<String, String>());
-    LensTestUtil.createTable(testTable, target(), lensSessionId,
+    createTable(testTable, target(), lensSessionId,
       "(ID INT, IDSTR STRING, IDARR ARRAY<INT>, IDSTRARR ARRAY<STRING>)");
-    LensTestUtil.loadDataFromClasspath(testTable, TestResourceFile.TEST_DATA2_FILE.getValue(), target(), lensSessionId);
+    loadDataFromClasspath(testTable, TestResourceFile.TEST_DATA2_FILE.getValue(), target(), lensSessionId);
   }
 
   /*
@@ -87,7 +95,7 @@ public class TestResultFormatting extends LensJerseyTest {
    */
   @AfterTest
   public void tearDown() throws Exception {
-    LensTestUtil.dropTable(testTable, target(), lensSessionId);
+    dropTable(testTable, target(), lensSessionId);
     queryService.closeSession(lensSessionId);
     super.tearDown();
   }
@@ -212,7 +220,7 @@ public class TestResultFormatting extends LensJerseyTest {
       .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
         new GenericType<LensAPIResult<QueryHandle>>() {}).getData();
 
-    Assert.assertNotNull(handle);
+    assertNotNull(handle);
 
     // Get query
     LensQuery ctx = target.path(handle.toString()).queryParam("sessionid", lensSessionId).request()
@@ -225,7 +233,7 @@ public class TestResultFormatting extends LensJerseyTest {
       Thread.sleep(1000);
     }
 
-    Assert.assertEquals(ctx.getStatus().getStatus(), status);
+    assertEquals(ctx.getStatus().getStatus(), status);
 
     if (status.equals(QueryStatus.Status.SUCCESSFUL)) {
       QueryContext qctx = queryService.getQueryContext(handle);
@@ -239,12 +247,12 @@ public class TestResultFormatting extends LensJerseyTest {
       } else if (!isDir) {
         // isDir is true if the formatter is skipped due to result being the max size allowed
         if (qctx.isDriverPersistent()) {
-          Assert.assertTrue(qctx.getQueryOutputFormatter() instanceof PersistedOutputFormatter);
+          assertTrue(qctx.getQueryOutputFormatter() instanceof PersistedOutputFormatter);
         } else {
-          Assert.assertTrue(qctx.getQueryOutputFormatter() instanceof InMemoryOutputFormatter);
+          assertTrue(qctx.getQueryOutputFormatter() instanceof InMemoryOutputFormatter);
         }
       } else {
-        Assert.assertNull(qctx.getQueryOutputFormatter());
+        assertNull(qctx.getQueryOutputFormatter());
       }
       // fetch results
       TestQueryService.validatePersistedResult(handle, target(), lensSessionId, new String[][]{
@@ -259,7 +267,7 @@ public class TestResultFormatting extends LensJerseyTest {
       assertTrue(ctx.getDriverStartTime() > 0);
       assertTrue(ctx.getDriverFinishTime() > 0);
       assertTrue(ctx.getFinishTime() > 0);
-      Assert.assertEquals(ctx.getStatus().getStatus(), QueryStatus.Status.FAILED);
+      assertEquals(ctx.getStatus().getStatus(), QueryStatus.Status.FAILED);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server/src/test/java/org/apache/lens/server/session/TestDatabaseResourceService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/session/TestDatabaseResourceService.java b/lens-server/src/test/java/org/apache/lens/server/session/TestDatabaseResourceService.java
index 28bed27..1008faf 100644
--- a/lens-server/src/test/java/org/apache/lens/server/session/TestDatabaseResourceService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/session/TestDatabaseResourceService.java
@@ -24,7 +24,7 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 
-import org.apache.lens.server.LensTestUtil;
+import org.apache.lens.server.LensServerTestUtil;
 import org.apache.lens.server.api.LensConfConstants;
 
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -53,7 +53,7 @@ public class TestDatabaseResourceService {
 
   @BeforeClass
   public void setup() throws Exception {
-    LensTestUtil.createTestDatabaseResources(testDatabases, conf);
+    LensServerTestUtil.createTestDatabaseResources(testDatabases, conf);
     // Start resource service.
     conf.set(LensConfConstants.DATABASE_RESOURCE_DIR, "target/resources");
     dbResService = new DatabaseResourceService(DatabaseResourceService.NAME);

http://git-wip-us.apache.org/repos/asf/lens/blob/b71be2dc/lens-server/src/test/java/org/apache/lens/server/session/TestSessionClassLoaders.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/session/TestSessionClassLoaders.java b/lens-server/src/test/java/org/apache/lens/server/session/TestSessionClassLoaders.java
index 464533d..5feab71 100644
--- a/lens-server/src/test/java/org/apache/lens/server/session/TestSessionClassLoaders.java
+++ b/lens-server/src/test/java/org/apache/lens/server/session/TestSessionClassLoaders.java
@@ -25,7 +25,7 @@ import java.util.HashMap;
 
 import org.apache.lens.api.LensSessionHandle;
 import org.apache.lens.server.LensServerConf;
-import org.apache.lens.server.LensTestUtil;
+import org.apache.lens.server.LensServerTestUtil;
 import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.user.UserConfigLoaderFactory;
 
@@ -57,7 +57,7 @@ public class TestSessionClassLoaders {
      * test2.jar containing ClassLoaderTestClass2.class added to session via addResource
      */
     // Create test databases and tables
-    LensTestUtil.createTestDatabaseResources(new String[]{DB1}, conf);
+    LensServerTestUtil.createTestDatabaseResources(new String[]{DB1}, conf);
 
     conf.setVar(HiveConf.ConfVars.HIVE_SESSION_IMPL_CLASSNAME, LensSessionImpl.class.getName());
     conf.set(LensConfConstants.DATABASE_RESOURCE_DIR, "target/resources");


[43/50] [abbrv] lens git commit: LENS-903 : No candidate dim available exception should contain only brief error

Posted by sh...@apache.org.
LENS-903 : No candidate dim available exception should contain only brief error


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/b84cb2cd
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/b84cb2cd
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/b84cb2cd

Branch: refs/heads/LENS-581
Commit: b84cb2cd32a53806278bbe5d45028dbaa760bdf5
Parents: 4d3d2f8
Author: Sushil Mohanty <su...@apache.org>
Authored: Thu Dec 17 17:31:04 2015 +0530
Committer: Sushil Mohanty <su...@apache.org>
Committed: Thu Dec 17 17:31:04 2015 +0530

----------------------------------------------------------------------
 .../lens/cube/parse/CubeQueryContext.java       |  7 ++++---
 .../lens/cube/parse/TestCubeRewriter.java       | 22 +++++++++++++++++++-
 2 files changed, 25 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/b84cb2cd/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index f75a6b9..4034a54 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -32,6 +32,7 @@ import java.io.IOException;
 import java.util.*;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.cube.error.NoCandidateDimAvailableException;
 import org.apache.lens.cube.error.NoCandidateFactAvailableException;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
@@ -810,12 +811,12 @@ public class CubeQueryContext implements TrackQueriedColumns {
               }
             }
           }
-          throw new LensException(LensCubeErrorCode.NO_CANDIDATE_DIM_AVAILABLE.getLensErrorInfo(),
-              dim.getName(), reason);
+          log.error("Query rewrite failed due to NO_CANDIDATE_DIM_AVAILABLE, cause: {}",
+                  dimPruningMsgs.get(dim).toJsonObject());
+          throw new NoCandidateDimAvailableException(dimPruningMsgs.get(dim));
         }
       }
     }
-
     return dimsToQuery;
   }
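
The new NoCandidateDimAvailableException class itself is not shown in this diff. A minimal sketch of its assumed shape follows, with member names inferred from the test changes further down; getBriefCause() and getMaxCause() are assumed helpers on PruneCauses, so treat this as illustrative only:

    // Hypothetical sketch, not the committed source.
    public class NoCandidateDimAvailableException extends LensException {

      private final PruneCauses<CubeDimensionTable> briefAndDetailedError;

      public NoCandidateDimAvailableException(PruneCauses<CubeDimensionTable> pruneCauses) {
        // Keep only the brief error in the exception message, per LENS-903.
        super(LensCubeErrorCode.NO_CANDIDATE_DIM_AVAILABLE.getLensErrorInfo(),
          pruneCauses.getBriefCause());
        this.briefAndDetailedError = pruneCauses;
      }

      // Orders instances by the ordinal of the most severe pruning cause; enum
      // compareTo returns the ordinal difference (e.g. 9 - 6 = 3 in the test below).
      public int compareTo(NoCandidateDimAvailableException other) {
        return this.briefAndDetailedError.getMaxCause()
          .compareTo(other.briefAndDetailedError.getMaxCause());
      }

      // The detailed causes remain available for clients wanting the full JSON.
      public PruneCauses.BriefAndDetailedError getJsonMessage() {
        return briefAndDetailedError.toJsonObject();
      }
    }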
 

http://git-wip-us.apache.org/repos/asf/lens/blob/b84cb2cd/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 802ff42..9a08735 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -32,6 +32,7 @@ import java.text.SimpleDateFormat;
 import java.util.*;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.cube.error.NoCandidateDimAvailableException;
 import org.apache.lens.cube.error.NoCandidateFactAvailableException;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCause;
@@ -46,6 +47,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.ParseException;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 
@@ -1078,6 +1080,23 @@ public class TestCubeRewriter extends TestQueryRewrite {
   }
 
   @Test
+  public void testNoCandidateDimAvailableExceptionCompare() throws Exception {
+
+    //Max cause COLUMN_NOT_FOUND, Ordinal 9
+    PruneCauses<CubeDimensionTable> pr1 = new PruneCauses<CubeDimensionTable>();
+    pr1.addPruningMsg(new CubeDimensionTable(new Table("test", "citydim")),
+            CandidateTablePruneCause.columnNotFound("test1", "test2", "test3"));
+    NoCandidateDimAvailableException ne1 = new NoCandidateDimAvailableException(pr1);
+
+    //Max cause EXPRESSION_NOT_EVALUABLE, Ordinal 6
+    PruneCauses<CubeDimensionTable> pr2 = new PruneCauses<CubeDimensionTable>();
+    pr2.addPruningMsg(new CubeDimensionTable(new Table("test", "citydim")),
+            CandidateTablePruneCause.expressionNotEvaluable("testexp1", "testexp2"));
+    NoCandidateDimAvailableException ne2 = new NoCandidateDimAvailableException(pr2);
+    assertEquals(ne1.compareTo(ne2), 3);
+  }
+
+  @Test
   public void testDimensionQueryWithMultipleStorages() throws Exception {
     String hqlQuery = rewrite("select name, stateid from" + " citydim", getConf());
     String expected =
@@ -1095,7 +1114,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     // state table is present on c1 with partition dumps and partitions added
     LensException e = getLensExceptionInRewrite("select name, capital from statedim ", conf);
     assertEquals(e.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_DIM_AVAILABLE.getLensErrorInfo().getErrorCode());
-    assertEquals(extractPruneCause(e), new PruneCauses.BriefAndDetailedError(
+    NoCandidateDimAvailableException ne = (NoCandidateDimAvailableException) e;
+    assertEquals(ne.getJsonMessage(), new PruneCauses.BriefAndDetailedError(
       NO_CANDIDATE_STORAGES.errorFormat,
       new HashMap<String, List<CandidateTablePruneCause>>() {
         {


[16/50] [abbrv] lens git commit: LENS-873: Merge release branch into master

Posted by sh...@apache.org.
LENS-873: Merge release branch into master


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/73716cb6
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/73716cb6
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/73716cb6

Branch: refs/heads/LENS-581
Commit: 73716cb6fee54811c1e7c942d52e4636cc49606b
Parents: b66592c
Author: Raju Bairishetti <ra...@apache.org>
Authored: Wed Nov 25 08:22:17 2015 +0800
Committer: Raju Bairishetti <ra...@apache.org>
Committed: Wed Nov 25 08:22:17 2015 +0800

----------------------------------------------------------------------
 .../lens/cube/parse/TestCubeRewriter.java       |  2 ++
 .../org/apache/lens/cube/parse/TestQuery.java   |  1 +
 .../driver/jdbc/TestColumnarSQLRewriter.java    |  2 --
 lens-regression/pom.xml                         | 13 +++++----
 lens-ship-jars/pom.xml                          |  4 +--
 src/site/apt/releases/download.apt              |  2 +-
 src/site/apt/releases/release-history.apt       | 28 ++++++++++++++++++++
 7 files changed, 40 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/73716cb6/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 04b7ab1..fea70b7 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -39,6 +39,7 @@ import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.time.DateUtils;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -54,6 +55,7 @@ import com.google.common.base.Splitter;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
+
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j

http://git-wip-us.apache.org/repos/asf/lens/blob/73716cb6/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQuery.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQuery.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQuery.java
index db3ba9b..cd20fef 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQuery.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQuery.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.ql.parse.ASTNode;
 import com.google.common.base.Objects;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
+
 import lombok.Data;
 import lombok.extern.slf4j.Slf4j;
 

http://git-wip-us.apache.org/repos/asf/lens/blob/73716cb6/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
----------------------------------------------------------------------
diff --git a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
index 7772d16..cf795fa 100644
--- a/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
+++ b/lens-driver-jdbc/src/test/java/org/apache/lens/driver/jdbc/TestColumnarSQLRewriter.java
@@ -629,7 +629,6 @@ public class TestColumnarSQLRewriter {
             + "( time_dim___time_dim . day ), ( item_dim___item_dim . item_key ) "
             + "order by dollars_sold  desc";
     compareQueries(actual, expected);
-
   }
 
   @Test
@@ -683,7 +682,6 @@ public class TestColumnarSQLRewriter {
             + " ( time_dim___time_dim . day ), ( item_dim___item_dim . item_key ) "
             + "order by dollars_sold  desc";
     compareQueries(actual, expected);
-
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/lens/blob/73716cb6/lens-regression/pom.xml
----------------------------------------------------------------------
diff --git a/lens-regression/pom.xml b/lens-regression/pom.xml
index 8555e36..807d3b2 100644
--- a/lens-regression/pom.xml
+++ b/lens-regression/pom.xml
@@ -87,14 +87,13 @@
                         </goals>
                         <configuration>
                             <target>
-                                <property name="build.compiler" value="extJavac"/>
-                                <property name="compile_classpath" refid="maven.compile.classpath"/>
-                                <mkdir dir="${project.build.directory}/additional-classes"/>
-                                <javac includeantruntime="false" srcdir="src/additional/java"
-                                       destdir="${project.build.directory}/additional-classes" classpath="${compile_classpath}"/>
-                                <mkdir dir="${project.build.directory}/testjars"/>
+                                <property name="build.compiler" value="extJavac" />
+                                <property name="compile_classpath" refid="maven.compile.classpath" />
+                                <mkdir dir="${project.build.directory}/additional-classes" />
+                                <javac includeantruntime="false" srcdir="src/additional/java" destdir="${project.build.directory}/additional-classes" classpath="${compile_classpath}" />
+                                <mkdir dir="${project.build.directory}/testjars" />
                                 <jar destfile="${project.build.directory}/testjars/hiveudftest.jar">
-                                    <fileset dir="${project.build.directory}/additional-classes" includes="SampleUdf.class"/>
+                                    <fileset dir="${project.build.directory}/additional-classes" includes="SampleUdf.class" />
                                 </jar>
                             </target>
                         </configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/73716cb6/lens-ship-jars/pom.xml
----------------------------------------------------------------------
diff --git a/lens-ship-jars/pom.xml b/lens-ship-jars/pom.xml
index b1d937b..6268445 100644
--- a/lens-ship-jars/pom.xml
+++ b/lens-ship-jars/pom.xml
@@ -99,8 +99,8 @@
                 <transformer implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
                   <resource>log4j.properties</resource>
                 </transformer>
-                <transformer implementation="org.apache.maven.plugins.shade.resource.ApacheLicenseResourceTransformer"/>
-                <transformer implementation="org.apache.maven.plugins.shade.resource.ApacheNoticeResourceTransformer"/>
+                <transformer implementation="org.apache.maven.plugins.shade.resource.ApacheLicenseResourceTransformer" />
+                <transformer implementation="org.apache.maven.plugins.shade.resource.ApacheNoticeResourceTransformer" />
               </transformers>
             </configuration>
           </execution>

http://git-wip-us.apache.org/repos/asf/lens/blob/73716cb6/src/site/apt/releases/download.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/releases/download.apt b/src/site/apt/releases/download.apt
index eba5172..37ac480 100644
--- a/src/site/apt/releases/download.apt
+++ b/src/site/apt/releases/download.apt
@@ -20,7 +20,7 @@
 Download
 
   The latest release of Apache Lens can be
-  {{{http://www.apache.org/dyn/closer.cgi/lens/2.3-beta}downloaded from the ASF}}.
+  {{{http://www.apache.org/dyn/closer.lua/lens/2.4-beta}downloaded from the ASF}}.
 
   Older releases may be found {{{http://archive.apache.org/dist/lens}in the archives}}.
   Releases prior to graduation may be found {{{http://archive.apache.org/dist/incubator/lens}in the incubator archives}}.

http://git-wip-us.apache.org/repos/asf/lens/blob/73716cb6/src/site/apt/releases/release-history.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/releases/release-history.apt b/src/site/apt/releases/release-history.apt
index b683360..1879acc 100644
--- a/src/site/apt/releases/release-history.apt
+++ b/src/site/apt/releases/release-history.apt
@@ -23,6 +23,8 @@ All Apache Lens releases
 *--+--+---+--+--+--+--+
 |Release version|Major features|Release documentation|Release Notes|Download|Incompatibilities|More Info |
 *--+--+---+--+--+--+--+
+|2.4.0-beta| JDK8 support, new web client, Saved Query and Parameterization, Support flattening of columns selected through bridge-tables(many-to-many relationships), column name mapping for columns in underlying storage tables, Enhancements cube.xsd and jdbc driver, Improvements in Error codes, Doc improvements, bug fixes| {{{../versions/2.4.0-beta/index.html} 2.4.x docs}} |{{{https://issues.apache.org/jira/secure/ReleaseNote.jspa?projectId=12315923&version=12333087} 2.4 release notes}} |{{{http://archive.apache.org/dist/lens/2.4-beta/} Download}} |{{{#a2.4.x_from_2.3.x} 2.4.x-incompatibilities}} | - |
+*--+--+---+--+--+--+--+
 |2.3.0-beta | Per Query Logs, Server side XML validation, File size in query result, File name suggestions in CLI, Phase 1 Query rewriter, Elastic Search driver, Query launching constraints, Update partition api, Lens Service Health check, Doc/javadoc improvements, Code cleanup, bug fixes| {{{../versions/2.3.0-beta/index.html} 2.3.x docs}} | {{{https://issues.apache.org/jira/secure/ReleaseNote.jspa?version=12332350&projectId=12315923} 2.3 release notes}}| {{{http://archive.apache.org/dist/lens/2.3-beta/} Download}} | {{{#a2.3.x_from_2.2.x} 2.3.x-incompatibilities}} | - |
 *--+--+---+--+--+--+--+
 |2.2.0-beta-incubating| CLI Improvements, Support for multiple expressions and and union queries, More descriptive error codes, Zeppelin integration, Code cleanup| {{{../versions/2.2.0-beta-incubating/index.html} 2.2.x-incubating docs}} | {{{https://issues.apache.org/jira/secure/ReleaseNote.jspa?version=12329586&projectId=12315923} 2.2 release notes}}| {{{http://archive.apache.org/dist/incubator/lens/2.2-beta-incubating/} Download}} | {{{#a2.2.x_from_2.1.x} 2.2.x-incompatibilities}} | - |
@@ -40,6 +42,32 @@ All Apache Lens releases
 
 * Incompatibilities
 
+** 2.4.x from 2.3.x
+
+  * LENS-187: Moved all errors from inmobi hive to lens. All errors keep the same error codes, but all exceptions
+    are now mapped to LensException instead of HiveException or SemanticException.
+
+  * LENS-446: Removed all server params from session and driver configurations. Earlier, all server params were
+    passed to drivers and shown through the session api. Server-side properties are no longer visible in session params.
+
+  * LENS-224: The driver class name will also be stored for finished queries, as an additional column (driverclass)
+    in the finished_queries table.
+
+  * LENS-693: Purges finished queries to the DB periodically. Added a new config property,
+    lens.server.querypurger.sleep.interval, for configuring the purge interval. Purging no longer depends on the
+    number of finished queries.
+
+  * LENS-734: All query output files generated through HadoopFileFormatter and ZipFileFormatter now include the
+    query name in the output file name. Earlier, the output file name did not contain the query name.
+
+  * LENS-737: A single error is returned to the user via LensMultiException. Earlier, users would get an internal
+    server error when multiple drivers gave different errors.
+
+  * LENS-774: Fixed a spelling mistake in the lens.server.session.expiry.service.interval.secs property.
+    Earlier, LensConfConstants.java and lensserver-default.xml had different property names because of the
+    spelling error, so the lens.server.session.expiry.service.interval.secs property could not be
+    overridden.
+
 ** 2.3.x from 2.2.x
 
   * LENS-681 : Passing a malformed xml doesn't succeed anymore.


[18/50] [abbrv] lens git commit: LENS-878 : Refactor inner classes in JoinResolver

Posted by sh...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/f7ab827e/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinTree.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinTree.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinTree.java
new file mode 100644
index 0000000..5a294af
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinTree.java
@@ -0,0 +1,164 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+import java.util.*;
+
+import org.apache.lens.cube.metadata.AbstractCubeTable;
+import org.apache.lens.cube.metadata.SchemaGraph;
+
+import org.apache.hadoop.hive.ql.parse.JoinType;
+
+import lombok.Data;
+import lombok.EqualsAndHashCode;
+import lombok.ToString;
+
+@Data
+@ToString(exclude = "parent")
+@EqualsAndHashCode(exclude = "parent")
+public class JoinTree {
+  //parent of the node
+  JoinTree parent;
+  // current table is parentRelationship.destTable;
+  SchemaGraph.TableRelationship parentRelationship;
+  // Alias for the join clause
+  String alias;
+  private Map<SchemaGraph.TableRelationship, JoinTree> subtrees =
+    new LinkedHashMap<SchemaGraph.TableRelationship, JoinTree>();
+  // Number of nodes from root to this node. Depth of root is 0. Unused for now.
+  private int depthFromRoot;
+  // join type of the current table.
+  JoinType joinType;
+
+  public static JoinTree createRoot() {
+    return new JoinTree(null, null, 0);
+  }
+
+  public JoinTree(JoinTree parent, SchemaGraph.TableRelationship tableRelationship,
+                  int depthFromRoot) {
+    this.parent = parent;
+    this.parentRelationship = tableRelationship;
+    this.depthFromRoot = depthFromRoot;
+  }
+
+  public JoinTree addChild(SchemaGraph.TableRelationship tableRelationship,
+                           CubeQueryContext cubeql, Map<String, Integer> aliasUsage) {
+    if (getSubtrees().get(tableRelationship) == null) {
+      JoinTree current = new JoinTree(this, tableRelationship,
+        this.depthFromRoot + 1);
+      // Set alias. Need to compute only when new node is being created.
+      // The following code ensures that For intermediate tables, aliases are given
+      // in the order citydim, citydim_0, citydim_1, ...
+      // And for destination tables, an alias will be decided from here but might be
+      // overridden outside this function.
+      AbstractCubeTable destTable = tableRelationship.getToTable();
+      current.setAlias(cubeql.getAliasForTableName(destTable.getName()));
+      if (aliasUsage.get(current.getAlias()) == null) {
+        aliasUsage.put(current.getAlias(), 0);
+      } else {
+        aliasUsage.put(current.getAlias(), aliasUsage.get(current.getAlias()) + 1);
+        current.setAlias(current.getAlias() + "_" + (aliasUsage.get(current.getAlias()) - 1));
+      }
+      getSubtrees().put(tableRelationship, current);
+    }
+    return getSubtrees().get(tableRelationship);
+  }
+
+  // Recursive computation of number of edges.
+  public int getNumEdges() {
+    int ret = 0;
+    for (JoinTree tree : getSubtrees().values()) {
+      ret += 1;
+      ret += tree.getNumEdges();
+    }
+    return ret;
+  }
+
+  public boolean isLeaf() {
+    return getSubtrees().isEmpty();
+  }
+
+  // Breadth First Traversal. Unused currently.
+  public Iterator<JoinTree> bft() {
+    return new Iterator<JoinTree>() {
+      List<JoinTree> remaining = new ArrayList<JoinTree>() {
+        {
+          addAll(getSubtrees().values());
+        }
+      };
+
+      @Override
+      public boolean hasNext() {
+        return !remaining.isEmpty();
+      }
+
+      @Override
+      public JoinTree next() {
+        JoinTree retval = remaining.remove(0);
+        remaining.addAll(retval.getSubtrees().values());
+        return retval;
+      }
+
+      @Override
+      public void remove() {
+        throw new RuntimeException("Not implemented");
+      }
+    };
+  }
+
+  // Depth first traversal of the tree. Used in forming join string.
+  public Iterator<JoinTree> dft() {
+    return new Iterator<JoinTree>() {
+      Stack<JoinTree> joinTreeStack = new Stack<JoinTree>() {
+        {
+          addAll(getSubtrees().values());
+        }
+      };
+
+      @Override
+      public boolean hasNext() {
+        return !joinTreeStack.isEmpty();
+      }
+
+      @Override
+      public JoinTree next() {
+        JoinTree retval = joinTreeStack.pop();
+        joinTreeStack.addAll(retval.getSubtrees().values());
+        return retval;
+      }
+
+      @Override
+      public void remove() {
+        throw new RuntimeException("Not implemented");
+      }
+    };
+  }
+
+  public Set<JoinTree> leaves() {
+    Set<JoinTree> leaves = new HashSet<JoinTree>();
+    Iterator<JoinTree> dft = dft();
+    while (dft.hasNext()) {
+      JoinTree cur = dft.next();
+      if (cur.isLeaf()) {
+        leaves.add(cur);
+      }
+    }
+    return leaves;
+  }
+}
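
A tiny usage sketch for the depth-first iterator above (hypothetical, not part of this commit; root is assumed to come from JoinTree.createRoot() with children added via addChild, and the getters are the Lombok-generated accessors):

    // Walk the join tree depth first and emit one join fragment per node.
    JoinTree root = JoinTree.createRoot();   // assume subtrees were populated elsewhere
    StringBuilder joinClause = new StringBuilder();
    Iterator<JoinTree> it = root.dft();
    while (it.hasNext()) {
      JoinTree node = it.next();
      joinClause.append(' ').append(node.getJoinType())
        .append(" join ")
        .append(node.getParentRelationship().getToTable().getName())
        .append(' ').append(node.getAlias());
    }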

http://git-wip-us.apache.org/repos/asf/lens/blob/f7ab827e/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
index 8a84249..f772279 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
@@ -183,7 +183,7 @@ class TimerangeResolver implements ContextRewriter {
     }
 
     // Remove join paths that have columns with invalid life span
-    JoinResolver.AutoJoinContext joinContext = cubeql.getAutoJoinCtx();
+    AutoJoinContext joinContext = cubeql.getAutoJoinCtx();
     if (joinContext == null) {
       return;
     }


[45/50] [abbrv] lens git commit: LENS-905 : Group by is not promoted when keys projected along with having clause

Posted by sh...@apache.org.
LENS-905 : Group by is not promoted when keys projected along with having clause


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/c1790813
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/c1790813
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/c1790813

Branch: refs/heads/LENS-581
Commit: c17908134e004c646724d7e464252ec76a43113d
Parents: d6aeecc
Author: Sushil Mohanty <su...@apache.org>
Authored: Fri Dec 18 11:50:33 2015 +0530
Committer: Sushil Mohanty <su...@apache.org>
Committed: Fri Dec 18 11:50:33 2015 +0530

----------------------------------------------------------------------
 .../org/apache/lens/cube/parse/AggregateResolver.java    |  4 +++-
 .../apache/lens/cube/parse/TestAggregateResolver.java    | 11 +++++++++--
 2 files changed, 12 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/c1790813/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
index b544a67..39bd1cc 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
@@ -97,7 +97,9 @@ class AggregateResolver implements ContextRewriter {
     Configuration distConf = cubeql.getConf();
     boolean isDimOnlyDistinctEnabled = distConf.getBoolean(CubeQueryConfUtil.ENABLE_ATTRFIELDS_ADD_DISTINCT,
       CubeQueryConfUtil.DEFAULT_ATTR_FIELDS_ADD_DISTINCT);
-    if (isDimOnlyDistinctEnabled) {
+    // Having clauses always operate on measures, so if only keys are projected
+    // the query should skip distinct and promote group by instead.
+    if (cubeql.getHavingAST() == null && isDimOnlyDistinctEnabled) {
       // Check if any measure/aggregate columns and distinct clause used in
       // select tree. If not, update selectAST token "SELECT" to "SELECT DISTINCT"
       if (!hasMeasures(cubeql, cubeql.getSelectAST()) && !isDistinctClauseUsed(cubeql.getSelectAST())
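
Illustrative before/after for this change (a sketch mirroring the new test case below, not verbatim rewriter output):

    // Dim-only projection with a HAVING on a measure:
    //   SELECT cityid FROM testCube WHERE <range> HAVING (testCube.msr2 > 100)
    // After this fix the rewriter promotes the group by instead of adding DISTINCT:
    //   SELECT testcube.cityid FROM ... GROUP BY testcube.cityid HAVING sum(testCube.msr2) > 100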

http://git-wip-us.apache.org/repos/asf/lens/blob/c1790813/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
index a48d753..35234a1 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
@@ -93,6 +93,9 @@ public class TestAggregateResolver extends TestQueryRewrite {
 
     String q10 = "SELECT cityid, round(testCube.msr2) from testCube where " + TWO_DAYS_RANGE;
 
+    //dimension selected with having
+    String q11 = "SELECT cityid from testCube where " + TWO_DAYS_RANGE + " having (testCube.msr2 > 100)";
+
     String expectedq1 =
       getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr2) from ", null,
         "group by testcube.cityid", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
@@ -128,13 +131,17 @@ public class TestAggregateResolver extends TestQueryRewrite {
     String expectedq10 =
       getExpectedQuery(cubeName, "SELECT testcube.cityid," + " round(sum(testCube.msr2)) from ", null,
         "group by testcube.cityid", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
-
+    String expectedq11 =
+      getExpectedQuery(cubeName, "SELECT testcube.cityid from ", null,
+        "group by testcube.cityid" + "having sum(testCube.msr2) > 100",
+              getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
     String[] tests = {
-      q1, q2, q3, q4, q5, q6, q7, q8, q9, q10,
+      q1, q2, q3, q4, q5, q6, q7, q8, q9, q10, q11,
     };
     String[] expected = {
       expectedq1, expectedq2, expectedq3, expectedq4, expectedq5,
       expectedq6, expectedq7, expectedq8, expectedq9, expectedq10,
+      expectedq11,
     };
 
     for (int i = 0; i < tests.length; i++) {


[06/50] [abbrv] lens git commit: LENS-828 : Adds validation for derived cube creation

Posted by sh...@apache.org.
LENS-828 : Adds validation for derived cube creation


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/7b5f4a09
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/7b5f4a09
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/7b5f4a09

Branch: refs/heads/LENS-581
Commit: 7b5f4a09a9471fdedc25e955cc4fd6990e89f1ef
Parents: bc86587
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Wed Nov 18 13:55:59 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Wed Nov 18 13:55:59 2015 +0530

----------------------------------------------------------------------
 lens-api/src/main/resources/lens-errors.conf    | 12 +++++-
 .../lens/cube/error/LensCubeErrorCode.java      |  3 +-
 .../org/apache/lens/cube/metadata/Cube.java     | 10 ++---
 .../lens/cube/metadata/CubeMetastoreClient.java |  2 +-
 .../apache/lens/cube/metadata/DerivedCube.java  | 43 ++++++++++++++++++--
 .../cube/metadata/TestCubeMetastoreClient.java  |  8 ++--
 .../apache/lens/cube/parse/CubeTestSetup.java   | 17 ++++++--
 .../metastore/CubeMetastoreServiceImpl.java     | 13 +-----
 .../apache/lens/server/metastore/JAXBUtils.java | 30 +++++++-------
 .../server/metastore/TestMetastoreService.java  | 36 ++++++++++++----
 10 files changed, 120 insertions(+), 54 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/7b5f4a09/lens-api/src/main/resources/lens-errors.conf
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/lens-errors.conf b/lens-api/src/main/resources/lens-errors.conf
index a582dc2..7526456 100644
--- a/lens-api/src/main/resources/lens-errors.conf
+++ b/lens-api/src/main/resources/lens-errors.conf
@@ -99,7 +99,7 @@ lensServerErrors = [
 
 # lensCubeErrors: Defined for lens-cube module
 
-lensCubeErrors = [
+lensCubeErrorsForQuery = [
 
   {
     errorCode = 3001
@@ -284,6 +284,16 @@ lensCubeErrors = [
   }
 ]
 
+lensCubeErrorsForMetastore = [
+  {
+    errorCode = 3101
+    httpStatusCode = ${BAD_REQUEST}
+    errorMsg = "Problem in submitting entity: %s"
+  }
+]
+
+lensCubeErrors = ${lensCubeErrorsForQuery}${lensCubeErrorsForMetastore}
+
 # Overriding errors in lens-errors.conf via lens-errors-override.conf:
 #
 # Step 1: Copy contents of lens-errors.conf to lens-errors-override.conf

http://git-wip-us.apache.org/repos/asf/lens/blob/7b5f4a09/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
index 73a584f..2119b64 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
@@ -52,7 +52,8 @@ public enum LensCubeErrorCode {
   NO_CANDIDATE_DIM_AVAILABLE(3027, 1100),
   NO_CANDIDATE_FACT_AVAILABLE(3028, 1200),
   NO_CANDIDATE_DIM_STORAGE_TABLES(3029, 1300),
-  NO_STORAGE_TABLE_AVAIABLE(3030, 1400);
+  NO_STORAGE_TABLE_AVAIABLE(3030, 1400),
+  ERROR_IN_ENTITY_DEFINITION(3101, 100);
 
   public LensErrorInfo getLensErrorInfo() {
     return this.errorInfo;

http://git-wip-us.apache.org/repos/asf/lens/blob/7b5f4a09/lens-cube/src/main/java/org/apache/lens/cube/metadata/Cube.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/Cube.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/Cube.java
index 98fb92e..f09da37 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/Cube.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/Cube.java
@@ -205,19 +205,19 @@ public class Cube extends AbstractBaseTable implements CubeInterface {
   }
 
   public CubeDimAttribute getDimAttributeByName(String dimension) {
-    return dimMap.get(dimension == null ? dimension : dimension.toLowerCase());
+    return dimMap.get(dimension == null ? null : dimension.toLowerCase());
   }
 
   public CubeMeasure getMeasureByName(String measure) {
-    return measureMap.get(measure == null ? measure : measure.toLowerCase());
+    return measureMap.get(measure == null ? null : measure.toLowerCase());
   }
 
   public CubeColumn getColumnByName(String column) {
-    CubeColumn cubeCol = (CubeColumn) super.getExpressionByName(column);
+    CubeColumn cubeCol = super.getExpressionByName(column);
     if (cubeCol == null) {
-      cubeCol = (CubeColumn) getMeasureByName(column);
+      cubeCol = getMeasureByName(column);
       if (cubeCol == null) {
-        cubeCol = (CubeColumn) getDimAttributeByName(column);
+        cubeCol = getDimAttributeByName(column);
       }
     }
     return cubeCol;

http://git-wip-us.apache.org/repos/asf/lens/blob/7b5f4a09/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index daf7434..f945e0f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@ -651,7 +651,7 @@ public class CubeMetastoreClient {
    * @throws HiveException
    */
   public void createDerivedCube(String parent, String name, Set<String> measures, Set<String> dimensions,
-    Map<String, String> properties, double weight) throws HiveException {
+    Map<String, String> properties, double weight) throws HiveException, LensException {
     DerivedCube cube = new DerivedCube(name, measures, dimensions, properties, weight, (Cube) getCube(parent));
     createCube(cube);
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/7b5f4a09/lens-cube/src/main/java/org/apache/lens/cube/metadata/DerivedCube.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/DerivedCube.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/DerivedCube.java
index e7012f7..3c30f78 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/DerivedCube.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/DerivedCube.java
@@ -20,11 +20,16 @@ package org.apache.lens.cube.metadata;
 
 import java.util.*;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.server.api.error.LensException;
+
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 
+import com.google.common.collect.Lists;
+
 public class DerivedCube extends AbstractCubeTable implements CubeInterface {
 
   private static final List<FieldSchema> COLUMNS = new ArrayList<FieldSchema>();
@@ -37,12 +42,12 @@ public class DerivedCube extends AbstractCubeTable implements CubeInterface {
   private final Set<String> measures = new HashSet<String>();
   private final Set<String> dimensions = new HashSet<String>();
 
-  public DerivedCube(String name, Set<String> measures, Set<String> dimensions, Cube parent) {
+  public DerivedCube(String name, Set<String> measures, Set<String> dimensions, Cube parent) throws LensException {
     this(name, measures, dimensions, new HashMap<String, String>(), 0L, parent);
   }
 
   public DerivedCube(String name, Set<String> measures, Set<String> dimensions, Map<String, String> properties,
-    double weight, Cube parent) {
+    double weight, Cube parent) throws LensException {
     super(name, COLUMNS, properties, weight);
     for (String msr : measures) {
       this.measures.add(msr.toLowerCase());
@@ -51,10 +56,42 @@ public class DerivedCube extends AbstractCubeTable implements CubeInterface {
       this.dimensions.add(dim.toLowerCase());
     }
     this.parent = parent;
-
+    validate();
     addProperties();
   }
 
+  public void validate() throws LensException {
+    List<String> measuresNotInParentCube = Lists.newArrayList();
+    List<String> dimAttributesNotInParentCube = Lists.newArrayList();
+    for (String msr : measures) {
+      if (parent.getMeasureByName(msr) == null) {
+        measuresNotInParentCube.add(msr);
+      }
+    }
+    for (String dim : dimensions) {
+      if (parent.getDimAttributeByName(dim) == null) {
+        dimAttributesNotInParentCube.add(dim);
+      }
+    }
+    StringBuilder validationErrorStringBuilder = new StringBuilder();
+    String sep = "";
+    boolean invalid = false;
+    if (!measuresNotInParentCube.isEmpty()) {
+      validationErrorStringBuilder.append(sep).append("Measures ").append(measuresNotInParentCube);
+      sep = " and ";
+      invalid = true;
+    }
+    if (!dimAttributesNotInParentCube.isEmpty()) {
+      validationErrorStringBuilder.append(sep).append("Dim Attributes ").append(dimAttributesNotInParentCube);
+      invalid = true;
+    }
+    if (invalid) {
+      throw new LensException(LensCubeErrorCode.ERROR_IN_ENTITY_DEFINITION.getLensErrorInfo(),
+        "Derived cube invalid: " + validationErrorStringBuilder.append(" were not present in " + "parent cube ")
+          .append(parent));
+    }
+  }
+
   public DerivedCube(Table tbl, Cube parent) {
     super(tbl);
     this.measures.addAll(getMeasures(getName(), getProperties()));
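
The new validation in action, as a hedged sketch (parentCube and the attribute names here are hypothetical, mirroring the test changes below):

    // Assume parentCube defines only measures msr1/msr2 and dim attributes dim1/dim2.
    try {
      new DerivedCube("derived_c1",
        Sets.newHashSet("msr1", "random_measure"),   // random_measure absent in parent
        Sets.newHashSet("dim1"),
        parentCube);
    } catch (LensException e) {
      // ERROR_IN_ENTITY_DEFINITION (3101): "Derived cube invalid:
      // Measures [random_measure] were not present in parent cube ..."
    }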

http://git-wip-us.apache.org/repos/asf/lens/blob/7b5f4a09/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
index 7d5ed0f..6a2dc50 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
@@ -114,7 +114,7 @@ public class TestCubeMetastoreClient {
   }
 
   @BeforeClass
-  public static void setup() throws HiveException, AlreadyExistsException, ParseException {
+  public static void setup() throws HiveException, AlreadyExistsException, LensException {
     SessionState.start(conf);
     now = new Date();
     Calendar cal = Calendar.getInstance();
@@ -158,7 +158,7 @@ public class TestCubeMetastoreClient {
   }
 
   private static void defineCube(String cubeName, String cubeNameWithProps, String derivedCubeName,
-    String derivedCubeNameWithProps) throws ParseException {
+    String derivedCubeNameWithProps) throws LensException {
     cubeMeasures = new HashSet<CubeMeasure>();
     cubeMeasures.add(new ColumnMeasure(
       new FieldSchema("msr1", "int", "first measure"), null, null, null, null, null, null, null, 0.0, 9999.0));
@@ -299,7 +299,7 @@ public class TestCubeMetastoreClient {
       new DerivedCube(derivedCubeNameWithProps, measures, dimensions, CUBE_PROPERTIES, 0L, cubeWithProps);
   }
 
-  private static void defineUberDims() throws ParseException {
+  private static void defineUberDims() {
     // Define zip dimension
     zipAttrs.add(new BaseDimAttribute(new FieldSchema("zipcode", "int", "code")));
     zipAttrs.add(new BaseDimAttribute(new FieldSchema("f1", "string", "field1")));
@@ -2541,7 +2541,7 @@ public class TestCubeMetastoreClient {
   }
 
   @Test(priority = 3)
-  public void testCaching() throws HiveException, ParseException {
+  public void testCaching() throws HiveException, LensException {
     client = CubeMetastoreClient.getInstance(conf);
     CubeMetastoreClient client2 = CubeMetastoreClient.getInstance(new HiveConf(TestCubeMetastoreClient.class));
     assertEquals(5, client.getAllCubes().size());

http://git-wip-us.apache.org/repos/asf/lens/blob/7b5f4a09/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 826f6b6..92f5067 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -27,6 +27,7 @@ import static org.apache.lens.cube.metadata.UpdatePeriod.*;
 
 import static org.testng.Assert.assertEquals;
 import static org.testng.Assert.assertNotNull;
+import static org.testng.Assert.assertTrue;
 
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
@@ -585,7 +586,7 @@ public class CubeTestSetup {
 
   Set<ExprColumn> exprs;
 
-  private void createCube(CubeMetastoreClient client) throws HiveException, ParseException {
+  private void createCube(CubeMetastoreClient client) throws HiveException, ParseException, LensException {
     cubeMeasures = new HashSet<CubeMeasure>();
     cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr1", "int", "first measure")));
     cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr2", "float", "second measure"), "Measure2", null, "SUM",
@@ -749,8 +750,18 @@ public class CubeTestSetup {
     Set<String> dimensions = new HashSet<String>();
     dimensions.add("dim1");
     dimensions.add("dim2");
-    client
-      .createDerivedCube(TEST_CUBE_NAME, DERIVED_CUBE_NAME, measures, dimensions, new HashMap<String, String>(), 5L);
+    // Try creating a derived cube with non-existent dims/measures
+    try {
+      client.createDerivedCube(TEST_CUBE_NAME, DERIVED_CUBE_NAME,
+        Sets.newHashSet("random_measure"), Sets.newHashSet("random_dim_attribute"),
+        new HashMap<String, String>(), 5L);
+    } catch(LensException e) {
+      assertTrue(e.getMessage().contains("random_measure"));
+      assertTrue(e.getMessage().contains("random_dim_attribute"));
+      assertTrue(e.getMessage().contains("not present"));
+    }
+    client.createDerivedCube(TEST_CUBE_NAME, DERIVED_CUBE_NAME,
+      measures, dimensions, new HashMap<String, String>(), 5L);
   }
 
   private void createBaseAndDerivedCubes(CubeMetastoreClient client)

http://git-wip-us.apache.org/repos/asf/lens/blob/7b5f4a09/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
index ea913ad..27f9e0e 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
@@ -28,7 +28,6 @@ import javax.ws.rs.NotFoundException;
 import org.apache.lens.api.LensSessionHandle;
 import org.apache.lens.api.metastore.*;
 import org.apache.lens.cube.metadata.*;
-import org.apache.lens.cube.metadata.Dimension;
 import org.apache.lens.cube.metadata.timeline.PartitionTimeline;
 import org.apache.lens.server.BaseLensService;
 import org.apache.lens.server.LensServerConf;
@@ -40,10 +39,10 @@ import org.apache.lens.server.session.LensSessionImpl;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.*;
-import org.apache.hadoop.hive.ql.metadata.*;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.parse.ParseException;
 import org.apache.hive.service.cli.CLIService;
 
 import com.google.common.collect.Lists;
@@ -200,8 +199,6 @@ public class CubeMetastoreServiceImpl extends BaseLensService implements CubeMet
       log.info("Created cube " + cube.getName());
     } catch (HiveException e) {
       throw new LensException(e);
-    } catch (ParseException e) {
-      throw new LensException(e);
     } finally {
       release(sessionid);
     }
@@ -264,8 +261,6 @@ public class CubeMetastoreServiceImpl extends BaseLensService implements CubeMet
       log.info("Cube updated " + cube.getName());
     } catch (HiveException e) {
       throw new LensException(e);
-    } catch (ParseException e) {
-      throw new LensException(e);
     } finally {
       release(sessionid);
     }
@@ -1108,8 +1103,6 @@ public class CubeMetastoreServiceImpl extends BaseLensService implements CubeMet
       log.info("Created dimension " + dimension.getName());
     } catch (HiveException e) {
       throw new LensException(e);
-    } catch (ParseException e) {
-      throw new LensException(e);
     } finally {
       release(sessionid);
     }
@@ -1152,8 +1145,6 @@ public class CubeMetastoreServiceImpl extends BaseLensService implements CubeMet
       log.info("Altered dimension " + dimName);
     } catch (HiveException e) {
       throw new LensException(e);
-    } catch (ParseException e) {
-      throw new LensException(e);
     } finally {
       release(sessionid);
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/7b5f4a09/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
index 1ed81eb..f5db7bc 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
@@ -19,6 +19,7 @@
 package org.apache.lens.server.metastore;
 
 import java.lang.reflect.Constructor;
+import java.text.ParseException;
 import java.util.*;
 
 import javax.ws.rs.WebApplicationException;
@@ -30,6 +31,7 @@ import org.apache.lens.api.metastore.*;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.ExprColumn.ExprSpec;
 import org.apache.lens.cube.metadata.ReferencedDimAtrribute.ChainRefCol;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -39,7 +41,6 @@ import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.parse.ParseException;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.mapred.InputFormat;
 
@@ -59,13 +60,13 @@ public final class JAXBUtils {
   private static final ObjectFactory XCF = new ObjectFactory();
 
   /**
-   * Create a hive ql cube obejct from corresponding JAXB object
+   * Create a hive ql cube object from corresponding JAXB object
    *
    * @param cube JAXB Cube
    * @return {@link Cube}
-   * @throws ParseException
+   * @throws LensException
    */
-  public static CubeInterface hiveCubeFromXCube(XCube cube, Cube parent) throws ParseException {
+  public static CubeInterface hiveCubeFromXCube(XCube cube, Cube parent) throws LensException {
     if (cube instanceof XDerivedCube) {
       XDerivedCube dcube = (XDerivedCube) cube;
       Set<String> dims = new LinkedHashSet<String>();
@@ -348,7 +349,7 @@ public final class JAXBUtils {
           }
           refspec.getChainRefColumn().add(xcc);
         }
-        xd.setJoinKey(Boolean.valueOf(false));
+        xd.setJoinKey(false);
       } else {
         List<TableReference> dimRefs = rd.getReferences();
         refspec.setTableReferences(new XTableReferences());
@@ -477,7 +478,7 @@ public final class JAXBUtils {
     return jc;
   }
 
-  public static ExprColumn hiveExprColumnFromXExprColumn(XExprColumn xe) throws ParseException {
+  public static ExprColumn hiveExprColumnFromXExprColumn(XExprColumn xe) {
     ExprColumn ec = new ExprColumn(new FieldSchema(xe.getName(), xe.getType().toLowerCase(),
       xe.getDescription()),
       xe.getDisplayString(),
@@ -601,7 +602,7 @@ public final class JAXBUtils {
     try {
       Class<?> clazz = Class.forName(xs.getClassname());
       Constructor<?> constructor = clazz.getConstructor(String.class);
-      storage = (Storage) constructor.newInstance(new Object[]{xs.getName()});
+      storage = (Storage) constructor.newInstance(xs.getName());
       storage.addProperties(mapFromXProperties(xs.getProperties()));
       return storage;
     } catch (Exception e) {
@@ -664,14 +665,12 @@ public final class JAXBUtils {
 
   public static CubeDimensionTable cubeDimTableFromDimTable(XDimensionTable dimensionTable) {
 
-    CubeDimensionTable cdim = new CubeDimensionTable(dimensionTable.getDimensionName(),
+    return new CubeDimensionTable(dimensionTable.getDimensionName(),
       dimensionTable.getTableName(),
       fieldSchemaListFromColumns(dimensionTable.getColumns()),
       dimensionTable.getWeight(),
       dumpPeriodsFromStorageTables(dimensionTable.getStorageTables()),
       mapFromXProperties(dimensionTable.getProperties()));
-
-    return cdim;
   }
 
   public static CubeFactTable cubeFactFromFactTable(XFactTable fact) {
@@ -870,7 +869,7 @@ public final class JAXBUtils {
           xp.getNonTimePartitionSpec().getPartSpecElement().add(partSpecElement);
         }
       }
-    } catch (java.text.ParseException exc) {
+    } catch (ParseException exc) {
       log.debug("can't form time part spec from " + partSpec, exc);
       xp.setTimePartitionSpec(null);
       xp.setNonTimePartitionSpec(null);
@@ -881,17 +880,16 @@ public final class JAXBUtils {
     return xp;
   }
 
-  public static void updatePartitionFromXPartition(Partition partition, XPartition xp)
-    throws ClassNotFoundException, HiveException {
+  public static void updatePartitionFromXPartition(Partition partition, XPartition xp) throws ClassNotFoundException {
     partition.getParameters().putAll(mapFromXProperties(xp.getPartitionParameters()));
     partition.getTPartition().getSd().getSerdeInfo().setParameters(mapFromXProperties(xp.getSerdeParameters()));
     partition.setLocation(xp.getLocation());
     if (xp.getInputFormat() != null) {
-      partition.setInputFormatClass((Class<? extends InputFormat>) Class.forName(xp.getInputFormat()));
+      partition.setInputFormatClass(Class.forName(xp.getInputFormat()).asSubclass(InputFormat.class));
     }
     if (xp.getOutputFormat() != null) {
       Class<? extends HiveOutputFormat> outputFormatClass =
-        (Class<? extends HiveOutputFormat>) Class.forName(xp.getOutputFormat());
+        Class.forName(xp.getOutputFormat()).asSubclass(HiveOutputFormat.class);
       partition.setOutputFormatClass(outputFormatClass);
       // Again a hack, for the issue described in HIVE-11278
       partition.getTPartition().getSd().setOutputFormat(
@@ -926,7 +924,7 @@ public final class JAXBUtils {
     return ret;
   }
 
-  public static Dimension dimensionFromXDimension(XDimension dimension) throws ParseException {
+  public static Dimension dimensionFromXDimension(XDimension dimension) {
     Set<CubeDimAttribute> dims = new LinkedHashSet<CubeDimAttribute>();
     for (XDimAttribute xd : dimension.getAttributes().getDimAttribute()) {
       dims.add(hiveDimAttrFromXDimAttr(xd));

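A note on the asSubclass() changes above: Class.asSubclass() performs a
runtime-checked narrowing, so the raw (Class<? extends InputFormat>) casts
and their unchecked-cast warnings go away. A minimal sketch of the idiom
(the class name below is hypothetical):

    // Throws ClassCastException immediately if the named class does not
    // actually extend InputFormat, instead of failing later on use.
    // Class.forName() throws ClassNotFoundException, as in the diff above.
    Class<? extends InputFormat> clazz =
        Class.forName("org.example.MyInputFormat").asSubclass(InputFormat.class);
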
http://git-wip-us.apache.org/repos/asf/lens/blob/7b5f4a09/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
index ca0fe56..e6a798a 100644
--- a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
@@ -417,7 +417,7 @@ public class TestMetastoreService extends LensJerseyTest {
     return cube;
   }
 
-  private XDerivedCube createDerivedCube(String cubeName, String parent) throws Exception {
+  private XDerivedCube createDerivedCube(String cubeName, String parent, boolean addExtraFields) throws Exception {
     XDerivedCube cube = cubeObjectFactory.createXDerivedCube();
     cube.setName(cubeName);
     cube.setDimAttrNames(new XDimAttrNames());
@@ -426,7 +426,10 @@ public class TestMetastoreService extends LensJerseyTest {
 
     cube.getDimAttrNames().getAttrName().add("dim1");
     cube.getMeasureNames().getMeasureName().add("msr1");
-
+    if (addExtraFields) {
+      cube.getDimAttrNames().getAttrName().add("random_dim");
+      cube.getMeasureNames().getMeasureName().add("random_measure");
+    }
     XProperty xp1 = cubeObjectFactory.createXProperty();
     xp1.setName("derived.foo");
     xp1.setValue("derived.bar");
@@ -469,13 +472,20 @@ public class TestMetastoreService extends LensJerseyTest {
       }
 
       assertTrue(foundcube);
-
+      // create invalid derived cube
+      XCube dcube = createDerivedCube("testderived", "testCube1", true);
+      result = target.queryParam("sessionid", lensSessionId).request(
+        mediaType).post(Entity.xml(cubeObjectFactory.createXCube(dcube)), APIResult.class);
+      assertEquals(result.getStatus(), Status.FAILED);
+      assertEquals(result.getMessage(), "ERROR_IN_ENTITY_DEFINITION[Derived cube invalid: "
+        + "Measures [random_measure] and Dim Attributes "
+        + "[random_dim] were not present in parent cube testcube1]");
       // create derived cube
-      final XCube dcube = createDerivedCube("testderived", "testCube1");
+      dcube = createDerivedCube("testderived", "testCube1", false);
       result = target.queryParam("sessionid", lensSessionId).request(
         mediaType).post(Entity.xml(cubeObjectFactory.createXCube(dcube)), APIResult.class);
       assertNotNull(result);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertEquals(result.getStatus(), Status.SUCCEEDED);
 
       cubes = target().path("metastore/cubes").queryParam("sessionid", lensSessionId).request(mediaType)
         .get(StringList.class);
@@ -706,7 +716,7 @@ public class TestMetastoreService extends LensJerseyTest {
       Assert.assertTrue(links.get(1).isMapsToMany());
       Assert.assertEquals(links.get(1).toString(), "testdim.col1[n]");
 
-      final XDerivedCube dcube = createDerivedCube("testGetDerivedCube", "testGetCube");
+      final XDerivedCube dcube = createDerivedCube("testGetDerivedCube", "testGetCube", false);
       target = target().path("metastore").path("cubes");
       // Create this cube first
       element = cubeObjectFactory.createXCube(dcube);
@@ -746,7 +756,7 @@ public class TestMetastoreService extends LensJerseyTest {
         target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(element), APIResult.class);
       assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
 
-      final XCube dcube = createDerivedCube("test_drop_derived_cube", "test_drop_cube");
+      final XCube dcube = createDerivedCube("test_drop_derived_cube", "test_drop_cube", false);
       target = target().path("metastore").path("cubes");
       // Create this cube first
       element = cubeObjectFactory.createXCube(dcube);
@@ -841,7 +851,15 @@ public class TestMetastoreService extends LensJerseyTest {
       assertNotNull(hcube.getDimAttributeByName("dim3"));
       assertEquals(((AbstractCubeTable) hcube).getProperties().get("foo2"), "bar2");
 
-      final XDerivedCube dcube = createDerivedCube("test_update_derived", cubeName);
+      XDerivedCube dcube = createDerivedCube("test_update_derived", cubeName, true);
+      element = cubeObjectFactory.createXCube(dcube);
+      result =
+        target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(element), APIResult.class);
+      assertEquals(result.getStatus(), Status.FAILED);
+      assertEquals(result.getMessage(), "ERROR_IN_ENTITY_DEFINITION[Derived cube invalid: "
+        + "Measures [random_measure] and Dim Attributes "
+        + "[random_dim] were not present in parent cube test_update]");
+      dcube = createDerivedCube("test_update_derived", cubeName, false);
       // Create this cube first
       element = cubeObjectFactory.createXCube(dcube);
       result =
@@ -1262,7 +1280,7 @@ public class TestMetastoreService extends LensJerseyTest {
       assertEquals(col4h3.getType(), "string");
       assertEquals(col4h3.getChainRefColumns().get(0).getChainName(), "chain1");
       assertEquals(col4h3.getChainRefColumns().get(0).getRefColumn(), "col2");
-      assertEquals(col4h3.getNumOfDistinctValues().get(), (Long)1000L);
+      assertEquals(col4h3.getNumOfDistinctValues().get(), (Long) 1000L);
       assertNotNull(dim.getAttributeByName("col5"));
       ReferencedDimAtrribute col5 = (ReferencedDimAtrribute) dim.getAttributeByName("col5");
       assertEquals(col5.getDescription(), "ref column");

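The new negative tests above assert the exact ERROR_IN_ENTITY_DEFINITION
message, which implies a parent-field check roughly like the following on
the server side. This is a hedged sketch, not the committed Lens code; the
helper and its parameter names are assumptions:

    // Hypothetical validation: every measure and dim attribute of a derived
    // cube must also exist in its parent cube.
    static void validateDerived(Set<String> derivedMeasures, Set<String> parentMeasures,
        Set<String> derivedDims, Set<String> parentDims, String parentName) throws LensException {
      Set<String> badMeasures = new HashSet<>(derivedMeasures);
      badMeasures.removeAll(parentMeasures);
      Set<String> badDims = new HashSet<>(derivedDims);
      badDims.removeAll(parentDims);
      if (!badMeasures.isEmpty() || !badDims.isEmpty()) {
        throw new LensException("Derived cube invalid: Measures " + badMeasures
          + " and Dim Attributes " + badDims + " were not present in parent cube " + parentName);
      }
    }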

[20/50] [abbrv] lens git commit: LENS-871 : Fix: dropping any partition in a dimtable clears the latest cache for that dimtable.

Posted by sh...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestFactPartition.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestFactPartition.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestFactPartition.java
index 3f2ae6f..e802c3c 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestFactPartition.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestFactPartition.java
@@ -39,8 +39,8 @@ public class TestFactPartition {
 
   @Test
   public void testGetFormattedFilter() throws Exception {
-    String dailyFormat = DAILY.format().format(DATE);
-    String hourlyFormat = HOURLY.format().format(DATE);
+    String dailyFormat = DAILY.format(DATE);
+    String hourlyFormat = HOURLY.format(DATE);
     assertEquals(fp1.getFormattedFilter("table"), "table.p = '" + dailyFormat + "'");
     assertEquals(fp2.getFormattedFilter("table2"),
       "table2.p = '" + dailyFormat + "' AND table2.q = '" + hourlyFormat + "'");

http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestTimePartition.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestTimePartition.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestTimePartition.java
index 2cbbd0c..729bcab 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestTimePartition.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestTimePartition.java
@@ -39,7 +39,7 @@ public class TestTimePartition {
   @Test(dataProvider = "update-periods")
   public void test(UpdatePeriod up) throws LensException {
     // Normal date object parsable
-    String nowStr = up.format().format(NOW);
+    String nowStr = up.format(NOW);
     // Create partition by date object or its string representation -- both should be the same.
     TimePartition nowPartition = TimePartition.of(up, NOW);
     TimePartition nowStrPartition = TimePartition.of(up, nowStr);

http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/test/java/org/apache/lens/cube/metadata/UpdatePeriodTest.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/UpdatePeriodTest.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/UpdatePeriodTest.java
index b21341d..b7ee8cc 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/UpdatePeriodTest.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/UpdatePeriodTest.java
@@ -54,7 +54,7 @@ public class UpdatePeriodTest {
     for (int i = 0; i < 5000; i++) {
       Date randomDate = randomDateGenerator.nextDate();
       randomDate = period.truncate(randomDate);
-      assertEquals(randomDate, period.format().parse(period.format().format(randomDate)));
+      assertEquals(randomDate, period.parse(period.format(randomDate)));
     }
   }
 }

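The recurring change in these test hunks is an API simplification: callers
now use UpdatePeriod.format(Date) and UpdatePeriod.parse(String) instead of
reaching through up.format() for a DateFormat first. Sketched from the call
sites (the committed implementation may differ, e.g. by caching a format per
thread, since SimpleDateFormat is not thread-safe), the wrappers would look
like:

    // Convenience wrappers inferred from the call sites; format() still
    // returns the underlying DateFormat, as the TestHiveDriver hunk shows.
    public String format(Date date) {
      return format().format(date);
    }

    public Date parse(String dateString) throws java.text.ParseException {
      return format().parse(dateString);
    }
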
http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 999faa0..1357035 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -19,15 +19,11 @@
 
 package org.apache.lens.cube.parse;
 
-import static java.util.Calendar.DAY_OF_MONTH;
-import static java.util.Calendar.HOUR_OF_DAY;
-import static java.util.Calendar.MONTH;
+import static java.util.Calendar.*;
 
 import static org.apache.lens.cube.metadata.UpdatePeriod.*;
 
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotNull;
-import static org.testng.Assert.assertTrue;
+import static org.testng.Assert.*;
 
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
@@ -58,7 +54,6 @@ import org.apache.hadoop.mapred.TextInputFormat;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-
 import com.google.common.collect.Sets;
 import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
@@ -105,7 +100,6 @@ public class CubeTestSetup {
   public static final String DERIVED_CUBE_NAME1 = "der1";
   public static final String DERIVED_CUBE_NAME2 = "der2";
   public static final String DERIVED_CUBE_NAME3 = "der3";
-  public static final String DERIVED_CUBE_NAME4 = "der4";
 
   // Time Instances as Date Type
   public static final Date NOW;
@@ -140,9 +134,31 @@ public class CubeTestSetup {
   private static Map<String, String> factValidityProperties = Maps.newHashMap();
   @Getter
   private static Map<String, List<UpdatePeriod>> storageToUpdatePeriodMap = new LinkedHashMap<>();
+  public static class DateOffsetProvider extends HashMap<Integer, Date> {
+    private final UpdatePeriod updatePeriod;
+    Calendar calendar = Calendar.getInstance();
+
+    public DateOffsetProvider(UpdatePeriod updatePeriod) {
+      this.updatePeriod = updatePeriod;
+    }
+    {
+      put(0, calendar.getTime());
+    }
+
+    @Override
+    public Date get(Object key) {
+      if (!containsKey(key) && key instanceof Integer) {
+        calendar.setTime(super.get(0));
+        calendar.add(updatePeriod.calendarField(), (Integer) key);
+        put((Integer) key, calendar.getTime());
+      }
+      return super.get(key);
+    }
+  }
 
   static {
     Calendar cal = Calendar.getInstance();
+    // Keep in sync
     NOW = cal.getTime();
     log.debug("Test now:{}", NOW);
 
@@ -280,24 +296,18 @@ public class CubeTestSetup {
 
   public static String getExpectedQuery(String cubeName, String selExpr, String whereExpr, String postWhereExpr,
     String rangeWhere, String storageTable, List<String> notLatestConditions) {
-    StringBuilder expected = new StringBuilder();
-    expected.append(selExpr);
-    expected.append(getDbName() + storageTable);
-    expected.append(" ");
-    expected.append(cubeName);
-    expected.append(" WHERE ");
-    expected.append("(");
+    StringBuilder expected = new StringBuilder()
+      .append(selExpr).append(getDbName()).append(storageTable).append(" ").append(cubeName)
+      .append(" WHERE ").append("(");
     if (notLatestConditions != null) {
       for (String cond : notLatestConditions) {
         expected.append(cond).append(" AND ");
       }
     }
     if (whereExpr != null) {
-      expected.append(whereExpr);
-      expected.append(" AND ");
+      expected.append(whereExpr).append(" AND ");
     }
-    expected.append(rangeWhere);
-    expected.append(")");
+    expected.append(rangeWhere).append(")");
     if (postWhereExpr != null) {
       expected.append(postWhereExpr);
     }
@@ -1561,7 +1571,7 @@ public class CubeTestSetup {
       TimePartition tp = TimePartition.of(HOURLY, temp);
       ttdStoreAll.add(tp);
       ttd2StoreAll.add(tp);
-      partitions.add(HOURLY.format().format(temp));
+      partitions.add(HOURLY.format(temp));
       StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, HOURLY);
       storagePartitionDescs.add(sPartSpec);
       cal.add(HOUR_OF_DAY, 1);
@@ -2036,8 +2046,6 @@ public class CubeTestSetup {
     dimColumns.add(new FieldSchema("name", "string", "field1"));
     dimColumns.add(new FieldSchema("cyleDim2Id", "string", "link to cyclic dim 2"));
 
-    Map<String, List<TableReference>> dimensionReferences = new HashMap<String, List<TableReference>>();
-    dimensionReferences.put("cyleDim2Id", Arrays.asList(new TableReference("cycleDim2", "id")));
 
     Map<String, UpdatePeriod> dumpPeriods = new HashMap<String, UpdatePeriod>();
     ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();

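The DateOffsetProvider added above memoizes dates relative to a fixed base
time: get(0) returns the instant captured at construction, and any other
integer key is computed lazily by adding that many units of the provider's
update period, then cached. A usage sketch:

    // Offsets are resolved against the same base time and cached on first use.
    DateOffsetProvider daily = new DateOffsetProvider(UpdatePeriod.DAILY);
    Date now = daily.get(0);          // base time captured at construction
    Date twoDaysBack = daily.get(-2); // base time minus two DAILY periods
    Date tomorrow = daily.get(1);     // base time plus one DAILY period
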
http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index 1ea22b7..ee84a4c 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -500,8 +500,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     assertEquals(
       pruneCause.getDetails().get("testfact2_base").iterator().next().getMissingPartitions().iterator().next(),
       "ttd:["
-        + UpdatePeriod.SECONDLY.format().format(DateUtils.addDays(DateUtils.truncate(TWODAYS_BACK, Calendar.HOUR), -10))
-        + ", " + UpdatePeriod.SECONDLY.format().format(DateUtils.addDays(DateUtils.truncate(NOW, Calendar.HOUR), 10))
+        + UpdatePeriod.SECONDLY.format(DateUtils.addDays(DateUtils.truncate(TWODAYS_BACK, Calendar.HOUR), -10))
+        + ", " + UpdatePeriod.SECONDLY.format(DateUtils.addDays(DateUtils.truncate(NOW, Calendar.HOUR), 10))
         + ")");
 
     // fail on partial false. Should go to fallback column. Also testing transitivity of timedim relations

http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
index e20e0e8..9a2493c 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
@@ -67,7 +67,7 @@ public class TestBetweenTimeRangeWriter extends TestTimeRangeWriter {
   public void validateSingle(String whereClause, DateFormat format) {
     List<String> parts = new ArrayList<String>();
     if (format == null) {
-      parts.add(UpdatePeriod.DAILY.format().format(CubeTestSetup.ONE_DAY_BACK));
+      parts.add(UpdatePeriod.DAILY.format(CubeTestSetup.ONE_DAY_BACK));
     } else {
       parts.add(format.format(CubeTestSetup.ONE_DAY_BACK));
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/test/java/org/apache/lens/cube/parse/TestORTimeRangeWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestORTimeRangeWriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestORTimeRangeWriter.java
index 0e7db1b..4a23818 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestORTimeRangeWriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestORTimeRangeWriter.java
@@ -43,9 +43,9 @@ public class TestORTimeRangeWriter extends TestTimeRangeWriter {
   public void validateDisjoint(String whereClause, DateFormat format) {
     List<String> parts = new ArrayList<String>();
     if (format == null) {
-      parts.add(UpdatePeriod.MONTHLY.format().format(CubeTestSetup.TWO_MONTHS_BACK));
-      parts.add(UpdatePeriod.DAILY.format().format(CubeTestSetup.TWODAYS_BACK));
-      parts.add(UpdatePeriod.HOURLY.format().format(CubeTestSetup.NOW));
+      parts.add(UpdatePeriod.MONTHLY.format(CubeTestSetup.TWO_MONTHS_BACK));
+      parts.add(UpdatePeriod.DAILY.format(CubeTestSetup.TWODAYS_BACK));
+      parts.add(UpdatePeriod.HOURLY.format(CubeTestSetup.NOW));
     } else {
       parts.add(format.format(CubeTestSetup.TWO_MONTHS_BACK));
       parts.add(format.format(CubeTestSetup.TWODAYS_BACK));
@@ -60,9 +60,9 @@ public class TestORTimeRangeWriter extends TestTimeRangeWriter {
   public void validateConsecutive(String whereClause, DateFormat format) {
     List<String> parts = new ArrayList<String>();
     if (format == null) {
-      parts.add(UpdatePeriod.DAILY.format().format(CubeTestSetup.ONE_DAY_BACK));
-      parts.add(UpdatePeriod.DAILY.format().format(CubeTestSetup.TWODAYS_BACK));
-      parts.add(UpdatePeriod.DAILY.format().format(CubeTestSetup.NOW));
+      parts.add(UpdatePeriod.DAILY.format(CubeTestSetup.ONE_DAY_BACK));
+      parts.add(UpdatePeriod.DAILY.format(CubeTestSetup.TWODAYS_BACK));
+      parts.add(UpdatePeriod.DAILY.format(CubeTestSetup.NOW));
     } else {
       parts.add(format.format(CubeTestSetup.ONE_DAY_BACK));
       parts.add(format.format(CubeTestSetup.TWODAYS_BACK));
@@ -77,7 +77,7 @@ public class TestORTimeRangeWriter extends TestTimeRangeWriter {
   public void validateSingle(String whereClause, DateFormat format) {
     List<String> parts = new ArrayList<String>();
     if (format == null) {
-      parts.add(UpdatePeriod.DAILY.format().format(CubeTestSetup.ONE_DAY_BACK));
+      parts.add(UpdatePeriod.DAILY.format(CubeTestSetup.ONE_DAY_BACK));
     } else {
       parts.add(format.format(CubeTestSetup.ONE_DAY_BACK));
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
index fc57c94..722a2da 100644
--- a/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
+++ b/lens-driver-hive/src/test/java/org/apache/lens/driver/hive/TestHiveDriver.java
@@ -909,7 +909,7 @@ public class TestHiveDriver {
         p = UpdatePeriod.HOURLY;
         break;
       }
-      FactPartition part = new FactPartition(partEls[0], p.format().parse(partSpec), p, null, p.format(),
+      FactPartition part = new FactPartition(partEls[0], p.parse(partSpec), p, null, p.format(),
         Collections.singleton("table1"));
       factParts.add(part);
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-server-api/src/main/java/org/apache/lens/server/api/util/LensUtil.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/util/LensUtil.java b/lens-server-api/src/main/java/org/apache/lens/server/api/util/LensUtil.java
index 6dcb2d7..c3b6d26 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/util/LensUtil.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/util/LensUtil.java
@@ -18,6 +18,7 @@
  */
 package org.apache.lens.server.api.util;
 
+import java.util.HashMap;
 import java.util.Set;
 
 import org.apache.lens.server.api.common.ConfigBasedObjectCreationFactory;
@@ -83,4 +84,13 @@ public final class LensUtil {
       throw new IllegalStateException(e);
     }
   }
+
+  public static <K, V> HashMap<K, V> getHashMap(Object... args) {
+    assert (args.length % 2 == 0);
+    HashMap<K, V> map = new HashMap<>();
+    for (int i = 0; i < args.length; i += 2) {
+      map.put((K) args[i], (V) args[i + 1]);
+    }
+    return map;
+  }
 }

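LensUtil.getHashMap() builds a map from alternating key/value varargs. Note
that the arity check is a plain assert (disabled unless the JVM runs with
-ea) and the casts are unchecked, so a mistyped argument surfaces only when
the map is read. The test changes later in this series use it like this:

    // Alternating key/value arguments, as used in TestMetastoreService below.
    Map<String, Boolean> expected =
        LensUtil.<String, Boolean>getHashMap("testCube1", true, "testderived", false);
    Map<String, String> props = LensUtil.getHashMap("foo", "bar");
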
http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
index 27f9e0e..66ed938 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
@@ -870,7 +870,7 @@ public class CubeMetastoreServiceImpl extends BaseLensService implements CubeMet
         String dateStr = partSpec.get(partCol);
         Date date = null;
         try {
-          date = period.format().parse(dateStr);
+          date = period.parse(dateStr);
         } catch (Exception e) {
           continue;
         }
@@ -919,11 +919,9 @@ public class CubeMetastoreServiceImpl extends BaseLensService implements CubeMet
     String storageName, String filter) throws LensException {
     try {
       acquire(sessionid);
-      String tableName = MetastoreUtil.getStorageTableName(cubeTableName,
-        Storage.getPrefix(storageName));
+      String tableName = MetastoreUtil.getStorageTableName(cubeTableName, Storage.getPrefix(storageName));
       CubeMetastoreClient msClient = getClient(sessionid);
-      List<Partition> partitions = msClient.getPartitionsByFilter(
-        tableName, filter);
+      List<Partition> partitions = msClient.getPartitionsByFilter(tableName, filter);
       for (Partition part : partitions) {
         try {
           Map<String, Date> timeSpec = new HashMap<String, Date>();

http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
index f5db7bc..a5883f7 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
@@ -859,7 +859,7 @@ public final class JAXBUtils {
           XTimePartSpecElement timePartSpecElement = new XTimePartSpecElement();
           timePartSpecElement.setKey(entry.getKey());
           timePartSpecElement
-            .setValue(getXMLGregorianCalendar(UpdatePeriod.valueOf(xp.getUpdatePeriod().name()).format().parse(
+            .setValue(getXMLGregorianCalendar(UpdatePeriod.valueOf(xp.getUpdatePeriod().name()).parse(
               entry.getValue())));
           xp.getTimePartitionSpec().getPartSpecElement().add(timePartSpecElement);
         } else {

http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
index 5f9f639..3bc692e 100644
--- a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
@@ -28,15 +28,20 @@ import java.util.*;
 import javax.ws.rs.BadRequestException;
 import javax.ws.rs.NotFoundException;
 import javax.ws.rs.client.Entity;
-import javax.ws.rs.client.Invocation;
 import javax.ws.rs.client.WebTarget;
-import javax.ws.rs.core.*;
+import javax.ws.rs.core.Application;
+import javax.ws.rs.core.GenericType;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
 import javax.xml.bind.JAXBElement;
 import javax.xml.datatype.DatatypeFactory;
 import javax.xml.datatype.XMLGregorianCalendar;
 
-import org.apache.lens.api.*;
+import org.apache.lens.api.APIResult;
 import org.apache.lens.api.APIResult.Status;
+import org.apache.lens.api.DateTime;
+import org.apache.lens.api.LensSessionHandle;
+import org.apache.lens.api.StringList;
 import org.apache.lens.api.error.LensCommonErrorCode;
 import org.apache.lens.api.metastore.*;
 import org.apache.lens.api.result.LensAPIResult;
@@ -46,6 +51,7 @@ import org.apache.lens.server.LensJerseyTest;
 import org.apache.lens.server.LensServices;
 import org.apache.lens.server.LensTestUtil;
 import org.apache.lens.server.api.metastore.CubeMetastoreService;
+import org.apache.lens.server.api.util.LensUtil;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang.time.DateUtils;
@@ -56,12 +62,17 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 
 import org.glassfish.jersey.client.ClientConfig;
-import org.glassfish.jersey.media.multipart.*;
+import org.glassfish.jersey.media.multipart.FormDataBodyPart;
+import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
+import org.glassfish.jersey.media.multipart.FormDataMultiPart;
+import org.glassfish.jersey.media.multipart.MultiPartFeature;
 import org.testng.Assert;
 import org.testng.annotations.AfterTest;
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
@@ -74,6 +85,10 @@ public class TestMetastoreService extends LensJerseyTest {
   CubeMetastoreServiceImpl metastoreService;
   LensSessionHandle lensSessionId;
 
+  private void assertSuccess(APIResult result) {
+    assertEquals(result.getStatus(), Status.SUCCEEDED, String.valueOf(result));
+  }
+
   @BeforeTest
   public void setUp() throws Exception {
     super.setUp();
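
The assertSuccess() helper above folds this file's repeated status checks
into one call, and because it passes String.valueOf(result) as the assertion
message, a failure now includes the APIResult's string form in the output
(status plus message, assuming its toString covers them) rather than just the
status mismatch:

    // Before: assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
    // After:  assertSuccess(result);  // failure output includes the APIResult
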
@@ -114,17 +129,17 @@ public class TestMetastoreService extends LensJerseyTest {
     APIResult result = target().path("metastore").path("databases")
       .queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(dbName), APIResult.class);
     assertNotNull(result);
-    assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+    assertSuccess(result);
 
     // set
     result = dbTarget.queryParam("sessionid", lensSessionId).request(mediaType)
       .put(Entity.xml(dbName), APIResult.class);
     assertNotNull(result);
-    assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+    assertSuccess(result);
 
     // set without session id, we should get bad request
     try {
-      result = dbTarget.request(mediaType).put(Entity.xml(dbName), APIResult.class);
+      dbTarget.request(mediaType).put(Entity.xml(dbName), APIResult.class);
       fail("Should have thrown bad request exception");
     } catch (BadRequestException badReq) {
       // expected
@@ -142,7 +157,7 @@ public class TestMetastoreService extends LensJerseyTest {
     APIResult result = dbTarget.queryParam("sessionid", lensSessionId).request(mediaType)
       .post(Entity.xml(newDb), APIResult.class);
     assertNotNull(result);
-    assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+    assertSuccess(result);
 
     // Create again
     result = dbTarget.queryParam("sessionid", lensSessionId).queryParam("ignoreIfExisting", false)
@@ -161,13 +176,13 @@ public class TestMetastoreService extends LensJerseyTest {
     // First create the database
     APIResult create = dbTarget.queryParam("sessionid", lensSessionId).request(mediaType)
       .post(Entity.xml(dbName), APIResult.class);
-    assertEquals(create.getStatus(), APIResult.Status.SUCCEEDED);
+    assertSuccess(create);
 
     // Now drop it
     APIResult drop = dbTarget.path(dbName)
       .queryParam("cascade", "true")
       .queryParam("sessionid", lensSessionId).request(mediaType).delete(APIResult.class);
-    assertEquals(drop.getStatus(), APIResult.Status.SUCCEEDED);
+    assertSuccess(drop);
   }
 
   @Test
@@ -189,7 +204,7 @@ public class TestMetastoreService extends LensJerseyTest {
       Assert.assertTrue(allDbs.getElements().contains(db));
     }
 
-    List<String> expected = new ArrayList<String>(Arrays.asList(dbsToCreate));
+    List<String> expected = Lists.newArrayList(dbsToCreate);
     // Default is always there
     expected.add("default");
 
@@ -206,7 +221,7 @@ public class TestMetastoreService extends LensJerseyTest {
     APIResult result = dbTarget.queryParam("sessionid", lensSessionId).request(mediaType)
       .post(Entity.xml(dbName), APIResult.class);
     assertNotNull(result);
-    assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+    assertSuccess(result);
   }
 
   private void createStorage(String storageName) throws Exception {
@@ -224,7 +239,7 @@ public class TestMetastoreService extends LensJerseyTest {
     APIResult result = target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(
       cubeObjectFactory.createXStorage(xs)), APIResult.class);
     assertNotNull(result);
-    assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+    assertSuccess(result);
   }
 
   private void dropStorage(String storageName) throws Exception {
@@ -232,7 +247,7 @@ public class TestMetastoreService extends LensJerseyTest {
 
     APIResult result = target
       .queryParam("sessionid", lensSessionId).request(mediaType).delete(APIResult.class);
-    assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+    assertSuccess(result);
   }
 
   private void dropDatabase(String dbName) throws Exception {
@@ -240,21 +255,19 @@ public class TestMetastoreService extends LensJerseyTest {
 
     APIResult result = dbTarget.queryParam("cascade", "true")
       .queryParam("sessionid", lensSessionId).request(mediaType).delete(APIResult.class);
-    assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+    assertSuccess(result);
   }
 
   private void setCurrentDatabase(String dbName) throws Exception {
     WebTarget dbTarget = target().path("metastore").path("databases/current");
     APIResult result = dbTarget.queryParam("sessionid", lensSessionId).request(mediaType).put(Entity.xml(dbName),
       APIResult.class);
-    assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+    assertSuccess(result);
   }
 
   private String getCurrentDatabase() throws Exception {
-    WebTarget dbTarget = target().path("metastore").path("databases/current");
-    Invocation.Builder builder = dbTarget.queryParam("sessionid", lensSessionId).request(mediaType);
-    String response = builder.get(String.class);
-    return response;
+    return target().path("metastore").path("databases/current")
+      .queryParam("sessionid", lensSessionId).request(mediaType).get(String.class);
   }
 
   private XBaseCube createTestCube(String cubeName) throws Exception {
@@ -439,6 +452,16 @@ public class TestMetastoreService extends LensJerseyTest {
     return cube;
   }
 
+  private void assertCubesExistence(List<String> cubes, Map<String, Boolean> expectedExistence) {
+    for (Map.Entry<String, Boolean> expectedCubeEntry : expectedExistence.entrySet()) {
+      boolean found = false;
+      for (String cube : cubes) {
+        if (cube.equalsIgnoreCase(expectedCubeEntry.getKey())) {
+          found = true;
+          break;
+        }
+      }
+      // Fails both when an expected cube is absent and when an unexpected one shows up.
+      assertEquals(found, expectedCubeEntry.getValue().booleanValue(),
+        expectedCubeEntry.getKey() + (expectedCubeEntry.getValue()
+          ? " is missing from the list" : " is not supposed to be in the list"));
+    }
+  }
+
   @Test
   public void testCreateCube() throws Exception {
     final String DB = dbPFX + "test_create_cube";
@@ -448,10 +471,10 @@ public class TestMetastoreService extends LensJerseyTest {
     try {
       final XCube cube = createTestCube("testCube1");
       final WebTarget target = target().path("metastore").path("cubes");
-      APIResult result = null;
+      APIResult result;
       try {
         // first try without a session id
-        result = target.request(mediaType).post(Entity.xml(cubeObjectFactory.createXCube(cube)), APIResult.class);
+        target.request(mediaType).post(Entity.xml(cubeObjectFactory.createXCube(cube)), APIResult.class);
         fail("Should have thrown bad request exception");
       } catch (BadRequestException badReq) {
         // expected
@@ -459,19 +482,11 @@ public class TestMetastoreService extends LensJerseyTest {
       result = target.queryParam("sessionid", lensSessionId).request(mediaType)
         .post(Entity.xml(cubeObjectFactory.createXCube(cube)), APIResult.class);
       assertNotNull(result);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       StringList cubes = target().path("metastore/cubes").queryParam("sessionid", lensSessionId).request(mediaType)
         .get(StringList.class);
-      boolean foundcube = false;
-      for (String c : cubes.getElements()) {
-        if (c.equalsIgnoreCase("testCube1")) {
-          foundcube = true;
-          break;
-        }
-      }
-
-      assertTrue(foundcube);
+      assertCubesExistence(cubes.getElements(), LensUtil.<String, Boolean>getHashMap("testCube1", Boolean.TRUE));
       // create invalid derived cube
       XCube dcube = createDerivedCube("testderived", "testCube1", true);
       result = target.queryParam("sessionid", lensSessionId).request(
@@ -484,57 +499,26 @@ public class TestMetastoreService extends LensJerseyTest {
       result = target.queryParam("sessionid", lensSessionId).request(
         mediaType).post(Entity.xml(cubeObjectFactory.createXCube(dcube)), APIResult.class);
       assertNotNull(result);
-      assertEquals(result.getStatus(), Status.SUCCEEDED);
+      assertSuccess(result);
 
       cubes = target().path("metastore/cubes").queryParam("sessionid", lensSessionId).request(mediaType)
         .get(StringList.class);
-      boolean foundDcube = false;
-      foundcube = false;
-      for (String c : cubes.getElements()) {
-        if (c.equalsIgnoreCase("testCube1")) {
-          foundcube = true;
-        }
-        if (c.equalsIgnoreCase("testderived")) {
-          foundDcube = true;
-        }
-      }
-
-      assertTrue(foundcube);
-      assertTrue(foundDcube);
 
+      assertCubesExistence(cubes.getElements(),
+        LensUtil.<String, Boolean>getHashMap("testCube1", true, "testderived", true));
       // get all base cubes
       cubes = target().path("metastore/cubes").queryParam("sessionid", lensSessionId)
         .queryParam("type", "base").request(mediaType).get(StringList.class);
-      foundDcube = false;
-      foundcube = false;
-      for (String c : cubes.getElements()) {
-        if (c.equalsIgnoreCase("testCube1")) {
-          foundcube = true;
-        }
-        if (c.equalsIgnoreCase("testderived")) {
-          foundDcube = true;
-        }
-      }
 
-      assertTrue(foundcube);
-      assertFalse(foundDcube);
+      assertCubesExistence(cubes.getElements(),
+        LensUtil.<String, Boolean>getHashMap("testCube1", true, "testderived", false));
 
       // get all derived cubes
       cubes = target().path("metastore/cubes").queryParam("sessionid", lensSessionId)
         .queryParam("type", "derived").request(mediaType).get(StringList.class);
-      foundDcube = false;
-      foundcube = false;
-      for (String c : cubes.getElements()) {
-        if (c.equalsIgnoreCase("testCube1")) {
-          foundcube = true;
-        }
-        if (c.equalsIgnoreCase("testderived")) {
-          foundDcube = true;
-        }
-      }
 
-      assertFalse(foundcube);
-      assertTrue(foundDcube);
+      assertCubesExistence(cubes.getElements(),
+        LensUtil.<String, Boolean>getHashMap("testCube1", false, "testderived", true));
 
       // Create a non queryable cube
       final XCube qcube = createTestCube("testNoQueryCube");
@@ -546,51 +530,21 @@ public class TestMetastoreService extends LensJerseyTest {
       result = target.queryParam("sessionid", lensSessionId).request(
         mediaType).post(Entity.xml(cubeObjectFactory.createXCube(qcube)), APIResult.class);
       assertNotNull(result);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       // get all cubes
       cubes = target().path("metastore/cubes").queryParam("sessionid", lensSessionId)
         .queryParam("type", "all").request(mediaType).get(StringList.class);
-      foundDcube = false;
-      foundcube = false;
-      boolean foundQCube = false;
-      for (String c : cubes.getElements()) {
-        if (c.equalsIgnoreCase("testCube1")) {
-          foundcube = true;
-        }
-        if (c.equalsIgnoreCase("testderived")) {
-          foundDcube = true;
-        }
-        if (c.equalsIgnoreCase("testNoQueryCube")) {
-          foundQCube = true;
-        }
-      }
 
-      assertTrue(foundcube);
-      assertTrue(foundDcube);
-      assertTrue(foundQCube);
+      assertCubesExistence(cubes.getElements(),
+        LensUtil.<String, Boolean>getHashMap("testCube1", true, "testderived", true, "testNoQueryCube", true));
 
       // get queryable cubes
       cubes = target().path("metastore/cubes").queryParam("sessionid", lensSessionId)
         .queryParam("type", "queryable").request(mediaType).get(StringList.class);
-      foundDcube = false;
-      foundcube = false;
-      foundQCube = false;
-      for (String c : cubes.getElements()) {
-        if (c.equalsIgnoreCase("testCube1")) {
-          foundcube = true;
-        }
-        if (c.equalsIgnoreCase("testderived")) {
-          foundDcube = true;
-        }
-        if (c.equalsIgnoreCase("testNoQueryCube")) {
-          foundQCube = true;
-        }
-      }
+      assertCubesExistence(cubes.getElements(),
+        LensUtil.<String, Boolean>getHashMap("testCube1", true, "testderived", true, "testNoQueryCube", false));
 
-      assertTrue(foundcube);
-      assertTrue(foundDcube);
-      assertFalse(foundQCube);
     } finally {
       dropDatabase(DB);
       setCurrentDatabase(prevDb);
@@ -623,7 +577,7 @@ public class TestMetastoreService extends LensJerseyTest {
       JAXBElement<XCube> element = cubeObjectFactory.createXCube(cube);
       APIResult result =
         target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(element), APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       // Now get
       target = target().path("metastore").path("cubes").path("testGetCube");
@@ -639,7 +593,7 @@ public class TestMetastoreService extends LensJerseyTest {
         cube.getDimAttributes().getDimAttribute().size());
       assertEquals(actual.getExpressions().getExpression().size(), cube.getExpressions().getExpression().size());
       assertEquals(actual.getJoinChains().getJoinChain().size(), cube.getJoinChains().getJoinChain().size());
-      Map<String, XJoinChain> chains = new HashMap<String, XJoinChain>();
+      Map<String, XJoinChain> chains = new HashMap<>();
       for (XJoinChain xjc : actual.getJoinChains().getJoinChain()) {
         chains.put(xjc.getName(), xjc);
       }
@@ -721,7 +675,7 @@ public class TestMetastoreService extends LensJerseyTest {
       element = cubeObjectFactory.createXCube(dcube);
       result =
         target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(element), APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       // Now get
       target = target().path("metastore").path("cubes").path("testGetDerivedCube");
@@ -753,7 +707,7 @@ public class TestMetastoreService extends LensJerseyTest {
       JAXBElement<XCube> element = cubeObjectFactory.createXCube(cube);
       APIResult result =
         target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(element), APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       final XCube dcube = createDerivedCube("test_drop_derived_cube", "test_drop_cube", false);
       target = target().path("metastore").path("cubes");
@@ -761,11 +715,11 @@ public class TestMetastoreService extends LensJerseyTest {
       element = cubeObjectFactory.createXCube(dcube);
       result =
         target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(element), APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       target = target().path("metastore").path("cubes").path("test_drop_derived_cube");
       result = target.queryParam("sessionid", lensSessionId).request(mediaType).delete(APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       // Now get should give 404
       try {
@@ -779,7 +733,7 @@ public class TestMetastoreService extends LensJerseyTest {
 
       target = target().path("metastore").path("cubes").path("test_drop_cube");
       result = target.queryParam("sessionid", lensSessionId).request(mediaType).delete(APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       // Now get should give 404
       try {
@@ -811,7 +765,7 @@ public class TestMetastoreService extends LensJerseyTest {
       JAXBElement<XCube> element = cubeObjectFactory.createXCube(cube);
       APIResult result =
         target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(element), APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       // Update something
       // Add a measure and dimension
@@ -835,7 +789,7 @@ public class TestMetastoreService extends LensJerseyTest {
       element = cubeObjectFactory.createXCube(cube);
       result = target.path(cubeName)
         .queryParam("sessionid", lensSessionId).request(mediaType).put(Entity.xml(element), APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       JAXBElement<XCube> got =
         target.path(cubeName)
@@ -862,7 +816,7 @@ public class TestMetastoreService extends LensJerseyTest {
       element = cubeObjectFactory.createXCube(dcube);
       result =
         target.queryParam("sessionid", lensSessionId).request(mediaType).post(Entity.xml(element), APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       // Update something
       // Add a measure and dimension
@@ -878,7 +832,7 @@ public class TestMetastoreService extends LensJerseyTest {
       element = cubeObjectFactory.createXCube(dcube);
       result = target.path("test_update_derived")
         .queryParam("sessionid", lensSessionId).request(mediaType).put(Entity.xml(element), APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       got = target.path("test_update_derived")
         .queryParam("sessionid", lensSessionId).request(mediaType).get(new GenericType<JAXBElement<XCube>>() {});
@@ -938,7 +892,7 @@ public class TestMetastoreService extends LensJerseyTest {
       APIResult result = target.path("store1")
         .queryParam("sessionid", lensSessionId).queryParam("storage", "store1")
         .request(mediaType).put(Entity.xml(cubeObjectFactory.createXStorage(store1)), APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       store1 = target.path("store1").queryParam("sessionid", lensSessionId).request(mediaType).get(XStorage.class);
       assertEquals(store1.getName(), "store1");
@@ -1033,7 +987,7 @@ public class TestMetastoreService extends LensJerseyTest {
     APIResult result = target().path("metastore").path("dimensions")
       .queryParam("sessionid", lensSessionId).request(
         mediaType).post(Entity.xml(cubeObjectFactory.createXDimension(dimension)), APIResult.class);
-    assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+    assertSuccess(result);
     XDimensionTable dt = createDimTable("testdim", dimTableName);
     dt.getStorageTables().getStorageTable().add(createStorageTblElement("test", dimTableName, "HOURLY"));
     final FormDataMultiPart mp = new FormDataMultiPart();
@@ -1047,7 +1001,7 @@ public class TestMetastoreService extends LensJerseyTest {
       .path("dimtables")
       .request(mediaType)
       .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
-    assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+    assertSuccess(result);
     return dt;
   }
 
@@ -1187,13 +1141,13 @@ public class TestMetastoreService extends LensJerseyTest {
     APIResult result = target.queryParam("sessionid", lensSessionId).request(
       mediaType).post(Entity.xml(cubeObjectFactory.createXDimension(dimension)), APIResult.class);
     assertNotNull(result);
-    assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+    assertSuccess(result);
 
     // create
     result = target.queryParam("sessionid", lensSessionId).request(
       mediaType).post(Entity.xml(cubeObjectFactory.createXDimension(dimension2)), APIResult.class);
     assertNotNull(result);
-    assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+    assertSuccess(result);
   }
 
   @Test
@@ -1306,7 +1260,7 @@ public class TestMetastoreService extends LensJerseyTest {
       APIResult result = target.path("testdim")
         .queryParam("sessionid", lensSessionId)
         .request(mediaType).put(Entity.xml(cubeObjectFactory.createXDimension(testDim)), APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       XDimension altered = target.path("testdim").queryParam("sessionid", lensSessionId).request(mediaType).get(
         XDimension.class);
@@ -1327,7 +1281,7 @@ public class TestMetastoreService extends LensJerseyTest {
       // drop the dimension
       result = target.path("testdim")
         .queryParam("sessionid", lensSessionId).request(mediaType).delete(APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       // Now get should give 404
       try {
@@ -1369,11 +1323,11 @@ public class TestMetastoreService extends LensJerseyTest {
         target().path("metastore/dimtables").path(table)
           .queryParam("cascade", "true")
           .queryParam("sessionid", lensSessionId).request(mediaType).delete(APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       // Drop again, should get 404 now
       try {
-        result = target().path("metastore/dimtables").path(table)
+        target().path("metastore/dimtables").path(table)
           .queryParam("cascade", "true")
           .queryParam("sessionid", lensSessionId).request(mediaType).delete(APIResult.class);
         fail("Should have got 404");
@@ -1428,7 +1382,7 @@ public class TestMetastoreService extends LensJerseyTest {
         .path(table)
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .put(Entity.xml(cubeObjectFactory.createXDimensionTable(dt2)), APIResult.class);
-      assertEquals(result.getStatus(), Status.SUCCEEDED);
+      assertSuccess(result);
 
       // Get the updated table
       JAXBElement<XDimensionTable> dtElement2 = target().path("metastore/dimtables").path(table)
@@ -1465,7 +1419,7 @@ public class TestMetastoreService extends LensJerseyTest {
         .path(table)
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .put(Entity.xml(cubeObjectFactory.createXDimensionTable(dt3)), APIResult.class);
-      assertEquals(result.getStatus(), Status.SUCCEEDED);
+      assertSuccess(result);
 
       // Get the updated table
       JAXBElement<XDimensionTable> dtElement4 = target().path("metastore/dimtables").path(table)
@@ -1495,7 +1449,7 @@ public class TestMetastoreService extends LensJerseyTest {
         target().path("metastore/dimtables").path(table)
           .queryParam("cascade", "true")
           .queryParam("sessionid", lensSessionId).request(mediaType).delete(APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
     } finally {
       setCurrentDatabase(prevDb);
       dropDatabase(DB);
@@ -1542,7 +1496,7 @@ public class TestMetastoreService extends LensJerseyTest {
       APIResult result = target().path("metastore/dimtables").path(table).path("/storages")
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .post(Entity.xml(cubeObjectFactory.createXStorageTableElement(sTbl)), APIResult.class);
-      assertEquals(result.getStatus(), Status.SUCCEEDED);
+      assertSuccess(result);
 
       StringList storages = target().path("metastore").path("dimtables")
         .path(table).path("storages")
@@ -1566,7 +1520,7 @@ public class TestMetastoreService extends LensJerseyTest {
       result = target().path("metastore/dimtables/").path(table).path("storages").path("test")
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .delete(APIResult.class);
-      assertEquals(result.getStatus(), Status.SUCCEEDED);
+      assertSuccess(result);
 
       storages = target().path("metastore").path("dimtables")
         .path(table).path("storages")
@@ -1591,7 +1545,7 @@ public class TestMetastoreService extends LensJerseyTest {
       result = target().path("metastore/dimtables").path(table).path("/storages")
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .post(Entity.xml(cubeObjectFactory.createXStorageTableElement(sTbl)), APIResult.class);
-      assertEquals(result.getStatus(), Status.SUCCEEDED);
+      assertSuccess(result);
 
       storages = target().path("metastore").path("dimtables")
         .path(table).path("storages")
@@ -1632,12 +1586,12 @@ public class TestMetastoreService extends LensJerseyTest {
       APIResult result = target().path("metastore/dimtables").path(table).path("/storages")
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .post(Entity.xml(cubeObjectFactory.createXStorageTableElement(sTbl)), APIResult.class);
-      assertEquals(result.getStatus(), Status.SUCCEEDED);
+      assertSuccess(result);
 
       result = target().path("metastore/dimtables/").path(table).path("storages")
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .delete(APIResult.class);
-      assertEquals(result.getStatus(), Status.SUCCEEDED);
+      assertSuccess(result);
 
 
       JAXBElement<XDimensionTable> dt = target().path("metastore/dimtables").path(table)
@@ -1679,8 +1633,7 @@ public class TestMetastoreService extends LensJerseyTest {
     f.getColumns().getColumn().add(c2);
 
 
-    Map<String, String> properties = new HashMap<String, String>();
-    properties.put("foo", "bar");
+    Map<String, String> properties = LensUtil.getHashMap("foo", "bar");
     f.getProperties().getProperty().addAll(JAXBUtils.xPropertiesFromMap(properties));
     return f;
   }
@@ -1710,7 +1663,7 @@ public class TestMetastoreService extends LensJerseyTest {
         .path("facts")
         .request(mediaType)
         .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       // Get all fact names, this should contain the fact table
       StringList factNames = target().path("metastore/facts")
@@ -1748,11 +1701,11 @@ public class TestMetastoreService extends LensJerseyTest {
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .delete(APIResult.class);
 
-      assertEquals(result.getStatus(), Status.SUCCEEDED);
+      assertSuccess(result);
 
       // Drop again, this time it should give a 404
       try {
-        result = target().path("metastore").path("facts").path(table)
+        target().path("metastore").path("facts").path(table)
           .queryParam("cascade", "true")
           .queryParam("sessionid", lensSessionId).request(mediaType)
           .delete(APIResult.class);
@@ -1792,7 +1745,7 @@ public class TestMetastoreService extends LensJerseyTest {
         .path("facts")
         .request(mediaType)
         .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       // Get the created table
       JAXBElement<XFactTable> gotFactElement = target().path("metastore/facts").path(table)
@@ -1818,7 +1771,7 @@ public class TestMetastoreService extends LensJerseyTest {
       result = target().path("metastore").path("facts").path(table)
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .put(Entity.xml(cubeObjectFactory.createXFactTable(update)), APIResult.class);
-      assertEquals(result.getStatus(), Status.SUCCEEDED);
+      assertSuccess(result);
 
       // Get the updated table
       gotFactElement = target().path("metastore/facts").path(table)
@@ -1859,11 +1812,11 @@ public class TestMetastoreService extends LensJerseyTest {
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .delete(APIResult.class);
 
-      assertEquals(result.getStatus(), Status.SUCCEEDED);
+      assertSuccess(result);
 
       // Drop again, this time it should give a 404
       try {
-        result = target().path("metastore").path("facts").path(table)
+        target().path("metastore").path("facts").path(table)
           .queryParam("cascade", "true")
           .queryParam("sessionid", lensSessionId).request(mediaType)
           .delete(APIResult.class);
@@ -1903,7 +1856,7 @@ public class TestMetastoreService extends LensJerseyTest {
         .path("facts")
         .request(mediaType)
         .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       // Test get storages
       StringList storageList = target().path("metastore/facts").path(table).path("storages")
@@ -1916,7 +1869,7 @@ public class TestMetastoreService extends LensJerseyTest {
       result = target().path("metastore/facts").path(table).path("storages")
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .post(Entity.xml(cubeObjectFactory.createXStorageTableElement(sTbl)), APIResult.class);
-      assertEquals(result.getStatus(), Status.SUCCEEDED);
+      assertSuccess(result);
 
       // Get the fact storage
       StringList got = target().path("metastore/facts").path(table).path("storages")
@@ -1941,10 +1894,9 @@ public class TestMetastoreService extends LensJerseyTest {
       // Drop new storage
       result = target().path("metastore/facts").path(table).path("storages").path("S3")
         .queryParam("sessionid", lensSessionId).request(mediaType).delete(APIResult.class);
-      assertEquals(result.getStatus(), Status.SUCCEEDED);
+      assertSuccess(result);
 
       // Now S3 should not be available
-      storageList = null;
       storageList = target().path("metastore/facts").path(table).path("storages")
         .queryParam("sessionid", lensSessionId).request(mediaType).get(StringList.class);
       assertEquals(storageList.getElements().size(), 2);
@@ -1958,15 +1910,17 @@ public class TestMetastoreService extends LensJerseyTest {
   private XPartition createPartition(String cubeTableName, Date partDate) {
     return createPartition(cubeTableName, partDate, "dt");
   }
+
   private XTimePartSpecElement createTimePartSpecElement(Date partDate, String timeDimension) {
     XTimePartSpecElement timePart = cubeObjectFactory.createXTimePartSpecElement();
     timePart.setKey(timeDimension);
     timePart.setValue(JAXBUtils.getXMLGregorianCalendar(HOURLY.truncate(partDate)));
     return timePart;
   }
+
   private XPartition createPartition(String cubeTableName, Date partDate, final String timeDimension) {
 
-    return createPartition(cubeTableName, Arrays.asList(createTimePartSpecElement(partDate, timeDimension)));
+    return createPartition(cubeTableName, Lists.newArrayList(createTimePartSpecElement(partDate, timeDimension)));
   }
 
   private XPartition createPartition(String cubeTableName, final List<XTimePartSpecElement> timePartSpecs) {
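The switch from Arrays.asList to Lists.newArrayList above matters if the returned list is mutated later: Arrays.asList gives a fixed-size view backed by the array, while Guava's Lists.newArrayList returns a growable copy. A small illustration (variable names hypothetical):

      List<XTimePartSpecElement> fixed = Arrays.asList(spec);          // add() throws UnsupportedOperationException
      List<XTimePartSpecElement> growable = Lists.newArrayList(spec);  // safe to extend
      growable.add(anotherSpec);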
@@ -2007,9 +1961,7 @@ public class TestMetastoreService extends LensJerseyTest {
       final XCube cube = createTestCube(cubeName);
       APIResult result = target().path("metastore").path("cubes").queryParam("sessionid", lensSessionId)
         .request(mediaType).post(Entity.xml(cubeObjectFactory.createXCube(cube)), APIResult.class);
-      if (!result.getStatus().equals(APIResult.Status.SUCCEEDED)) {
-        throw new RuntimeException("Setup failure: Cube Creation failed : " + result.getMessage());
-      }
+      assertSuccess(result);
 
       // Create two facts and fact storage tables with one of the facts
       // not having one of the time dimensions in the partition
@@ -2049,9 +2001,7 @@ public class TestMetastoreService extends LensJerseyTest {
         .path("storages/" + storages[0] + "/partition")
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .post(Entity.xml(cubeObjectFactory.createXPartition(xp)), APIResult.class);
-      if (!partAddResult.getStatus().equals(APIResult.Status.SUCCEEDED)) {
-        throw new RuntimeException("Setup failure: Partition Creation failed : " + partAddResult.getMessage());
-      }
+      assertSuccess(partAddResult);
 
       // End: Setup
 
@@ -2106,7 +2056,7 @@ public class TestMetastoreService extends LensJerseyTest {
         .path("facts")
         .request(mediaType)
         .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       APIResult partAddResult;
       // Add null partition
@@ -2131,7 +2081,7 @@ public class TestMetastoreService extends LensJerseyTest {
       partAddResult = target().path("metastore/facts/").path(table).path("storages/S2/partition")
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .post(Entity.xml(cubeObjectFactory.createXPartition(xp)), APIResult.class);
-      assertEquals(partAddResult.getStatus(), Status.SUCCEEDED);
+      assertSuccess(partAddResult);
 
       // add same should fail
       partAddResult = target().path("metastore/facts/").path(table).path("storages/S2/partition")
@@ -2143,7 +2093,7 @@ public class TestMetastoreService extends LensJerseyTest {
       APIResult partUpdateResult = target().path("metastore/facts/").path(table).path("storages/S2/partition")
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .put(Entity.xml(cubeObjectFactory.createXPartition(xp)), APIResult.class);
-      assertEquals(partUpdateResult.getStatus(), Status.SUCCEEDED);
+      assertSuccess(partUpdateResult);
 
       JAXBElement<XPartitionList> partitionsElement = target().path("metastore/facts").path(table)
         .path("storages/S2/partitions")
@@ -2161,7 +2111,7 @@ public class TestMetastoreService extends LensJerseyTest {
       XTimePartSpecElement timePartSpec = readPartition.getTimePartitionSpec().getPartSpecElement().iterator().next();
       XPartSpecElement fullPartSpec = readPartition.getFullPartitionSpec().getPartSpecElement().iterator().next();
       assertEquals(timePartSpec.getKey(), fullPartSpec.getKey());
-      assertEquals(UpdatePeriod.valueOf(xp.getUpdatePeriod().name()).format().format(JAXBUtils.getDateFromXML(
+      assertEquals(UpdatePeriod.valueOf(xp.getUpdatePeriod().name()).format(JAXBUtils.getDateFromXML(
         timePartSpec.getValue())), fullPartSpec.getValue());
       DateTime date =
         target().path("metastore/cubes").path("testCube").path("latestdate").queryParam("timeDimension", "dt")
@@ -2185,7 +2135,7 @@ public class TestMetastoreService extends LensJerseyTest {
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .delete(APIResult.class);
 
-      assertEquals(dropResult.getStatus(), Status.SUCCEEDED);
+      assertSuccess(dropResult);
 
       // Verify partition was dropped
       partitionsElement = target().path("metastore/facts").path(table).path("storages/S2/partitions")
@@ -2227,7 +2177,7 @@ public class TestMetastoreService extends LensJerseyTest {
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .post(Entity.xml(cubeObjectFactory.createXPartitionList(toXPartitionList(xp))),
           APIResult.class);
-      assertEquals(partAddResult.getStatus(), Status.SUCCEEDED);
+      assertSuccess(partAddResult);
 
       // Verify partition was added
       partitionsElement = target().path("metastore/facts").path(table).path("storages/S2/partitions")
@@ -2239,12 +2189,12 @@ public class TestMetastoreService extends LensJerseyTest {
       assertEquals(partitions.getPartition().size(), 1);
 
       // Drop again by values
-      String[] val = new String[]{HOURLY.format().format(partDate)};
+      String[] val = new String[]{HOURLY.format(partDate)};
       dropResult = target().path("metastore/facts").path(table).path("storages/S2/partition")
         .queryParam("values", StringUtils.join(val, ","))
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .delete(APIResult.class);
-      assertEquals(dropResult.getStatus(), Status.SUCCEEDED);
+      assertSuccess(dropResult);
 
       // Verify partition was dropped
       partitionsElement = target().path("metastore/facts").path(table).path("storages/S2/partitions")
@@ -2286,7 +2236,7 @@ public class TestMetastoreService extends LensJerseyTest {
       partAddResult = target().path("metastore/dimtables/").path(table).path("storages/test/partition")
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .post(Entity.xml(cubeObjectFactory.createXPartition(xp)), APIResult.class);
-      assertEquals(partAddResult.getStatus(), Status.SUCCEEDED);
+      assertSuccess(partAddResult);
 
       // create call for same
       partAddResult = target().path("metastore/dimtables/").path(table).path("storages/test/partition")
@@ -2299,7 +2249,7 @@ public class TestMetastoreService extends LensJerseyTest {
       APIResult partUpdateResult = target().path("metastore/dimtables/").path(table).path("storages/test/partition")
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .put(Entity.xml(cubeObjectFactory.createXPartition(xp)), APIResult.class);
-      assertEquals(partUpdateResult.getStatus(), Status.SUCCEEDED);
+      assertSuccess(partUpdateResult);
 
       JAXBElement<XPartitionList> partitionsElement = target().path("metastore/dimtables").path(table)
         .path("storages/test/partitions")
@@ -2332,7 +2282,7 @@ public class TestMetastoreService extends LensJerseyTest {
       XTimePartSpecElement timePartSpec = postedPartition.getTimePartitionSpec().getPartSpecElement().iterator().next();
       XPartSpecElement fullPartSpec = postedPartition.getFullPartitionSpec().getPartSpecElement().iterator().next();
       assertEquals(timePartSpec.getKey(), fullPartSpec.getKey());
-      assertEquals(UpdatePeriod.valueOf(xp.getUpdatePeriod().name()).format().format(JAXBUtils.getDateFromXML(
+      assertEquals(UpdatePeriod.valueOf(xp.getUpdatePeriod().name()).format(JAXBUtils.getDateFromXML(
         timePartSpec.getValue())), fullPartSpec.getValue());
 
       assertNull(latestPartition.getTimePartitionSpec());
@@ -2355,7 +2305,7 @@ public class TestMetastoreService extends LensJerseyTest {
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .delete(APIResult.class);
 
-      assertEquals(dropResult.getStatus(), Status.SUCCEEDED);
+      assertSuccess(dropResult);
 
       // Verify partition was dropped
       partitionsElement = target().path("metastore/dimtables").path(table).path("storages/test/partitions")
@@ -2389,7 +2339,7 @@ public class TestMetastoreService extends LensJerseyTest {
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .post(Entity.xml(cubeObjectFactory.createXPartitionList(toXPartitionList(xp))),
           APIResult.class);
-      assertEquals(partAddResult.getStatus(), Status.SUCCEEDED);
+      assertSuccess(partAddResult);
 
       // Verify partition was added
       partitionsElement = target().path("metastore/dimtables").path(table).path("storages/test/partitions")
@@ -2401,18 +2351,39 @@ public class TestMetastoreService extends LensJerseyTest {
       assertEquals(partitions.getPartition().size(), 2);
 
       // Drop again by values
-      String[] val = new String[]{HOURLY.format().format(partDate)};
+      String[] val = new String[]{HOURLY.format(partDate)};
       dropResult = target().path("metastore/dimtables").path(table).path("storages/test/partition")
         .queryParam("values", StringUtils.join(val, ","))
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .delete(APIResult.class);
-      assertEquals(dropResult.getStatus(), Status.SUCCEEDED);
+      assertSuccess(dropResult);
 
       // Verify partition was dropped
       partitionsElement = target().path("metastore/dimtables").path(table).path("storages/test/partitions")
         .queryParam("sessionid", lensSessionId).request(mediaType)
         .get(new GenericType<JAXBElement<XPartitionList>>() {});
+      partitions = partitionsElement.getValue();
+      assertNotNull(partitions);
+      assertEquals(partitions.getPartition().size(), 0);
+
+      // add again, this time we'll drop by filter
+      partAddResult = target().path("metastore/dimtables/").path(table).path("storages/test/partitions")
+        .queryParam("sessionid", lensSessionId).request(mediaType)
+        .post(Entity.xml(cubeObjectFactory.createXPartitionList(toXPartitionList(xp))),
+          APIResult.class);
+      assertSuccess(partAddResult);
 
+      // drop by filter
+      dropResult = target().path("metastore/dimtables").path(table).path("storages/test/partitions")
+        .queryParam("filter", "dt='" + HOURLY.format(partDate) + "'")
+        .queryParam("sessionid", lensSessionId).request(mediaType)
+        .delete(APIResult.class);
+      assertSuccess(dropResult);
+
+      // Verify partition was dropped
+      partitionsElement = target().path("metastore/dimtables").path(table).path("storages/test/partitions")
+        .queryParam("sessionid", lensSessionId).request(mediaType)
+        .get(new GenericType<JAXBElement<XPartitionList>>() {});
       partitions = partitionsElement.getValue();
       assertNotNull(partitions);
       assertEquals(partitions.getPartition().size(), 0);
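Both delete variants above key off the same formatted time value; the change from HOURLY.format().format(partDate) to HOURLY.format(partDate) suggests UpdatePeriod now formats a Date directly instead of handing out a shared DateFormat. A sketch of the two request parameters (pattern assumed):

      String value = HOURLY.format(partDate);   // hourly pattern, e.g. yyyy-MM-dd-HH (assumed)
      // drop by values:  ?values=<value>
      // drop by filter:  ?filter=dt='<value>'
      String filter = "dt='" + value + "'";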
@@ -2507,7 +2478,7 @@ public class TestMetastoreService extends LensJerseyTest {
       APIResult result =
         target().path("metastore").path("cubes").queryParam("sessionid",
           lensSessionId).request(mediaType).post(Entity.xml(element), APIResult.class);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       // get a cube table
       Response response = target.path("testhiveCube").queryParam(
@@ -2567,14 +2538,14 @@ public class TestMetastoreService extends LensJerseyTest {
         cubeTarget.queryParam("sessionid", lensSessionId).request(mediaType)
           .post(Entity.xml(cubeObjectFactory.createXCube(flatTestCube)), APIResult.class);
       assertNotNull(result);
-      assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
+      assertSuccess(result);
 
       // create chained dimensions - testdim and testdim2
       createdChainedDimensions();
 
       // Now test flattened view
       final WebTarget flatCubeTarget = target().path("metastore").path("flattened").path("flattestcube");
-      XFlattenedColumns flattenedColumns = null;
+      XFlattenedColumns flattenedColumns;
       JAXBElement<XFlattenedColumns> actualElement = flatCubeTarget.queryParam("sessionid", lensSessionId).request()
         .get(new GenericType<JAXBElement<XFlattenedColumns>>() {});
       flattenedColumns = actualElement.getValue();
@@ -2584,12 +2555,12 @@ public class TestMetastoreService extends LensJerseyTest {
       assertNotNull(columns);
       assertTrue(!columns.isEmpty());
 
-      Set<String> tables = new HashSet<String>();
-      Set<String> colSet = new HashSet<String>();
+      Set<String> tables = new HashSet<>();
+      Set<String> colSet = new HashSet<>();
       populateActualTablesAndCols(columns, tables, colSet);
 
-      assertEquals(tables, new HashSet<String>(Arrays.asList("flattestcube", "testdim", "testdim2")));
-      assertEquals(colSet, new HashSet<String>(Arrays.asList(
+      assertEquals(tables, Sets.newHashSet("flattestcube", "testdim", "testdim2"));
+      assertEquals(colSet, Sets.newHashSet(
         "flattestcube.msr1",
         "flattestcube.msr2",
         "flattestcube.dim1",
@@ -2607,7 +2578,7 @@ public class TestMetastoreService extends LensJerseyTest {
         "dim2chain-testdim2.col2",
         "dim2chain-testdim2.col1",
         "dim2chain-testdim2.dimexpr"
-      )));
+      ));
 
       // Now test flattened view for dimension
       final WebTarget flatDimTarget = target().path("metastore").path("flattened").path("testdim");
@@ -2620,12 +2591,12 @@ public class TestMetastoreService extends LensJerseyTest {
       assertNotNull(columns);
       assertTrue(!columns.isEmpty());
 
-      tables = new HashSet<String>();
-      colSet = new HashSet<String>();
+      tables = new HashSet<>();
+      colSet = new HashSet<>();
       populateActualTablesAndCols(columns, tables, colSet);
 
-      assertEquals(tables, new HashSet<String>(Arrays.asList("testdim", "testdim2")));
-      assertEquals(colSet, new HashSet<String>(Arrays.asList(
+      assertEquals(tables, Sets.newHashSet("testdim", "testdim2"));
+      assertEquals(colSet, Sets.newHashSet(
         "testdim.col2",
         "testdim.col1",
         "testdim.col3",
@@ -2635,7 +2606,7 @@ public class TestMetastoreService extends LensJerseyTest {
         "chain1-testdim2.col2",
         "chain1-testdim2.col1",
         "chain1-testdim2.dimexpr"
-      )));
+      ));
 
     } finally {
       dropDatabase(DB);
@@ -2649,8 +2620,10 @@ public class TestMetastoreService extends LensJerseyTest {
     // Create a fact table object linked to cubeName
     XFactTable f = createFactTable(tableName, cubeName);
     // Create a storage tables
-    f.getStorageTables().getStorageTable().add(createStorageTblElement("S1", tableName, timePartColNames, "HOURLY"));
-
+    for (String storage : storages) {
+      f.getStorageTables().getStorageTable()
+        .add(createStorageTblElement(storage, tableName, timePartColNames, "HOURLY"));
+    }
 
     // Call API to create a fact table and storage table
     final FormDataMultiPart mp = new FormDataMultiPart();
@@ -2664,9 +2637,7 @@ public class TestMetastoreService extends LensJerseyTest {
       .path("facts")
       .request(mediaType)
       .post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE), APIResult.class);
-    if (!result.getStatus().equals(APIResult.Status.SUCCEEDED)) {
-      throw new RuntimeException("Fact/Storage Table Creation failed");
-    }
+    assertSuccess(result);
   }
 
   private String getUniqueDbName() {


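Nearly every assertion in this test follows the same JAX-RS client shape; a representative call, as it appears in the hunks above, is:

      APIResult result = target().path("metastore").path("facts").path(table)
        .queryParam("sessionid", lensSessionId).request(mediaType)
        .delete(APIResult.class);
      assertSuccess(result);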
[07/50] [abbrv] lens git commit: LENS-864: LENS - 864

Posted by sh...@apache.org.
LENS-864: LENS - 864


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/2870be7c
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/2870be7c
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/2870be7c

Branch: refs/heads/LENS-581
Commit: 2870be7c8c2dbef92c237878c5faba048d3a139d
Parents: 7b5f4a0
Author: Ankeet Maini <an...@gmail.com>
Authored: Wed Nov 18 17:58:45 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Wed Nov 18 17:58:45 2015 +0530

----------------------------------------------------------------------
 lens-ui/app/components/QueryBoxComponent.js     | 122 +++++++------
 .../app/components/QueryParamRowComponent.js    | 176 +++++++++----------
 lens-ui/app/components/QueryParamsComponent.js  |  72 +++++---
 .../components/SavedQueryPreviewComponent.js    |  26 +--
 4 files changed, 219 insertions(+), 177 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/2870be7c/lens-ui/app/components/QueryBoxComponent.js
----------------------------------------------------------------------
diff --git a/lens-ui/app/components/QueryBoxComponent.js b/lens-ui/app/components/QueryBoxComponent.js
index 6f4eeb7..ec6a06b 100644
--- a/lens-ui/app/components/QueryBoxComponent.js
+++ b/lens-ui/app/components/QueryBoxComponent.js
@@ -21,6 +21,7 @@ import React from 'react';
 import ClassNames from 'classnames';
 import CodeMirror from 'codemirror';
 import assign from 'object-assign';
+import _ from 'lodash';
 import 'codemirror/lib/codemirror.css';
 import 'codemirror/addon/edit/matchbrackets.js';
 import 'codemirror/addon/hint/sql-hint.js';
@@ -74,6 +75,20 @@ function setCode (code) {
   }
 }
 
+function getEmptyState () {
+  return {
+    clientMessage: null, // to give user instant ack
+    isRunQueryDisabled: true,
+    serverMessage: null, // type (success or error), text as keys
+    isCollapsed: false,
+    params: null,
+    isModeEdit: false,
+    savedQueryId: null,
+    runImmediately: false,
+    description: ''
+  };
+}
+
 // used to populate the query box when user wants to edit a query
 // TODO improve this.
 // this takes in the query handle and writes the query
@@ -114,16 +129,7 @@ class QueryBox extends React.Component {
     this.saveOrUpdate = this.saveOrUpdate.bind(this);
     this.runSavedQuery = this.runSavedQuery.bind(this);
 
-    this.state = {
-      clientMessage: null, // to give user instant ack
-      isRunQueryDisabled: true,
-      serverMessage: null, // type (success or error), text as keys
-      isCollapsed: false,
-      params: null,
-      isModeEdit: false,
-      savedQueryId: null,
-      runImmediately: false
-    };
+    this.state = getEmptyState();
   }
 
   componentDidMount () {
@@ -184,6 +190,7 @@ class QueryBox extends React.Component {
         this.setState({
           params: savedQuery.parameters,
           savedQueryId: savedQuery.id,
+          description: savedQuery.description,
           isModeEdit: true
         });
       }
@@ -257,7 +264,7 @@ class QueryBox extends React.Component {
 
             { this.state.params && !!this.state.params.length &&
               <QueryParams params={this.state.params} close={this.closeParamBox}
-                saveParams={this.saveParams}/>
+                saveParams={this.saveParams} description={this.state.description}/>
             }
 
             { this.state.serverMessage &&
@@ -304,17 +311,40 @@ class QueryBox extends React.Component {
   updateQuery (params) {
     let query = this._getSavedQueryDetails(params);
     if (!query) return;
+
+    var options = {
+      parameters: query.parameters,
+      description: query.description,
+      name: query.name
+    };
+
     AdhocQueryActions
-      .updateSavedQuery(query.secretToken, query.user, query.query, query.params, this.state.savedQueryId);
-    this.setState({ clientMessage: clientMessages.updateQuery });
+      .updateSavedQuery(query.secretToken, query.user, query.query,
+        options, this.state.savedQueryId);
+
+    this.setState({
+      clientMessage: clientMessages.updateQuery,
+      runImmediately: params && params.runImmediately
+    });
   }
 
   saveQuery (params) {
     let query = this._getSavedQueryDetails(params);
     if (!query) return;
+
+    var options = {
+      parameters: query.parameters,
+      description: query.description,
+      name: query.name
+    };
+
     AdhocQueryActions
-      .saveQuery(query.secretToken, query.user, query.query, query.params);
-    this.setState({ clientMessage: clientMessages.saveQuery });
+      .saveQuery(query.secretToken, query.user, query.query, options);
+
+    this.setState({
+      clientMessage: clientMessages.saveQuery,
+      runImmediately: params && params.runImmediately
+    });
   }
 
   // internal which is called during save saved query & edit saved query
@@ -329,14 +359,13 @@ class QueryBox extends React.Component {
     let user = UserStore.getUserDetails().email;
     let query = codeMirror.getValue();
 
-    params = assign({}, params);
-    params.name = queryName;
-
     return {
       secretToken: secretToken,
       user: user,
       query: query,
-      params: params
+      parameters: params && params.parameters,
+      description: params && params.description,
+      name: queryName
     };
   }
 
@@ -419,42 +448,47 @@ class QueryBox extends React.Component {
   _onChangeSavedQueryStore (hash) {
     if (!hash) return;
 
+    var newState = _.assign({}, this.state);
+
     switch (hash.type) {
       case 'failure':
-        this.state.clientMessage = null;
-        this.state.serverMessage = hash.message;
+        newState.clientMessage = null;
+        newState.serverMessage = hash.message;
         break;
 
       case 'success':
-        this.state.clientMessage = null;
-        this.state.serverMessage = hash.message;
-        // make the mode of QueryBox back to normal, if it's in Edit
-        if (this.state.isModeEdit) {
-          this.state.isModeEdit = false;
-        }
-
         // trigger to fetch the edited from server again
         let token = UserStore.getUserDetails().secretToken;
         if (hash.id) AdhocQueryActions.getSavedQueryById(token, hash.id);
         // means the query was saved successfully.
 
         // run immediately?
-        if (this.state.runImmediately && hash.id) {
+        if (newState.runImmediately && hash.id) {
           this.runSavedQuery(hash.id);
-          this.state.runImmediately = false;
+          newState.runImmediately = false;
         }
 
-        // make params null
-        this.state.params = null;
-
+        // empty the state, clean the slate
+        setCode('');
+        this.refs.queryName.getDOMNode().value = '';
+        newState = getEmptyState();
+        newState.serverMessage = hash.message;
         break;
 
       case 'params':
-        this.state.params = hash.params;
+        newState.params = hash.params.map(param => {
+          return {
+            name: param.name,
+            dataType: param.dataType || 'STRING',
+            collectionType: param.collectionType || 'SINGLE',
+            defaultValue: param.defaultValue || null,
+            displayName: param.displayName || param.name
+          };
+        });
         break;
     }
 
-    this.setState(this.state);
+    this.setState(newState);
   }
 
   runSavedQuery (id) {
@@ -472,29 +506,17 @@ class QueryBox extends React.Component {
   }
 
   closeParamBox () {
-    this.setState({params: null, clientMessage: null});
+    this.cancel();
   }
 
-  saveParams (params) { // contains parameters, description et all
-    this.state.params = assign(this.state.params, params.parameters);
-    this.state.runImmediately = params.runImmediately;
-
-    // edit or save new, only state variable will tell
+  saveParams (params) {
     !this.state.isModeEdit ? this.saveQuery(params) : this.updateQuery(params);
   }
 
   cancel () {
     setCode('');
     this.refs.queryName.getDOMNode().value = '';
-    this.setState({
-      clientMessage: null, // to give user instant ack
-      isRunQueryDisabled: true,
-      serverMessage: null, // type (success or error), text as keys
-      isCollapsed: false,
-      params: null,
-      isModeEdit: false,
-      savedQueryId: null
-    });
+    this.setState(getEmptyState());
   }
 }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/2870be7c/lens-ui/app/components/QueryParamRowComponent.js
----------------------------------------------------------------------
diff --git a/lens-ui/app/components/QueryParamRowComponent.js b/lens-ui/app/components/QueryParamRowComponent.js
index fb5f5da..89c8a8e 100644
--- a/lens-ui/app/components/QueryParamRowComponent.js
+++ b/lens-ui/app/components/QueryParamRowComponent.js
@@ -20,73 +20,53 @@
 import React from 'react';
 import { Multiselect } from 'react-widgets';
 import assign from 'object-assign';
+import _ from 'lodash';
 import 'react-widgets/dist/css/core.css';
 import 'react-widgets/dist/css/react-widgets.css';
 
-// returns true/false if the default value is correct
-// and also returns the value
-function validate (val, dataType) {
-  // if (dataType === 'NUMBER' && !window.isNaN(val)) return [true, val];
-  // if (dataType === 'BOOLEAN' && (val === 'true' || val === 'false')) {
-  //   return [true, val];
-  // }
-  // if (dataType === 'STRING' && typeof val === 'string') return [true, val];
-
-  return [true, val];
-}
-
 class QueryParamRow extends React.Component {
   constructor (props) {
     super(props);
 
-    // state being decided by mode of use of this component
-    // `entryMode` is used by the SavedQueryPreviewComponent,
-    // to just add values and run the saved query.
-    if (props.entryMode) {
-      this.state = assign({}, props.param);
-    } else {
-      this.state = assign({}, props.param, {
-        dataType: 'STRING',
-        collectionType: 'SINGLE',
-        displayName: props.param.name
-      });
-    }
-
-    this.changeDisplayName = this.changeDisplayName.bind(this);
-    this.changeDataType = this.changeDataType.bind(this);
-    this.changeCollectionType = this.changeCollectionType.bind(this);
-    this.changeDefaultValue = this.changeDefaultValue.bind(this);
-    this.addDefaultValue = this.addDefaultValue.bind(this);
-    this.preventEnter = this.preventEnter.bind(this);
-  }
+    this.state = {
+      paramChange: assign({}, props.param)
+    };
 
-  componentWillReceiveProps (props) {
-    this.setState(assign({}, props.param));
+    this._handleChange = this._handleChange.bind(this);
+    this.getDefaultValueInput = this.getDefaultValueInput.bind(this);
   }
 
-  componentWillUpdate (props, state) {
-    this.props.updateParam({
-      name: props.param.name,
-      param: state
-    });
+  shouldComponentUpdate (newProps, newState) {
+    return !_.isEqual(this.state, newState);
   }
 
   render () {
     let param = this.props.param;
 
+    let collectionType = this.state.paramChange.collectionType;
+    let dataType = this.state.paramChange.dataType;
+
+    let collectionTypeBox = (<select className='form-control' required
+      defaultValue='SINGLE' onChange={this._handleChange('ChangeCollectionType')}>
+        <option value='SINGLE'>Single</option>
+        <option value='MULTIPLE'>Multiple</option>
+      </select>);
+
+    let valueBox = this.getDefaultValueInput(collectionType, dataType);
+
     return (
       <tr>
         <td>{param.name}</td>
         <td>
           { this.props.entryMode ? param.displayName :
             <input type='text' className='form-control' required defaultValue={param.name}
-              placeholder='display name' onChange={this.changeDisplayName}/>
+              placeholder='display name' onChange={this._handleChange('ChangeDisplayName')}/>
           }
         </td>
         <td>
           { this.props.entryMode ? param.dataType :
-            <select className='form-control' defaultValue='STRING'
-              onChange={this.changeDataType}>
+            <select className='form-control' defaultValue={dataType || 'STRING'}
+              onChange={this._handleChange('ChangeDataType')}>
               <option value='STRING'>String</option>
               <option value='NUMBER'>Number</option>
               <option value='BOOLEAN'>Boolean</option>
@@ -95,78 +75,88 @@ class QueryParamRow extends React.Component {
         </td>
         <td>
           { this.props.entryMode ? param.collectionType :
-            <select className='form-control' required defaultValue='SINGLE'
-              onChange={this.changeCollectionType}>
-              <option value='SINGLE'>Single</option>
-              <option value='MULTIPLE'>Multiple</option>
-            </select>
+            collectionTypeBox
           }
 
         </td>
         <td>
-          { !this.props.entryMode && (this.state.collectionType === 'SINGLE' ?
-            <input type='text' className='form-control' required value={this.state.defaultValue}
-              placeholder='default value' onChange={this.changeDefaultValue}/> :
-            <Multiselect messages={{createNew: 'Enter to add'}}
-              onCreate={this.addDefaultValue}
-              defaultValue={this.state.defaultValue} onKeyDown={this.preventEnter}
-            />
-          )}
-
-          { this.props.entryMode && (param.collectionType === 'SINGLE' ?
-            <input type='text' className='form-control' required value={this.state.defaultValue}
-              placeholder='default value' onChange={this.changeDefaultValue}/> :
-            <Multiselect messages={{createNew: 'Enter to add'}}
-               onCreate={this.addDefaultValue}
-              defaultValue={this.state.defaultValue} onKeyDown={this.preventEnter}
-            />
-          )}
+          {valueBox}
         </td>
       </tr>
     );
   }
 
-  // these methods change the default values
-  // called by normal input
-  changeDefaultValue (e) {
-    let val = validate(e.target.value, this.state.dataType);
-
-    if (val[0]) this.setState({defaultValue: val[1]});
+  _handleChange (elementType) {
+    return (arg) => {
+      let paramChange;
+      let state = _.assign({}, this.state.paramChange);
+      let val;
+      switch (elementType) {
+        case 'ChangeMultiselect':
+          paramChange = _.assign({}, state, {defaultValue: arg});
+          break;
+
+        case 'ChangeDefaultTextValue':
+          paramChange = _.assign({}, state, {defaultValue: arg.target.value});
+          break;
+
+        case 'AddItemInMultiSelect':
+          this.state.paramChange.defaultValue.push(arg);
+          paramChange = _.assign({}, this.state.paramChange, {
+            //defaultValue: [...this.state.paramChange.defaultValue, item]
+          });
+          break;
+
+        case 'ChangeDataType':
+          val = this.state.paramChange.collectionType === 'SINGLE' ? null : [];
+          paramChange = _.assign({}, state, {
+            dataType: arg.target.value,
+            defaultValue: val,
+          });
+          break;
+
+        case 'ChangeCollectionType':
+          val = arg.target.value === 'MULTIPLE' ? [] : null;
+          paramChange = _.assign({}, state, {
+            collectionType: arg.target.value,
+            defaultValue: val
+          });
+          break;
+
+        case 'ChangeDisplayName':
+          paramChange = _.assign({}, state, {displayName: arg.target.value});
+          break;
+      }
+
+      this.setState({paramChange});
+      this.props.saveParamChanges(paramChange);
+    };
   }
 
-  // called my multiselect
-  addDefaultValue (item) {
-    let val = validate(item, this.state.dataType);
-
-    if (val[0]) {
-      this.state.defaultValue.push(val[1]);
-      this.setState(this.state);
-    }
-  }
 
   preventEnter (e) {
     if (e.keyCode == 13) e.preventDefault();
   }
 
-  changeDataType (e) {
-    let val = this.state.collectionType === 'SINGLE' ? null : [];
-    this.setState({dataType: e.target.value, defaultValue: val});
-  }
-
-  changeCollectionType (e) {
-    let val = e.target.value === 'MULTIPLE' ? [] : null;
-    this.setState({defaultValue: val});
-    this.setState({collectionType: e.target.value});
-  }
-
-  changeDisplayName (e) {
-    this.setState({displayName: e.target.value});
+  getDefaultValueInput (collectionType, dataType) {
+    let valueBox = null;
+    if (collectionType === 'SINGLE') {
+      valueBox = <input type='text' className='form-control' required value={this.state.paramChange.defaultValue}
+        placeholder='default value' onChange={this._handleChange('ChangeDefaultTextValue')}/>;
+    } else if (collectionType === 'MULTIPLE') {
+      valueBox = <Multiselect messages={{createNew: 'Enter to add'}}
+         onCreate={this._handleChange('AddItemInMultiSelect')} data={this.state.paramChange.defaultValue}
+         onChange={this._handleChange('ChangeMultiselect')}
+        value={this.state.paramChange.defaultValue} onKeyDown={this.preventEnter}
+      />;
+    }
+    return valueBox;
   }
 }
 
 QueryParamRow.propTypes = {
   param: React.PropTypes.object.isRequired,
-  updateParam: React.PropTypes.func.isRequired,
+  saveParamChanges: React.PropTypes.func.isRequired,
   entryMode: React.PropTypes.bool
 };
 

http://git-wip-us.apache.org/repos/asf/lens/blob/2870be7c/lens-ui/app/components/QueryParamsComponent.js
----------------------------------------------------------------------
diff --git a/lens-ui/app/components/QueryParamsComponent.js b/lens-ui/app/components/QueryParamsComponent.js
index a49e338..c3325c8 100644
--- a/lens-ui/app/components/QueryParamsComponent.js
+++ b/lens-ui/app/components/QueryParamsComponent.js
@@ -26,28 +26,35 @@ import QueryParamRow from './QueryParamRowComponent';
 class QueryParams extends React.Component {
   constructor (props) {
     super(props);
-    this.state = {description: '', childrenParams: {}, runImmediately: false};
+    this.state = {
+      paramChanges: [],
+      runImmediately: false,
+      description: props.description
+    };
 
     this.close = this.close.bind(this);
     this.save = this.save.bind(this);
-    this.update = this.update.bind(this);
     this.handleChange = this.handleChange.bind(this);
     this.handleCheck = this.handleCheck.bind(this);
-    this._getChildrenParams = this._getChildrenParams.bind(this);
+    this.saveParamChanges = this.saveParamChanges.bind(this);
   }
 
   componentWillReceiveProps (props) {
-    if (!_.isEqual(props.params, this.props.params)) {
-      this.state.childrenParams = {};
-    }
+    this.setState({description: props.description, paramChanges: []});
   }
 
   render () {
-    let params = this.props.params && this.props.params.map((param, index) => {
-      return <QueryParamRow key={param.name} param={param} updateParam={this.update}/>;
-    });
-
-    if (!params) return null;
+    let propParams = this.props.params;
+    if (!propParams) return null;
+
+    let changedParams = this.state.paramChanges;
+    let params = this.mergeParamChanges(propParams, changedParams)
+      .map(param => {
+        return (
+          <QueryParamRow key={param.name} param={param}
+            saveParamChanges={this.saveParamChanges} />
+        );
+      });
 
     return (
       <form onSubmit={this.save} style={{padding: '10px', boxShadow: '2px 2px 2px 2px grey',
@@ -72,7 +79,7 @@ class QueryParams extends React.Component {
         <div className='form-group'>
           <label className='sr-only' htmlFor='queryDescription'>Description</label>
           <input type='text' className='form-control' style={{fontWeight: 'normal'}}
-            onChange={this.handleChange} id='queryDescription'
+            onChange={this.handleChange} id='queryDescription' value={this.state.description}
             placeholder='(Optional description) e.g. This awesome query does magic along with its job.'
           />
         </div>
@@ -93,7 +100,8 @@ class QueryParams extends React.Component {
 
   save (e) {
     e.preventDefault();
-    var parameters = this._getChildrenParams();
+    // merges the initial props and the delta changes done by child components
+    var parameters = this.mergeParamChanges(this.props.params, this.state.paramChanges);
     this.props.saveParams({
       parameters: parameters,
       description: this.state.description,
@@ -101,23 +109,39 @@ class QueryParams extends React.Component {
     });
   }
 
-  _getChildrenParams () {
-    return Object.keys(this.state.childrenParams).map(name => {
-      return this.state.childrenParams[name];
-    });
-  }
-
   handleChange (e) {
-    this.setState({description: e.target.value});
+    this.setState({ description: e.target.value });
   }
 
   handleCheck (e) {
-    this.setState({runImmediately: e.target.checked});
+    this.setState({ runImmediately: e.target.checked });
+  }
+
+  mergeParamChanges (original = [], changes = []) {
+    return original.map(originalParam => {
+      let change = changes.filter(changedParam => {
+        return changedParam.name === originalParam.name;
+      });
+      return _.assign({}, originalParam, change[0]);
+    });
   }
 
-  // called by the child component {name, param}
-  update (param) {
-    this.state.childrenParams[param.name] = param.param;
+  saveParamChanges (changedParam) {
+    // getting the param from the paramChanges state.
+    var param = this.state.paramChanges.filter(param => {
+      return param.name === changedParam.name;
+    })[0];
+
+    // apply the changedParam over the above param.
+    var newParam = _.assign({}, param, changedParam);
+
+    // getting all the other changes except the current as
+    // we want to over-write it
+    var newChangedParams = this.state.paramChanges.filter(param => {
+      return param.name !== newParam.name;
+    });
+
+    this.setState({paramChanges: [...newChangedParams, newParam]});
   }
 }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/2870be7c/lens-ui/app/components/SavedQueryPreviewComponent.js
----------------------------------------------------------------------
diff --git a/lens-ui/app/components/SavedQueryPreviewComponent.js b/lens-ui/app/components/SavedQueryPreviewComponent.js
index 4f9459a..eea1e47 100644
--- a/lens-ui/app/components/SavedQueryPreviewComponent.js
+++ b/lens-ui/app/components/SavedQueryPreviewComponent.js
@@ -20,6 +20,7 @@
 import React from 'react';
 import { Link } from 'react-router';
 import CodeMirror from 'codemirror';
+import _ from 'lodash';
 import 'codemirror/mode/sql/sql.js';
 import 'codemirror/addon/runmode/runmode.js';
 
@@ -30,13 +31,16 @@ import UserStore from '../stores/UserStore';
 class SavedQueryPreview extends React.Component {
   constructor (props) {
     super(props);
-    this.state = { showDetail: false, queryParams: {} };
+    this.state = {
+      showDetail: false,
+      queryParams: props.query.parameters.reduce((prev, curr) => {
+        prev[curr.name] = curr;
+        return prev;
+      }, {})
+    };
     this.toggleQueryDetails = this.toggleQueryDetails.bind(this);
     this.runSavedQuery = this.runSavedQuery.bind(this);
     this.update = this.update.bind(this);
-    this.props.query && this.props.query.parameters.forEach(param => {
-      this.state.queryParams[param.name] = param;
-    });
   }
 
   render () {
@@ -58,7 +62,7 @@ class SavedQueryPreview extends React.Component {
 
     let params = query && query.parameters.map(param => {
       return <QueryParamRowComponent param={param} entryMode={true}
-        updateParam={this.update}/>;
+        saveParamChanges={this.update}/>;
     });
 
     let paramsTable = !params.length ? null :
@@ -114,15 +118,17 @@ class SavedQueryPreview extends React.Component {
   }
 
   update (param) {
-    this.state.queryParams[param.name] = param.param;
+    this.setState({
+      queryParams: _.assign({}, this.state.queryParams, {[param.name]: param})
+    });
   }
 
   runSavedQuery () {
     let secretToken = UserStore.getUserDetails().secretToken;
-    let parameters = Object.keys(this.state.queryParams).map(name => {
-      let object = {};
-      object[name] = this.state.queryParams[name].defaultValue;
-      return object;
+    let parameters = Object.keys(this.state.queryParams).map(paramName => {
+      return {
+        [paramName]: this.state.queryParams[paramName].defaultValue
+      };
     });
     AdhocQueryActions.runSavedQuery(secretToken, this.props.query.id, parameters);
   }


[48/50] [abbrv] lens git commit: LENS-836: Query commands in CLI should take default value for query handle as the last executed query

Posted by sh...@apache.org.
LENS-836: Query commands in CLI should take default value for query handle as the last executed query


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/7a89db13
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/7a89db13
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/7a89db13

Branch: refs/heads/LENS-581
Commit: 7a89db13e74984de81d840dc015c4ba59471d785
Parents: 04f5a82
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Thu Dec 24 13:19:12 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Thu Dec 24 13:19:12 2015 +0530

----------------------------------------------------------------------
 .../lens/cli/commands/LensQueryCommands.java    | 76 ++++++++++++--------
 .../apache/lens/cli/TestLensQueryCommands.java  |  7 +-
 .../java/org/apache/lens/client/LensClient.java |  9 +--
 .../org/apache/lens/client/LensStatement.java   | 12 +---
 src/site/apt/user/cli.apt                       | 10 +--
 5 files changed, 62 insertions(+), 52 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/7a89db13/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
index e3c08ff..a29600d 100644
--- a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
+++ b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
@@ -61,6 +61,8 @@ import com.google.common.base.Joiner;
     + "  <<<query execute cube select id,name from dim_table where name != \"\"first\"\">>>,\n"
     + "  will be parsed as <<<cube select id,name from dim_table where name != \"first\">>>")
 public class LensQueryCommands extends BaseLensCommand {
+  private static final String DEFAULT_QUERY_HANDLE_DESCRIPTION =
+    "If not provided, takes last query handle interacted with.";
 
   /**
    * Execute query.
@@ -110,7 +112,6 @@ public class LensQueryCommands extends BaseLensCommand {
    */
   private String formatResultSet(LensClient.LensClientResultSetWithStats rs) {
     StringBuilder b = new StringBuilder();
-    int numRows = 0;
     if (rs.getResultSet() != null) {
       QueryResultSetMetadata resultSetMetadata = rs.getResultSet().getResultSetMetadata();
       for (ResultColumn column : resultSetMetadata.getColumns()) {
@@ -125,7 +126,7 @@ public class LensQueryCommands extends BaseLensCommand {
         PersistentQueryResult temp = (PersistentQueryResult) r;
         b.append("Results of query stored at : ").append(temp.getPersistedURI()).append("  ");
         if (null != temp.getNumRows()) {
-          b.append(temp.getNumRows() + " rows ");
+          b.append(temp.getNumRows()).append(" rows ");
         }
       }
     }
@@ -139,20 +140,33 @@ public class LensQueryCommands extends BaseLensCommand {
     return b.toString();
   }
 
+  public String getOrDefaultQueryHandleString(String queryHandleString) {
+    if (queryHandleString != null) {
+      return queryHandleString;
+    }
+    if (getClient().getStatement().getQuery() != null) {
+      return getClient().getStatement().getQueryHandleString();
+    }
+    throw new IllegalArgumentException("Query handle not provided and no queries interacted with in the session.");
+  }
+
   /**
    * Gets the status.
    *
    * @param qh the qh
    * @return the status
    */
-  @CliCommand(value = "query status", help = "Fetch status of executed query having query handle <query_handle>")
+  @CliCommand(value = "query status",
+    help = "Fetch status of executed query having query handle <query_handle>. " + DEFAULT_QUERY_HANDLE_DESCRIPTION)
   public String getStatus(
-    @CliOption(key = {"", "query_handle"}, mandatory = true, help = "<query_handle>") String qh) {
-    QueryStatus status = getClient().getQueryStatus(new QueryHandle(UUID.fromString(qh)));
+    @CliOption(key = {"", "query_handle"}, mandatory = false, help = "<query_handle>") String qh) {
+    qh = getOrDefaultQueryHandleString(qh);
+    QueryHandle handle = QueryHandle.fromString(qh);
+    QueryStatus status = getClient().getQueryStatus(handle);
     if (status == null) {
-      return "Unable to find status for " + qh;
+      return "Unable to find status for " + handle;
     }
-    return status.toString();
+    return "Query Handle: " + qh + "\n" + status.toString();
   }
 
   /**
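With getOrDefaultQueryHandleString in place, the handle argument becomes optional across the query commands; an illustrative session (prompt and output abbreviated, not actual CLI output) might look like:

      lens-shell> query execute cube select id,name from test_dim --async true
      lens-shell> query status      // no handle given; defaults to the handle above
      lens-shell> query results     // same default applies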
@@ -161,15 +175,15 @@ public class LensQueryCommands extends BaseLensCommand {
    * @param qh the qh
    * @return the query
    */
-  @CliCommand(value = "query details", help = "Get query details of query with handle <query_handle>")
+  @CliCommand(value = "query details",
+    help = "Get query details of query with handle <query_handle>." + DEFAULT_QUERY_HANDLE_DESCRIPTION)
   public String getDetails(
-    @CliOption(key = {"", "query_handle"}, mandatory = true, help
-      = "<query_handle>") String qh) {
+    @CliOption(key = {"", "query_handle"}, mandatory = false, help = "<query_handle>") String qh) {
+    qh = getOrDefaultQueryHandleString(qh);
     LensQuery query = getClient().getQueryDetails(qh);
     if (query == null) {
       return "Unable to find query for " + qh;
     }
-
     try {
       return formatJson(mapper.writer(pp).writeValueAsString(query));
     } catch (IOException e) {
@@ -185,10 +199,11 @@ public class LensQueryCommands extends BaseLensCommand {
    * @throws LensAPIException
    * @throws UnsupportedEncodingException the unsupported encoding exception
    */
-  @CliCommand(value = "query explain", help = "Explain execution plan of query <query-string>. "
+  @CliCommand(value = "query explain",
+    help = "Explain execution plan of query <query-string>. "
       + "Can optionally save the plan to a file by providing <save_location>")
-  public String explainQuery(@CliOption(key = { "", "query" }, mandatory = true, help = "<query-string>") String sql,
-      @CliOption(key = { "save_location" }, mandatory = false, help = "<save_location>") final File path)
+  public String explainQuery(@CliOption(key = {"", "query"}, mandatory = true, help = "<query-string>") String sql,
+    @CliOption(key = {"save_location"}, mandatory = false, help = "<save_location>") final File path)
     throws IOException, LensAPIException {
     PrettyPrintable cliOutput;
 
@@ -197,7 +212,7 @@ public class LensQueryCommands extends BaseLensCommand {
       if (path != null && StringUtils.isNotBlank(path.getPath())) {
         String validPath = getValidPath(path, false, false);
         try (OutputStreamWriter osw = new OutputStreamWriter(new FileOutputStream(validPath),
-            Charset.defaultCharset())) {
+          Charset.defaultCharset())) {
           osw.write(plan.getPlanString());
         }
         return "Saved to " + validPath;
@@ -250,9 +265,10 @@ public class LensQueryCommands extends BaseLensCommand {
    * @param qh the qh
    * @return the string
    */
-  @CliCommand(value = "query kill", help = "Kill query with handle <query_handle>")
-  public String killQuery(
-    @CliOption(key = {"", "query_handle"}, mandatory = true, help = "<query_handle>") String qh) {
+  @CliCommand(value = "query kill", help = "Kill query with handle <query_handle>." + DEFAULT_QUERY_HANDLE_DESCRIPTION)
+  public String killQuery(@CliOption(key = {"", "query_handle"}, mandatory = false, help = "<query_handle>") String
+    qh) {
+    qh = getOrDefaultQueryHandleString(qh);
     boolean status = getClient().killQuery(new QueryHandle(UUID.fromString(qh)));
     if (status) {
       return "Successfully killed " + qh;
@@ -268,19 +284,19 @@ public class LensQueryCommands extends BaseLensCommand {
    * @return the query results
    */
   @CliCommand(value = "query results",
-    help = "get results of query with query handle <query_handle>. If async is false "
-      + "then wait till the query execution is completed, it's by default true. "
+    help = "get results of query with query handle <query_handle>. " + DEFAULT_QUERY_HANDLE_DESCRIPTION
+      + "If async is false then wait till the query execution is completed, it's by default true. "
       + "Can optionally save the results to a file by providing <save_location>.")
   public String getQueryResults(
-    @CliOption(key = {"", "query_handle"}, mandatory = true, help = "<query_handle>") String qh,
+    @CliOption(key = {"", "query_handle"}, mandatory = false, help = "<query_handle>") String qh,
     @CliOption(key = {"save_location"}, mandatory = false, help = "<save_location>") final File path,
     @CliOption(key = {"async"}, mandatory = false, unspecifiedDefaultValue = "true",
       help = "<async>") boolean async) {
+    qh = getOrDefaultQueryHandleString(qh);
     QueryHandle queryHandle = new QueryHandle(UUID.fromString(qh));
     LensClient.LensClientResultSetWithStats results;
     String location = path != null ? path.getPath() : null;
     try {
-      String prefix = "";
       if (StringUtils.isNotBlank(location)) {
         location = getValidPath(path, true, true);
         Response response = getClient().getHttpResults(queryHandle);
@@ -289,7 +305,7 @@ public class LensQueryCommands extends BaseLensCommand {
           String fileName = disposition.split("=")[1].trim();
           location = getValidPath(new File(location + File.separator + fileName), false, false);
           try (InputStream stream = response.readEntity(InputStream.class);
-            FileOutputStream outStream = new FileOutputStream(new File(location))) {
+               FileOutputStream outStream = new FileOutputStream(new File(location))) {
             IOUtils.copy(stream, outStream);
           }
           return "Saved to " + location;
@@ -364,7 +380,7 @@ public class LensQueryCommands extends BaseLensCommand {
       StringBuilder sb = new StringBuilder()
         .append("User query:").append(prepared.getUserQuery()).append("\n")
         .append("Prepare handle:").append(prepared.getPrepareHandle()).append("\n")
-        .append("User:" + prepared.getPreparedUser()).append("\n")
+        .append("User:").append(prepared.getPreparedUser()).append("\n")
         .append("Prepared at:").append(prepared.getPreparedTime()).append("\n")
         .append("Selected driver :").append(prepared.getSelectedDriverName()).append("\n")
         .append("Driver query:").append(prepared.getDriverQuery()).append("\n");
@@ -454,19 +470,17 @@ public class LensQueryCommands extends BaseLensCommand {
    *           the unsupported encoding exception
    * @throws LensAPIException
    */
-  @CliCommand(value = "prepQuery explain", help = "Explain and prepare query <query-string>. "
-      + "Can optionally provide <query-name>")
+  @CliCommand(value = "prepQuery explain",
+    help = "Explain and prepare query <query-string>. Can optionally provide <query-name>")
   public String explainAndPrepare(
 
-  @CliOption(key = { "", "query" }, mandatory = true, help = "<query-string>") String sql,
-      @CliOption(key = { "name" }, mandatory = false, help = "<query-name>") String queryName)
+    @CliOption(key = {"", "query"}, mandatory = true, help = "<query-string>") String sql,
+    @CliOption(key = {"name"}, mandatory = false, help = "<query-name>") String queryName)
     throws UnsupportedEncodingException, LensAPIException {
     PrettyPrintable cliOutput;
     try {
       QueryPlan plan = getClient().explainAndPrepare(sql, queryName).getData();
-      StringBuilder planStr = new StringBuilder(plan.getPlanString());
-      planStr.append("\n").append("Prepare handle:").append(plan.getPrepareHandle());
-      return planStr.toString();
+      return plan.getPlanString() + "\n" + "Prepare handle:" + plan.getPrepareHandle();
     } catch (final LensAPIException e) {
       BriefError briefError = new BriefError(e.getLensAPIErrorCode(), e.getLensAPIErrorMessage());
       cliOutput = new IdBriefErrorTemplate(IdBriefErrorTemplateKey.REQUEST_ID, e.getLensAPIRequestId(), briefError);

http://git-wip-us.apache.org/repos/asf/lens/blob/7a89db13/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java b/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
index 6ebfff7..2de3cc1 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
@@ -102,6 +102,7 @@ public class TestLensQueryCommands extends LensCliApplicationTest {
     String sql = "cube select id,name from test_dim";
     String result = qCom.executeQuery(sql, false, "testQuery2");
     assertTrue(result.contains("1\tfirst"), result);
+
   }
 
   /**
@@ -237,11 +238,11 @@ public class TestLensQueryCommands extends LensCliApplicationTest {
     String[] resultSplits = result.split("\n");
     // assert on the number of queries
     assertEquals(String.valueOf(resultSplits.length - 1), resultSplits[resultSplits.length - 1].split(": ")[1]);
-
+    assertEquals(qCom.getOrDefaultQueryHandleString(null), qh);
     QueryStatus queryStatus = qCom.getClient().getQueryStatus(qh);
     while (!queryStatus.finished()) {
       if (queryStatus.launched()) {
-        String details = qCom.getDetails(qh);
+        String details = qCom.getDetails(null);
         assertTrue(details.contains("driverQuery"));
       }
       Thread.sleep(1000);
@@ -256,7 +257,7 @@ public class TestLensQueryCommands extends LensCliApplicationTest {
     String details = qCom.getDetails(qh);
     assertTrue(details.contains("driverQuery"));
 
-    result = qCom.getQueryResults(qh, null, true);
+    result = qCom.getQueryResults(null, null, true);
     assertTrue(result.contains("1\tfirst"));
 
     downloadResult(qCom, qh, result);

http://git-wip-us.apache.org/repos/asf/lens/blob/7a89db13/lens-client/src/main/java/org/apache/lens/client/LensClient.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/LensClient.java b/lens-client/src/main/java/org/apache/lens/client/LensClient.java
index 8f197e4..f7f99c7 100644
--- a/lens-client/src/main/java/org/apache/lens/client/LensClient.java
+++ b/lens-client/src/main/java/org/apache/lens/client/LensClient.java
@@ -57,6 +57,7 @@ public class LensClient {
   private LensConnection connection;
   private final HashMap<QueryHandle, LensStatement> statementMap =
     Maps.newHashMap();
+  @Getter
   private final LensStatement statement;
 
   @Getter
@@ -192,11 +193,11 @@ public class LensClient {
   }
 
   public QueryStatus getQueryStatus(QueryHandle query) {
-    return new LensStatement(connection).getQuery(query).getStatus();
+    return statement.getQuery(query).getStatus();
   }
 
   public LensQuery getQueryDetails(QueryHandle handle) {
-    return new LensStatement(connection).getQuery(handle);
+    return statement.getQuery(handle);
   }
 
   public QueryStatus getQueryStatus(String q) {
@@ -208,7 +209,7 @@ public class LensClient {
   }
 
   public LensAPIResult<QueryPlan> getQueryPlan(String q) throws LensAPIException {
-    return new LensStatement(connection).explainQuery(q);
+    return statement.explainQuery(q);
   }
 
   public boolean killQuery(QueryHandle q) {
@@ -228,7 +229,7 @@ public class LensClient {
 
   public List<QueryHandle> getQueries(String state, String queryName, String user, String driver, long fromDate,
     long toDate) {
-    return new LensStatement(connection).getAllQueries(state, queryName, user, driver, fromDate, toDate);
+    return statement.getAllQueries(state, queryName, user, driver, fromDate, toDate);
   }
 
   private void connectToLensServer() {
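
The LensClient changes above replace the per-call new LensStatement(connection) instances with the single long-lived statement field, now exposed through Lombok's @Getter. Besides avoiding a throwaway object per request, this keeps whatever state the statement tracks, such as the most recently run query, in one place, which the CLI's last-handle defaulting relies on. A condensed sketch of the pattern (illustrative only):

    // One shared statement reused across calls instead of one per call.
    @Getter
    private final LensStatement statement;

    public QueryStatus getQueryStatus(QueryHandle handle) {
      return statement.getQuery(handle).getStatus(); // shared state stays consistent
    }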

http://git-wip-us.apache.org/repos/asf/lens/blob/7a89db13/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/LensStatement.java b/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
index 71caa48..8de7708 100644
--- a/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
+++ b/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
@@ -40,9 +40,12 @@ import org.glassfish.jersey.media.multipart.FormDataBodyPart;
 import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
 import org.glassfish.jersey.media.multipart.FormDataMultiPart;
 
+import lombok.RequiredArgsConstructor;
+
 /**
  * Top level class which is used to execute lens queries.
  */
+@RequiredArgsConstructor
 public class LensStatement {
 
   /** The connection. */
@@ -52,15 +55,6 @@ public class LensStatement {
   private LensQuery query;
 
   /**
-   * Instantiates a new lens statement.
-   *
-   * @param connection the connection
-   */
-  public LensStatement(LensConnection connection) {
-    this.connection = connection;
-  }
-
-  /**
    * Execute.
    *
    * @param sql                    the sql
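
Lombok's @RequiredArgsConstructor replaces the hand-written constructor deleted above: it generates a constructor taking every final (and @NonNull) field in declaration order. Assuming connection is the class's only such field, the generated constructor is roughly:

    // Approximation of what @RequiredArgsConstructor generates for LensStatement.
    public LensStatement(LensConnection connection) {
      this.connection = connection;
    }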

http://git-wip-us.apache.org/repos/asf/lens/blob/7a89db13/src/site/apt/user/cli.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/user/cli.apt b/src/site/apt/user/cli.apt
index 3db53c4..65380e6 100644
--- a/src/site/apt/user/cli.apt
+++ b/src/site/apt/user/cli.apt
@@ -347,19 +347,19 @@ User CLI Commands
 *--+--+
 |prepQuery prepare [--query] \<query-string\> [--name \<query-name\>]|Prepare query <<<query-string>>> and return prepare handle. Can optionally provide <<<query-name>>>|
 *--+--+
-|query details [--query_handle] \<query_handle\>|Get query details of query with handle <<<query_handle>>>|
+|query details [[--query_handle] \<query_handle\>]|Get query details of query with handle <<<query_handle>>>. If not provided, takes last query handle interacted with.|
 *--+--+
 |query execute [--query] \<query-string\> [--async \<async\>] [--name \<query-name\>]|Execute query <<<query-string>>>. If <<<async>>> is true, The query is launched in async manner and query handle is returned. It's by default false. <<<query name>>> can also be provided, though not required|
 *--+--+
 |query explain [--query] \<query-string\> [--save_location \<save_location\>]|Explain execution plan of query <<<query-string>>>. Can optionally save the plan to a file by providing <<<save_location>>>|
 *--+--+
-|query kill [--query_handle] \<query_handle\>|Kill query with handle <<<query_handle>>>|
+|query kill [[--query_handle] \<query_handle\>]|Kill query with handle <<<query_handle>>>. If not provided, takes last query handle interacted with.|
 *--+--+
-|query list [--state \<query-status\>] [--name \<query-name\>] [--user \<user-who-submitted-query\>] [--driver \<driver-where-query-was-executed\>] [--fromDate \<submission-time-is-after\>] [--toDate \<submission-time-is-before\>]|Get all queries. Various filter options can be provided(optionally),  as can be seen from the command syntax|
+|query list [--state \<query-status\>] [--name \<query-name\>] [--user \<user-who-submitted-query\>] [--driver \<driver-where-query-ran\>] [--fromDate \<submission-time-is-after\>] [--toDate \<submission-time-is-before\>]|Get all queries. Various filter options can be provided (optionally), as can be seen from the command syntax|
 *--+--+
-|query results [--query_handle] \<query_handle\> [--save_location \<save_location\>] [--async \<async\>]|get results of query with query handle <<<query_handle>>>. If async is false then wait till the query execution is completed, it's by default true. Can optionally save the results to a file by providing <<<save_location>>>.|
+|query results [[--query_handle] \<query_handle\>] [--save_location \<save_location\>] [--async \<async\>]|Get results of query with query handle <<<query_handle>>>. If not provided, takes last query handle interacted with. If async is false, waits till the query execution is completed; it's true by default. Can optionally save the results to a file by providing <<<save_location>>>.|
 *--+--+
-|query status [--query_handle] \<query_handle\>|Fetch status of executed query having query handle <<<query_handle>>>|
+|query status [[--query_handle] \<query_handle\>]|Fetch status of executed query having query handle <<<query_handle>>>. If not provided, takes last query handle interacted with.|
 *--+--+
   <<Lens Query Commands>>
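
For reference, an illustrative session using the new optional handle (the prompt, handle value and responses are made up; only the command syntax comes from the table above):

    lens-shell> query execute cube select id,name from test_dim --async true
    (server returns a query handle, e.g. 0cf7fd47-...; value is illustrative)
    lens-shell> query status
    (status of the query above: no handle was passed, so the last one is used)
    lens-shell> query results --save_location /tmp/out
    (results of the same query, saved to /tmp/out)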
 


[22/50] [abbrv] lens git commit: LENS-871 : Fix Dropping any partition in dimtable is clearing latest cache for that dimtable.

Posted by sh...@apache.org.
LENS-871 : Fix Dropping any partition in dimtable is clearing latest cache for that dimtable.


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/87049563
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/87049563
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/87049563

Branch: refs/heads/LENS-581
Commit: 87049563a4cbc20cf510f6906bf67ddd330ef508
Parents: f7ab827
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Wed Nov 25 16:53:23 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Wed Nov 25 16:53:23 2015 +0530

----------------------------------------------------------------------
 .../lens/cube/metadata/CubeFactTable.java       |    2 +-
 .../lens/cube/metadata/CubeMetastoreClient.java |  273 ++-
 .../lens/cube/metadata/FactPartition.java       |    2 +-
 .../lens/cube/metadata/MetastoreUtil.java       |  122 +-
 .../org/apache/lens/cube/metadata/Storage.java  |    2 +-
 .../cube/metadata/StoragePartitionDesc.java     |    2 +-
 .../lens/cube/metadata/StorageTableDesc.java    |   21 +
 .../lens/cube/metadata/TimePartition.java       |    4 +-
 .../apache/lens/cube/metadata/UpdatePeriod.java |   42 +-
 .../org/apache/lens/cube/parse/DateUtil.java    |   13 -
 .../cube/metadata/TestCubeMetastoreClient.java  | 1740 +++++++-----------
 .../lens/cube/metadata/TestFactPartition.java   |    4 +-
 .../lens/cube/metadata/TestTimePartition.java   |    2 +-
 .../lens/cube/metadata/UpdatePeriodTest.java    |    2 +-
 .../apache/lens/cube/parse/CubeTestSetup.java   |   52 +-
 .../lens/cube/parse/TestBaseCubeQueries.java    |    4 +-
 .../cube/parse/TestBetweenTimeRangeWriter.java  |    2 +-
 .../lens/cube/parse/TestORTimeRangeWriter.java  |   14 +-
 .../apache/lens/driver/hive/TestHiveDriver.java |    2 +-
 .../apache/lens/server/api/util/LensUtil.java   |   10 +
 .../metastore/CubeMetastoreServiceImpl.java     |    8 +-
 .../apache/lens/server/metastore/JAXBUtils.java |    2 +-
 .../server/metastore/TestMetastoreService.java  |  343 ++--
 23 files changed, 1167 insertions(+), 1501 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
index a7a5bb0..d6bfb79 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
@@ -154,7 +154,7 @@ public class CubeFactTable extends AbstractCubeTable {
     List<String> partitions = new ArrayList<String>();
     Date dt = cal.getTime();
     while (dt.compareTo(toDate) < 0) {
-      String part = interval.format().format(cal.getTime());
+      String part = interval.format(cal.getTime());
       partitions.add(part);
       cal.add(interval.calendarField(), 1);
       dt = cal.getTime();
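
The change above, like the matching ones later in this commit, moves call sites from interval.format().format(date) to interval.format(date), with a symmetric updatePeriod.parse(...) appearing elsewhere in the diff. Judging from those call sites, UpdatePeriod now wraps its date format behind helpers; a plausible shape, inferred from usage rather than quoted from the source:

    // Assumed helpers on UpdatePeriod, inferred from the call sites in this commit.
    public String format(Date date) {
      return format().format(date);       // delegate to this period's DateFormat
    }

    public Date parse(String dateString) throws ParseException {
      return format().parse(dateString);  // symmetric parsing helper
    }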

http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index 1f13617..e7550ca 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@ -19,7 +19,7 @@
 
 package org.apache.lens.cube.metadata;
 
-import static org.apache.lens.cube.metadata.MetastoreUtil.getFactOrDimtableStorageTableName;
+import static org.apache.lens.cube.metadata.MetastoreUtil.*;
 
 import java.text.ParseException;
 import java.util.*;
@@ -31,6 +31,7 @@ import org.apache.lens.cube.metadata.Storage.LatestPartColumnInfo;
 import org.apache.lens.cube.metadata.timeline.PartitionTimeline;
 import org.apache.lens.cube.metadata.timeline.PartitionTimelineFactory;
 import org.apache.lens.server.api.error.LensException;
+import org.apache.lens.server.api.util.LensUtil;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -103,9 +104,9 @@ public class CubeMetastoreClient {
    * latest date for a single fact-storage table for given time dimension is the latest of the latest dates for all its
    * update periods
    *
-   * @param cube
-   * @param timeDimension
-   * @return
+   * @param cube             Cube to get latest date of
+   * @param timeDimension    time dimension
+   * @return                 latest date among all facts of cube in timeDimension
    * @throws HiveException
    * @throws LensException
    */
@@ -213,7 +214,7 @@ public class CubeMetastoreClient {
           latestPartSpec.put(timePartCol, partSpec.get(timePartCol));
           if (partSpec.equals(latestPartSpec)) {
             latestPart.getParameters().putAll(partition.getParameters());
-            latestPart.getParameters().put(MetastoreUtil.getLatestPartTimestampKey(timePartCol),
+            latestPart.getParameters().put(getLatestPartTimestampKey(timePartCol),
               partSpec.get(timePartCol));
             latestPart.getTPartition().getSd().getSerdeInfo().getParameters().putAll(
               partition.getTPartition().getSd().getSerdeInfo().getParameters());
@@ -244,12 +245,11 @@ public class CubeMetastoreClient {
       CaseInsensitiveStringHashMap<// partition column
         PartitionTimeline>>> {
     /**
-     * Returns true if all the timelines for fact-storage table are empty for all valid update periods.
      *
-     * @param fact
-     * @param storage
-     * @param partCol
-     * @return
+     * @param fact      fact
+     * @param storage   storage
+     * @param partCol   part column
+     * @return          true if all the timelines for fact-storage table are empty for all valid update periods.
      * @throws HiveException
      * @throws LensException
      */
@@ -273,22 +273,21 @@ public class CubeMetastoreClient {
      * properties, it'll get all partitions, compute timelines in memory, write back all loads timelines to table
      * properties for further usage and return them.
      *
-     * @param fact
-     * @param storage
-     * @return
+     * @param fact          fact
+     * @param storage       storage
+     * @return              all timelines for fact-storage pair. Load from properties/all partitions if needed.
      * @throws HiveException
      * @throws LensException
      */
     public TreeMap<UpdatePeriod, CaseInsensitiveStringHashMap<PartitionTimeline>> get(String fact, String storage)
       throws HiveException, LensException {
       // SUSPEND CHECKSTYLE CHECK DoubleCheckedLockingCheck
-      String storageTableName = MetastoreUtil.getStorageTableName(fact, Storage.getPrefix(storage));
+      String storageTableName = getStorageTableName(fact, Storage.getPrefix(storage));
       if (get(storageTableName) == null) {
         synchronized (this) {
           if (get(storageTableName) == null) {
             Table storageTable = getTable(storageTableName);
-            if ("true".equalsIgnoreCase(storageTable.getParameters().get(
-              MetastoreUtil.getPartitionTimelineCachePresenceKey()))) {
+            if ("true".equalsIgnoreCase(storageTable.getParameters().get(getPartitionTimelineCachePresenceKey()))) {
               try {
                 loadTimelinesFromTableProperties(fact, storage);
               } catch (Exception e) {
@@ -313,7 +312,7 @@ public class CubeMetastoreClient {
       // Not found in table properties either, compute from all partitions of the fact-storage table.
       // First make sure all combinations of update period and partition column have an entry even
       // if no partitions exist
-      String storageTableName = MetastoreUtil.getStorageTableName(fact, Storage.getPrefix(storage));
+      String storageTableName = getStorageTableName(fact, Storage.getPrefix(storage));
       log.info("loading from all partitions: {}", storageTableName);
       Table storageTable = getTable(storageTableName);
       if (getCubeFact(fact).getUpdatePeriods() != null && getCubeFact(fact).getUpdatePeriods().get(
@@ -349,7 +348,7 @@ public class CubeMetastoreClient {
 
     private void loadTimelinesFromTableProperties(String fact, String storage) throws HiveException, LensException {
       // found in table properties, load from there.
-      String storageTableName = MetastoreUtil.getStorageTableName(fact, Storage.getPrefix(storage));
+      String storageTableName = getStorageTableName(fact, Storage.getPrefix(storage));
       log.info("loading from table properties: {}", storageTableName);
       for (UpdatePeriod updatePeriod : getCubeFact(fact).getUpdatePeriods().get(storage)) {
         for (String partCol : getTimePartColNamesOfTable(storageTableName)) {
@@ -362,10 +361,10 @@ public class CubeMetastoreClient {
      * Adds given partition(for storageTable, updatePeriod, partitionColum=partition) for batch addition in an
      * appropriate timeline object. Ignore if partition is not valid.
      *
-     * @param storageTable
-     * @param updatePeriod
-     * @param partitionColumn
-     * @param partition
+     * @param storageTable      storage table
+     * @param updatePeriod      update period
+     * @param partitionColumn   partition column
+     * @param partition         partition
      */
     public void addForBatchAddition(String storageTable, UpdatePeriod updatePeriod, String partitionColumn,
       String partition) {
@@ -383,9 +382,9 @@ public class CubeMetastoreClient {
      * <p></p>
      * kind of like mkdir -p
      *
-     * @param storageTable
-     * @param updatePeriod
-     * @param partitionColumn
+     * @param storageTable    storage table
+     * @param updatePeriod    update period
+     * @param partitionColumn partition column
      * @return timeline if already exists, or puts a new timeline and returns.
      */
     public PartitionTimeline ensureEntry(String storageTable, UpdatePeriod updatePeriod, String partitionColumn) {
@@ -405,7 +404,7 @@ public class CubeMetastoreClient {
     /**
      * commit all batch addition for all its timelines.
      *
-     * @param storageTable
+     * @param storageTable   storage table
      * @throws HiveException
      * @throws LensException
      */
@@ -451,7 +450,6 @@ public class CubeMetastoreClient {
       return timeline;
     }
 
-
     /** update partition timeline cache for addition of time partition */
     public void updateForAddition(String cubeTableName, String storageName, UpdatePeriod updatePeriod,
       Map<String, TreeSet<Date>> timePartSpec) throws HiveException, LensException {
@@ -490,8 +488,8 @@ public class CubeMetastoreClient {
   /**
    * Get the instance of {@link CubeMetastoreClient} corresponding to {@link HiveConf}
    *
-   * @param conf
-   * @return CubeMetastoreClient
+   * @param conf                  conf
+   * @return                      CubeMetastoreClient instance
    * @throws HiveException
    */
   public static CubeMetastoreClient getInstance(HiveConf conf) throws HiveException {
@@ -826,7 +824,7 @@ public class CubeMetastoreClient {
 
   private List<Partition> addPartitions(String factOrDimTable, String storageName, UpdatePeriod updatePeriod,
     List<StoragePartitionDesc> storagePartitionDescs) throws HiveException, LensException {
-    String storageTableName = MetastoreUtil.getStorageTableName(factOrDimTable.trim(),
+    String storageTableName = getStorageTableName(factOrDimTable.trim(),
       Storage.getPrefix(storageName.trim())).toLowerCase();
     if (getDimensionTable(factOrDimTable) != null) {
       // Adding partition in dimension table.
@@ -855,7 +853,7 @@ public class CubeMetastoreClient {
       List<Partition> partsAdded =
         getStorage(storageName).addPartitions(getClient(), factOrDimTable, updatePeriod, storagePartitionDescs, null);
       // update hive table
-      alterTablePartitionCache(MetastoreUtil.getStorageTableName(factOrDimTable, Storage.getPrefix(storageName)));
+      alterTablePartitionCache(getStorageTableName(factOrDimTable, Storage.getPrefix(storageName)));
       return partsAdded;
     }
   }
@@ -892,7 +890,7 @@ public class CubeMetastoreClient {
   /**
    * store back all timelines of given storage table to table properties
    *
-   * @param storageTableName
+   * @param storageTableName  storage table name
    * @throws HiveException
    */
   private void alterTablePartitionCache(String storageTableName) throws HiveException {
@@ -905,7 +903,7 @@ public class CubeMetastoreClient {
           entry.getValue().updateTableParams(table);
         }
       }
-      params.put(MetastoreUtil.getPartitionTimelineCachePresenceKey(), "true");
+      params.put(getPartitionTimelineCachePresenceKey(), "true");
       alterHiveTable(storageTableName, table);
     }
   }
@@ -930,14 +928,14 @@ public class CubeMetastoreClient {
         boolean makeLatest = true;
         Partition part = getLatestPart(storageTableName, partCol, nonTimeParts);
         Date pTimestamp = timePartSpecs.get(partCol).last();
-        Date latestTimestamp = MetastoreUtil.getLatestTimeStampOfDimtable(part, partCol);
+        Date latestTimestamp = getLatestTimeStampFromPartition(part, partCol);
         if (latestTimestamp != null && pTimestamp.before(latestTimestamp)) {
           makeLatest = false;
         }
 
         if (makeLatest) {
-          Map<String, String> latestParams = new HashMap<String, String>();
-          latestParams.put(MetastoreUtil.getLatestPartTimestampKey(partCol), updatePeriod.format().format(pTimestamp));
+          Map<String, String> latestParams = LensUtil.getHashMap(getLatestPartTimestampKey(partCol),
+            updatePeriod.format(pTimestamp));
           latest.latestParts.put(partCol, new LatestPartColumnInfo(latestParams));
         }
       }
@@ -970,7 +968,7 @@ public class CubeMetastoreClient {
     if (updatePeriodStr != null) {
       UpdatePeriod partInterval = UpdatePeriod.valueOf(updatePeriodStr);
       try {
-        partDate = partInterval.format().parse(partVal);
+        partDate = partInterval.parse(partVal);
       } catch (ParseException e) {
         // ignore
       }
@@ -982,17 +980,18 @@ public class CubeMetastoreClient {
     UpdatePeriod updatePeriod, Map<String, String> nonTimePartSpec)
     throws HiveException {
     // getClient().getPartitionsByNames(tbl, partNames)
-    List<Partition> partitions = null;
+    List<Partition> partitions;
     try {
       partitions = getClient().getPartitionsByFilter(hiveTable, StorageConstants.getPartFilter(nonTimePartSpec));
-      MetastoreUtil.filterPartitionsByNonTimeParts(partitions, nonTimePartSpec, timeCol);
+      filterPartitionsByUpdatePeriod(partitions, updatePeriod);
+      filterPartitionsByNonTimeParts(partitions, nonTimePartSpec, timeCol);
     } catch (TException e) {
       throw new HiveException(e);
     }
 
     // tree set contains partitions with timestamp as value for timeCol, in
     // descending order
-    TreeSet<Partition> allPartTimeVals = new TreeSet<Partition>(new Comparator<Partition>() {
+    TreeSet<Partition> allPartTimeVals = new TreeSet<>(new Comparator<Partition>() {
       @Override
       public int compare(Partition o1, Partition o2) {
         Date partDate1 = getPartDate(o1, timeColIndex);
@@ -1001,7 +1000,7 @@ public class CubeMetastoreClient {
           return -1;
         } else if (partDate1 == null && partDate2 != null) {
           return 1;
-        } else if (partDate1 == null && partDate2 == null) {
+        } else if (partDate1 == null) {
           return o2.getTPartition().compareTo(o1.getTPartition());
         } else if (!partDate2.equals(partDate1)) {
           return partDate2.compareTo(partDate1);
@@ -1025,9 +1024,8 @@ public class CubeMetastoreClient {
       Partition nextLatest = it.next();
       latest = new LatestInfo();
       latest.setPart(nextLatest);
-      Map<String, String> latestParams = new HashMap<String, String>();
-      String partVal = nextLatest.getValues().get(timeColIndex);
-      latestParams.put(MetastoreUtil.getLatestPartTimestampKey(timeCol), partVal);
+      Map<String, String> latestParams = LensUtil.getHashMap(getLatestPartTimestampKey(timeCol),
+        nextLatest.getValues().get(timeColIndex));
       latest.addLatestPartInfo(timeCol, new LatestPartColumnInfo(latestParams));
     }
     return latest;
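
The simplified comparator above keeps the same contract: partitions sort by their time-column date in descending order (newest first), partitions whose date cannot be parsed (null) sort last, and ties fall back to comparing the underlying partitions so the TreeSet never silently drops distinct entries. The final else-if no longer tests both dates for null because the two mixed cases were already handled. A condensed restatement:

    // Descending by date; nulls last; ties broken by the raw partition ordering.
    if (d1 != null && d2 == null) return -1;
    if (d1 == null && d2 != null) return 1;
    if (d1 == null) return p2.getTPartition().compareTo(p1.getTPartition()); // both null
    if (!d2.equals(d1)) return d2.compareTo(d1); // newer first
    return p2.getTPartition().compareTo(p1.getTPartition());
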
@@ -1045,16 +1043,16 @@ public class CubeMetastoreClient {
    */
   public void dropPartition(String cubeTableName, String storageName, Map<String, Date> timePartSpec,
     Map<String, String> nonTimePartSpec, UpdatePeriod updatePeriod) throws HiveException, LensException {
-    String storageTableName = MetastoreUtil.getStorageTableName(cubeTableName.trim(),
+    String storageTableName = getStorageTableName(cubeTableName.trim(),
       Storage.getPrefix(storageName.trim())).toLowerCase();
     Table hiveTable = getHiveTable(storageTableName);
     List<FieldSchema> partCols = hiveTable.getPartCols();
-    List<String> partColNames = new ArrayList<String>(partCols.size());
-    List<String> partVals = new ArrayList<String>(partCols.size());
+    List<String> partColNames = new ArrayList<>(partCols.size());
+    List<String> partVals = new ArrayList<>(partCols.size());
     for (FieldSchema column : partCols) {
       partColNames.add(column.getName());
       if (timePartSpec.containsKey(column.getName())) {
-        partVals.add(updatePeriod.format().format(timePartSpec.get(column.getName())));
+        partVals.add(updatePeriod.format(timePartSpec.get(column.getName())));
       } else if (nonTimePartSpec.containsKey(column.getName())) {
         partVals.add(nonTimePartSpec.get(column.getName()));
       } else {
@@ -1063,7 +1061,7 @@ public class CubeMetastoreClient {
     }
     if (isDimensionTable(cubeTableName)) {
       String timePartColsStr = hiveTable.getTTable().getParameters().get(MetastoreConstants.TIME_PART_COLUMNS);
-      Map<String, LatestInfo> latest = new HashMap<String, Storage.LatestInfo>();
+      Map<String, LatestInfo> latest = new HashMap<>();
       boolean latestAvailable = false;
       if (timePartColsStr != null) {
         List<String> timePartCols = Arrays.asList(StringUtils.split(timePartColsStr, ','));
@@ -1074,30 +1072,30 @@ public class CubeMetastoreClient {
           int timeColIndex = partColNames.indexOf(timeCol);
           Partition part = getLatestPart(storageTableName, timeCol, nonTimePartSpec);
 
-          boolean isLatest = true;
+          Date latestTimestamp = getLatestTimeStampFromPartition(part, timeCol);
+          Date dropTimestamp;
+          try {
+            dropTimestamp = updatePeriod.parse(updatePeriod.format(timePartSpec.get(timeCol)));
+          } catch (ParseException e) {
+            throw new HiveException(e);
+          }
           // check if partition being dropped is the latest partition
-          for (int i = 0; i < partVals.size(); i++) {
-            if (i != timeColIndex) {
-              if (!part.getValues().get(i).equals(partVals.get(i))) {
-                isLatest = false;
-                break;
+          boolean isLatest = latestTimestamp != null && dropTimestamp.equals(latestTimestamp);
+          if (isLatest) {
+            for (int i = 0; i < partVals.size(); i++) {
+              if (i != timeColIndex) {
+                if (!part.getValues().get(i).equals(partVals.get(i))) {
+                  isLatest = false;
+                  break;
+                }
               }
             }
           }
           if (isLatest) {
-            Date latestTimestamp = MetastoreUtil.getLatestTimeStampOfDimtable(part, timeCol);
-            Date dropTimestamp;
-            try {
-              dropTimestamp = updatePeriod.format().parse(updatePeriod.format().format(timePartSpec.get(timeCol)));
-            } catch (ParseException e) {
-              throw new HiveException(e);
-            }
-            if (latestTimestamp != null && dropTimestamp.equals(latestTimestamp)) {
-              LatestInfo latestInfo =
-                getNextLatestOfDimtable(hiveTable, timeCol, timeColIndex, updatePeriod, nonTimePartSpec);
-              latestAvailable = (latestInfo != null && latestInfo.part != null);
-              latest.put(timeCol, latestInfo);
-            }
+            LatestInfo latestInfo =
+              getNextLatestOfDimtable(hiveTable, timeCol, timeColIndex, updatePeriod, nonTimePartSpec);
+            latestAvailable = (latestInfo != null && latestInfo.part != null);
+            latest.put(timeCol, latestInfo);
           } else {
             latestAvailable = true;
           }
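
This hunk is the heart of LENS-871. The old code matched the non-time partition values first and compared timestamps only afterwards, so dropping any partition whose non-time values matched the latest one could clear the latest cache even when it was not the latest. The new order normalizes the dropped timestamp through a format/parse round trip (comparing at the update period's granularity), treats the partition as latest only if that timestamp equals the stored latest timestamp and the non-time values also match, and recomputes the next latest only in that case. Condensed (a restatement of the hunk, not new behavior):

    Date latestTimestamp = getLatestTimeStampFromPartition(part, timeCol);
    Date dropTimestamp = updatePeriod.parse(updatePeriod.format(timePartSpec.get(timeCol)));
    boolean isLatest = latestTimestamp != null && dropTimestamp.equals(latestTimestamp);
    for (int i = 0; isLatest && i < partVals.size(); i++) {
      // a mismatch on any non-time partition value means this is not the latest
      if (i != timeColIndex && !part.getValues().get(i).equals(partVals.get(i))) {
        isLatest = false;
      }
    }
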
@@ -1122,9 +1120,9 @@ public class CubeMetastoreClient {
   }
 
   private Map<String, String> getPartitionSpec(UpdatePeriod updatePeriod, Map<String, Date> partitionTimestamps) {
-    Map<String, String> partSpec = new HashMap<String, String>();
+    Map<String, String> partSpec = new HashMap<>();
     for (Map.Entry<String, Date> entry : partitionTimestamps.entrySet()) {
-      String pval = updatePeriod.format().format(entry.getValue());
+      String pval = updatePeriod.format(entry.getValue());
       partSpec.put(entry.getKey(), pval);
     }
     return partSpec;
@@ -1158,13 +1156,13 @@ public class CubeMetastoreClient {
   }
 
   public boolean partitionExistsByFilter(String cubeTableName, String storageName, String filter) throws HiveException {
-    return partitionExistsByFilter(MetastoreUtil.getStorageTableName(cubeTableName, Storage.getPrefix(storageName)),
+    return partitionExistsByFilter(getStorageTableName(cubeTableName, Storage.getPrefix(storageName)),
       filter);
   }
 
   public boolean partitionExistsByFilter(String storageTableName, String filter) throws HiveException {
     int parts;
-    Table tbl = null;
+    Table tbl;
     try {
       tbl = getTable(storageTableName);
     } catch (Exception e) {
@@ -1199,12 +1197,9 @@ public class CubeMetastoreClient {
     }
   }
 
-  public int getNumPartitionsByFilter(String storageTableName, String filter) throws HiveException, TException {
-    return getClient().getNumPartitionsByFilter(getTable(storageTableName), filter);
-  }
-
   boolean partitionExists(String storageTableName, UpdatePeriod updatePeriod, Map<String, Date> partitionTimestamps,
-    Map<String, String> partSpec) throws HiveException {
+    Map<String, String> nonTimePartSpec) throws HiveException {
+    HashMap<String, String> partSpec = new HashMap<>(nonTimePartSpec);
     partSpec.putAll(getPartitionSpec(updatePeriod, partitionTimestamps));
     return partitionExists(storageTableName, partSpec);
   }
@@ -1228,7 +1223,7 @@ public class CubeMetastoreClient {
 
   boolean latestPartitionExists(String factOrDimTblName, String storageName, String latestPartCol)
     throws HiveException, LensException {
-    String storageTableName = MetastoreUtil.getStorageTableName(factOrDimTblName, Storage.getPrefix(storageName));
+    String storageTableName = getStorageTableName(factOrDimTblName, Storage.getPrefix(storageName));
     if (isDimensionTable(factOrDimTblName)) {
       return dimTableLatestPartitionExists(storageTableName);
     } else {
@@ -1258,17 +1253,14 @@ public class CubeMetastoreClient {
   /**
    * Get the hive {@link Table} corresponding to the name
    *
-   * @param tableName
-   * @return {@link Table} object
+   * @param tableName table name
+   * @return {@link Table} object corresponding to the name
    * @throws HiveException
    */
   public Table getHiveTable(String tableName) throws HiveException {
     return getTable(tableName);
   }
-  public List<String> getTimePartColNamesOfTable(String tblName, String storageName) throws HiveException {
-    return getTimePartColNamesOfTable(getFactOrDimtableStorageTableName(tblName,
-      storageName));
-  }
+
   public List<String> getTimePartColNamesOfTable(String storageTableName) throws HiveException {
     return getTimePartColNamesOfTable(getTable(storageTableName));
   }
@@ -1340,10 +1332,8 @@ public class CubeMetastoreClient {
   }
 
   boolean isFactTableForCube(Table tbl, String cube) {
-    if (isFactTable(tbl)) {
-      return CubeFactTable.getCubeName(tbl.getTableName(), tbl.getParameters()).equalsIgnoreCase(cube.toLowerCase());
-    }
-    return false;
+    return isFactTable(tbl) && CubeFactTable.getCubeName(tbl.getTableName(), tbl.getParameters())
+      .equalsIgnoreCase(cube.toLowerCase());
   }
 
   /**
@@ -1404,8 +1394,8 @@ public class CubeMetastoreClient {
   /**
    * Is the hive table a cube table?
    *
-   * @param tbl
-   * @return
+   * @param tbl table
+   * @return    whether it's a cube table or not
    * @throws HiveException
    */
   boolean isCube(Table tbl) throws HiveException {
@@ -1416,8 +1406,8 @@ public class CubeMetastoreClient {
   /**
    * Is the hive table a dimension?
    *
-   * @param tbl
-   * @return
+   * @param tbl  table
+   * @return     whether the hive table is a dimension or not
    * @throws HiveException
    */
   boolean isDimension(Table tbl) throws HiveException {
@@ -1440,8 +1430,8 @@ public class CubeMetastoreClient {
   /**
    * Is the hive table a storage
    *
-   * @param tbl
-   * @return
+   * @param tbl table
+   * @return    whether the hive table is a storage
    * @throws HiveException
    */
   boolean isStorage(Table tbl) throws HiveException {
@@ -1481,10 +1471,10 @@ public class CubeMetastoreClient {
           Table tbl = getTable(tableName);
           if (isDimensionTable(tbl)) {
             dimTable = getDimensionTable(tbl);
-            if (enableCaching && dimTable != null) {
+            if (enableCaching) {
               allDimTables.put(tableName, dimTable);
               // update latest partition cache for all storages
-              if (dimTable.getStorages() != null && !dimTable.getStorages().isEmpty()) {
+              if (!dimTable.getStorages().isEmpty()) {
                 for (String storageName : dimTable.getStorages()) {
                   if (dimTable.hasStorageSnapshots(storageName)) {
                     String storageTableName = getFactOrDimtableStorageTableName(dimTable.getName(),
@@ -1630,7 +1620,7 @@ public class CubeMetastoreClient {
   }
 
   private CubeInterface getCube(Table tbl) throws HiveException {
-    String parentCube = tbl.getParameters().get(MetastoreUtil.getParentCubeNameKey(tbl.getTableName()));
+    String parentCube = tbl.getParameters().get(getParentCubeNameKey(tbl.getTableName()));
     if (parentCube != null) {
       return new DerivedCube(tbl, (Cube) getCube(parentCube));
     } else {
@@ -1650,7 +1640,7 @@ public class CubeMetastoreClient {
    */
   public Collection<CubeDimensionTable> getAllDimensionTables() throws HiveException {
     if (!allDimTablesPopulated) {
-      List<CubeDimensionTable> dimTables = new ArrayList<CubeDimensionTable>();
+      List<CubeDimensionTable> dimTables = new ArrayList<>();
       try {
         for (String table : getAllHiveTableNames()) {
           CubeDimensionTable dim = getDimensionTable(table);
@@ -1676,7 +1666,7 @@ public class CubeMetastoreClient {
    */
   public Collection<Storage> getAllStorages() throws HiveException {
     if (!allStoragesPopulated) {
-      List<Storage> storages = new ArrayList<Storage>();
+      List<Storage> storages = new ArrayList<>();
       try {
         for (String table : getAllHiveTableNames()) {
           Storage storage = getStorage(table);
@@ -1702,7 +1692,7 @@ public class CubeMetastoreClient {
    */
   public Collection<CubeInterface> getAllCubes() throws HiveException {
     if (!allCubesPopulated) {
-      List<CubeInterface> cubes = new ArrayList<CubeInterface>();
+      List<CubeInterface> cubes = new ArrayList<>();
       try {
         for (String table : getAllHiveTableNames()) {
           CubeInterface cube = getCube(table);
@@ -1728,7 +1718,7 @@ public class CubeMetastoreClient {
    */
   public Collection<Dimension> getAllDimensions() throws HiveException {
     if (!allDimensionsPopulated) {
-      List<Dimension> dims = new ArrayList<Dimension>();
+      List<Dimension> dims = new ArrayList<>();
       try {
         for (String table : getAllHiveTableNames()) {
           Dimension dim = getDimension(table);
@@ -1754,7 +1744,7 @@ public class CubeMetastoreClient {
    */
   public Collection<CubeFactTable> getAllFacts() throws HiveException {
     if (!allFactTablesPopulated) {
-      List<CubeFactTable> facts = new ArrayList<CubeFactTable>();
+      List<CubeFactTable> facts = new ArrayList<>();
       try {
         for (String table : getAllHiveTableNames()) {
           CubeFactTable fact = getCubeFact(table);
@@ -1801,7 +1791,7 @@ public class CubeMetastoreClient {
       }
       cubeName = cube.getName();
     }
-    List<CubeFactTable> cubeFacts = new ArrayList<CubeFactTable>();
+    List<CubeFactTable> cubeFacts = new ArrayList<>();
     try {
       for (CubeFactTable fact : getAllFacts()) {
         if (cubeName == null || fact.getCubeName().equalsIgnoreCase(cubeName)) {
@@ -1815,27 +1805,6 @@ public class CubeMetastoreClient {
   }
 
   /**
-   * Get all derived cubes of the cube.
-   *
-   * @param cube Cube object
-   * @return List of DerivedCube objects
-   * @throws HiveException
-   */
-  public List<DerivedCube> getAllDerivedCubes(CubeInterface cube) throws HiveException {
-    List<DerivedCube> dcubes = new ArrayList<DerivedCube>();
-    try {
-      for (CubeInterface cb : getAllCubes()) {
-        if (cb.isDerivedCube() && ((DerivedCube) cb).getParent().getName().equalsIgnoreCase(cube.getName())) {
-          dcubes.add((DerivedCube) cb);
-        }
-      }
-    } catch (HiveException e) {
-      throw new HiveException("Could not get all derived cubes of " + cube, e);
-    }
-    return dcubes;
-  }
-
-  /**
    * Get all derived cubes of the cube, that have all fields queryable together
    *
    * @param cube Cube object
@@ -1843,7 +1812,7 @@ public class CubeMetastoreClient {
    * @throws HiveException
    */
   public List<DerivedCube> getAllDerivedQueryableCubes(CubeInterface cube) throws HiveException {
-    List<DerivedCube> dcubes = new ArrayList<DerivedCube>();
+    List<DerivedCube> dcubes = new ArrayList<>();
     try {
       for (CubeInterface cb : getAllCubes()) {
         if (cb.isDerivedCube() && ((DerivedCube) cb).getParent().getName().equalsIgnoreCase(cube.getName())
@@ -1865,7 +1834,7 @@ public class CubeMetastoreClient {
    * @throws HiveException
    */
   public List<CubeDimensionTable> getAllDimensionTables(Dimension dim) throws HiveException {
-    List<CubeDimensionTable> dimTables = new ArrayList<CubeDimensionTable>();
+    List<CubeDimensionTable> dimTables = new ArrayList<>();
     try {
       for (CubeDimensionTable dimTbl : getAllDimensionTables()) {
         if (dim == null || dimTbl.getDimName().equalsIgnoreCase(dim.getName().toLowerCase())) {
@@ -1878,15 +1847,6 @@ public class CubeMetastoreClient {
     return dimTables;
   }
 
-  public List<String> getPartColNames(String tableName) throws HiveException {
-    List<String> partColNames = new ArrayList<String>();
-    Table tbl = getTable(tableName);
-    for (FieldSchema f : tbl.getPartCols()) {
-      partColNames.add(f.getName().toLowerCase());
-    }
-    return partColNames;
-  }
-
   public boolean partColExists(String tableName, String partCol) throws HiveException {
     Table tbl = getTable(tableName);
     for (FieldSchema f : tbl.getPartCols()) {
@@ -1905,11 +1865,11 @@ public class CubeMetastoreClient {
   }
 
   /**
-   * Returns true if columns changed
    *
-   * @param table
-   * @param hiveTable
-   * @param cubeTable
+   * @param table     table name
+   * @param hiveTable hive table
+   * @param cubeTable lens cube table
+   * @return true if columns changed in alter
    * @throws HiveException
    */
   private boolean alterCubeTable(String table, Table hiveTable, AbstractCubeTable cubeTable) throws HiveException {
@@ -1943,11 +1903,6 @@ public class CubeMetastoreClient {
     }
   }
 
-  private void alterHiveTable(String table, Table hiveTable, List<FieldSchema> columns) throws HiveException {
-    hiveTable.getTTable().getSd().setCols(columns);
-    alterHiveTable(table, hiveTable);
-  }
-
   /**
    * Alter cube specified by the name to new definition
    *
@@ -1977,7 +1932,7 @@ public class CubeMetastoreClient {
   public void alterDimension(String dimName, Dimension newDim) throws HiveException {
     Table tbl = getTable(dimName);
     if (isDimension(tbl)) {
-      alterCubeTable(dimName, tbl, (AbstractCubeTable) newDim);
+      alterCubeTable(dimName, tbl, newDim);
       if (enableCaching) {
         allDims.put(dimName.trim().toLowerCase(), getDimension(refreshTable(dimName)));
       }
@@ -2008,7 +1963,7 @@ public class CubeMetastoreClient {
   /**
    * Drop a storage
    *
-   * @param storageName
+   * @param storageName  storage name
    * @throws HiveException
    */
   public void dropStorage(String storageName) throws HiveException {
@@ -2023,7 +1978,7 @@ public class CubeMetastoreClient {
   /**
    * Drop a cube
    *
-   * @param cubeName
+   * @param cubeName cube name
    * @throws HiveException
    */
   public void dropCube(String cubeName) throws HiveException {
@@ -2053,9 +2008,9 @@ public class CubeMetastoreClient {
   }
 
   /**
-   * Drop a fact with cascade flag
+   * Drop a fact with cascade  flag
    *
-   * @param factName
+   * @param factName fact name
    * @param cascade  If true, will drop all the storages of the fact
    * @throws HiveException
    */
@@ -2077,8 +2032,8 @@ public class CubeMetastoreClient {
   /**
    * Drop a storage from fact
    *
-   * @param factName
-   * @param storage
+   * @param factName fact name
+   * @param storage  storage name
    * @throws HiveException
    */
   public void dropStorageFromFact(String factName, String storage) throws HiveException {
@@ -2103,8 +2058,8 @@ public class CubeMetastoreClient {
   /**
    * Drop a storage from dimension
    *
-   * @param dimTblName
-   * @param storage
+   * @param dimTblName dim table name
+   * @param storage    storage
    * @throws HiveException
    */
   public void dropStorageFromDim(String dimTblName, String storage) throws HiveException {
@@ -2127,7 +2082,7 @@ public class CubeMetastoreClient {
   /**
    * Drop the dimension table
    *
-   * @param dimTblName
+   * @param dimTblName dim table name
    * @param cascade    If true, will drop all the dimension storages
    * @throws HiveException
    */
@@ -2149,9 +2104,9 @@ public class CubeMetastoreClient {
   /**
    * Alter a cubefact with new definition and alter underlying storage tables as well.
    *
-   * @param factTableName
-   * @param cubeFactTable
-   * @param storageTableDescs
+   * @param factTableName     fact table name
+   * @param cubeFactTable     cube fact table
+   * @param storageTableDescs storage table desc objects
    *
    * @throws HiveException
    */
@@ -2187,8 +2142,8 @@ public class CubeMetastoreClient {
   /**
    * Alter dimension table with new dimension definition and underlying storage tables as well
    *
-   * @param dimTableName
-   * @param cubeDimensionTable
+   * @param dimTableName         dim table name
+   * @param cubeDimensionTable   cube dimension table
    * @throws HiveException
    */
   public void alterCubeDimensionTable(String dimTableName, CubeDimensionTable cubeDimensionTable,

http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
index f934ad3..1e5ef93 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
@@ -87,7 +87,7 @@ public class FactPartition implements Comparable<FactPartition> {
   }
 
   public String getPartString() {
-    return period.format().format(partSpec);
+    return period.format(partSpec);
   }
 
   public String getFormattedFilter(String tableName) {

http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
index e5cf468..4b57d95 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
@@ -25,7 +25,6 @@ import java.text.ParseException;
 import java.util.*;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 
@@ -34,11 +33,11 @@ public class MetastoreUtil {
 
   }
 
-  public static final String getFactOrDimtableStorageTableName(String factName, String storageName) {
+  public static String getFactOrDimtableStorageTableName(String factName, String storageName) {
     return getStorageTableName(factName, Storage.getPrefix(storageName));
   }
 
-  public static final String getStorageTableName(String cubeTableName, String storagePrefix) {
+  public static String getStorageTableName(String cubeTableName, String storagePrefix) {
     return (storagePrefix + cubeTableName).toLowerCase();
   }
 
@@ -46,26 +45,26 @@ public class MetastoreUtil {
     return getStorageEntityPrefix(name) + CLASS_SFX;
   }
 
-  public static final String getStorageEntityPrefix(String storageName) {
+  public static String getStorageEntityPrefix(String storageName) {
     return STORAGE_ENTITY_PFX + storageName.toLowerCase();
   }
 
   // //////////////////////////
   // Dimension properties ///
   // /////////////////////////
-  public static final String getDimPrefix(String dimName) {
+  public static String getDimPrefix(String dimName) {
     return DIMENSION_PFX + dimName.toLowerCase();
   }
 
-  public static final String getDimAttributeListKey(String dimName) {
+  public static String getDimAttributeListKey(String dimName) {
     return getDimPrefix(dimName) + ATTRIBUTES_LIST_SFX;
   }
 
-  public static final String getDimTablePartsKey(String dimtableName) {
+  public static String getDimTablePartsKey(String dimtableName) {
     return DIM_TABLE_PFX + dimtableName + PARTCOLS_SFX;
   }
 
-  public static final String getDimTimedDimensionKey(String dimName) {
+  public static String getDimTimedDimensionKey(String dimName) {
     return getDimPrefix(dimName) + TIMED_DIMENSION_SFX;
   }
 
@@ -76,7 +75,7 @@ public class MetastoreUtil {
     return DIM_KEY_PFX + dimName.toLowerCase();
   }
 
-  public static final String getDimensionClassPropertyKey(String dimName) {
+  public static String getDimensionClassPropertyKey(String dimName) {
     return getDimensionKeyPrefix(dimName) + CLASS_SFX;
   }
 
@@ -84,11 +83,11 @@ public class MetastoreUtil {
     return getDimensionKeyPrefix(name) + INLINE_VALUES_SFX;
   }
 
-  public static final String getDimTypePropertyKey(String dimName) {
+  public static String getDimTypePropertyKey(String dimName) {
     return getDimensionKeyPrefix(dimName) + TYPE_SFX;
   }
 
-  public static final String getDimNumOfDistinctValuesPropertyKey(String dimName) {
+  public static String getDimNumOfDistinctValuesPropertyKey(String dimName) {
     return getDimensionKeyPrefix(dimName) + NUM_DISTINCT_VALUES;
   }
 
@@ -104,15 +103,15 @@ public class MetastoreUtil {
     return Integer.parseInt(param.substring(getHierachyElementKeyPFX(dimName).length()));
   }
 
-  public static final String getDimensionSrcReferenceKey(String dimName) {
+  public static String getDimensionSrcReferenceKey(String dimName) {
     return getDimensionKeyPrefix(dimName) + DIM_REFERS_SFX;
   }
 
-  public static final String getDimRefChainNameKey(String dimName) {
+  public static String getDimRefChainNameKey(String dimName) {
     return getDimensionKeyPrefix(dimName) + CHAIN_NAME_SFX;
   }
 
-  public static final String getDimRefChainColumnKey(String dimName) {
+  public static String getDimRefChainColumnKey(String dimName) {
     return getDimensionKeyPrefix(dimName) + CHAIN_REF_COLUMN_SFX;
   }
 
@@ -120,11 +119,7 @@ public class MetastoreUtil {
     return getDimensionKeyPrefix(dimName) + IS_JOIN_KEY_SFX;
   }
 
-  public static final String getDimensionDestReference(String tableName, String columnName) {
-    return tableName.toLowerCase() + TABLE_COLUMN_SEPERATOR + columnName.toLowerCase();
-  }
-
-  public static final String getReferencesString(List<TableReference> references) {
+  public static String getReferencesString(List<TableReference> references) {
     String[] toks = new String[references.size()];
 
     for (int i = 0; i < references.size(); i++) {
@@ -150,10 +145,12 @@ public class MetastoreUtil {
   public static String getCubeColEndTimePropertyKey(String colName) {
     return getColumnKeyPrefix(colName) + END_TIME_SFX;
   }
-  public static String getStoragetableStartTimesKey(){
+
+  public static String getStoragetableStartTimesKey() {
     return STORAGE_PFX + "start.times";
   }
-  public static String getStoragetableEndTimesKey(){
+
+  public static String getStoragetableEndTimesKey() {
     return STORAGE_PFX + "end.times";
   }
 
@@ -169,15 +166,15 @@ public class MetastoreUtil {
     return getColumnKeyPrefix(colName) + DISPLAY_SFX;
   }
 
-  public static final String getExprColumnKey(String colName) {
+  public static String getExprColumnKey(String colName) {
     return getColumnKeyPrefix(colName) + EXPR_SFX;
   }
 
-  public static final String getExprTypePropertyKey(String colName) {
+  public static String getExprTypePropertyKey(String colName) {
     return getColumnKeyPrefix(colName) + TYPE_SFX;
   }
 
-  public static final String getExprEncodingPropertyKey(String colName) {
+  public static String getExprEncodingPropertyKey(String colName) {
     return getExprColumnKey(colName) + BASE64_SFX;
   }
 
@@ -247,78 +244,78 @@ public class MetastoreUtil {
   // //////////////////////////
   // Measure properties ///
   // /////////////////////////
-  public static final String getMeasurePrefix(String measureName) {
+  public static String getMeasurePrefix(String measureName) {
     return MEASURE_KEY_PFX + measureName.toLowerCase();
   }
 
-  public static final String getMeasureClassPropertyKey(String measureName) {
+  public static String getMeasureClassPropertyKey(String measureName) {
     return getMeasurePrefix(measureName) + CLASS_SFX;
   }
 
-  public static final String getMeasureUnitPropertyKey(String measureName) {
+  public static String getMeasureUnitPropertyKey(String measureName) {
     return getMeasurePrefix(measureName) + UNIT_SFX;
   }
 
-  public static final String getMeasureTypePropertyKey(String measureName) {
+  public static String getMeasureTypePropertyKey(String measureName) {
     return getMeasurePrefix(measureName) + TYPE_SFX;
   }
 
-  public static final String getMeasureFormatPropertyKey(String measureName) {
+  public static String getMeasureFormatPropertyKey(String measureName) {
     return getMeasurePrefix(measureName) + FORMATSTRING_SFX;
   }
 
-  public static final String getMeasureAggrPropertyKey(String measureName) {
+  public static String getMeasureAggrPropertyKey(String measureName) {
     return getMeasurePrefix(measureName) + AGGR_SFX;
   }
 
-  public static final String getMeasureMinPropertyKey(String measureName) {
+  public static String getMeasureMinPropertyKey(String measureName) {
     return getMeasurePrefix(measureName) + MIN_SFX;
   }
 
-  public static final String getMeasureMaxPropertyKey(String measureName) {
+  public static String getMeasureMaxPropertyKey(String measureName) {
     return getMeasurePrefix(measureName) + MAX_SFX;
   }
 
-  public static final String getExpressionListKey(String name) {
+  public static String getExpressionListKey(String name) {
     return getBasePrefix(name) + EXPRESSIONS_LIST_SFX;
   }
 
   // //////////////////////////
   // Cube properties ///
   // /////////////////////////
-  public static final String getBasePrefix(String base) {
+  public static String getBasePrefix(String base) {
     return BASE_KEY_PFX + base.toLowerCase();
   }
 
-  public static final String getCubePrefix(String cubeName) {
+  public static String getCubePrefix(String cubeName) {
     return CUBE_KEY_PFX + cubeName.toLowerCase();
   }
 
-  public static final String getCubeMeasureListKey(String cubeName) {
+  public static String getCubeMeasureListKey(String cubeName) {
     return getCubePrefix(cubeName) + MEASURES_LIST_SFX;
   }
 
-  public static final String getCubeDimensionListKey(String cubeName) {
+  public static String getCubeDimensionListKey(String cubeName) {
     return getCubePrefix(cubeName) + DIMENSIONS_LIST_SFX;
   }
 
-  public static final String getCubeTimedDimensionListKey(String cubeName) {
+  public static String getCubeTimedDimensionListKey(String cubeName) {
     return getCubePrefix(cubeName) + TIMED_DIMENSIONS_LIST_SFX;
   }
 
-  public static final String getCubeJoinChainListKey(String cubeName) {
+  public static String getCubeJoinChainListKey(String cubeName) {
     return getCubePrefix(cubeName) + JOIN_CHAIN_LIST_SFX;
   }
 
-  public static final String getDimensionJoinChainListKey(String dimName) {
+  public static String getDimensionJoinChainListKey(String dimName) {
     return getDimPrefix(dimName) + JOIN_CHAIN_LIST_SFX;
   }
 
-  public static final String getParentCubeNameKey(String cubeName) {
+  public static String getParentCubeNameKey(String cubeName) {
     return getCubePrefix(cubeName) + PARENT_CUBE_SFX;
   }
 
-  public static final String getCubeTableKeyPrefix(String tableName) {
+  public static String getCubeTableKeyPrefix(String tableName) {
     return CUBE_TABLE_PFX + tableName.toLowerCase();
   }
 
@@ -350,7 +347,7 @@ public class MetastoreUtil {
   }
 
   public static String getLatestPartTimestampKey(String partCol) {
-    return MetastoreConstants.STORAGE_PFX + partCol + MetastoreConstants.LATEST_PART_TIMESTAMP_SFX;
+    return STORAGE_PFX + partCol + LATEST_PART_TIMESTAMP_SFX;
   }
 
   // //////////////////////////
@@ -362,16 +359,15 @@ public class MetastoreUtil {
     }
     String sep = "";
     StringBuilder valueStr = new StringBuilder();
-    Iterator<E> it = set.iterator();
-    while (it.hasNext()) {
-      valueStr.append(sep).append(it.next().getName());
+    for (E aSet : set) {
+      valueStr.append(sep).append(aSet.getName());
       sep = ",";
     }
     return valueStr.toString();
   }
 
   static <E extends Named> List<String> getNamedStrs(Collection<E> set, int maxLength) {
-    List<String> namedStrings = new ArrayList<String>();
+    List<String> namedStrings = new ArrayList<>();
     if (set == null || set.isEmpty()) {
       return namedStrings;
     }
@@ -396,10 +392,10 @@ public class MetastoreUtil {
     return namedStrings;
   }
 
-  private static int maxParamLength = 3999;
+  private static final int MAX_PARAM_LENGTH = 3999;
 
   public static <E extends Named> void addNameStrings(Map<String, String> props, String key, Collection<E> set) {
-    addNameStrings(props, key, set, maxParamLength);
+    addNameStrings(props, key, set, MAX_PARAM_LENGTH);
   }
 
   static <E extends Named> void addNameStrings(Map<String, String> props, String key,
@@ -454,15 +450,6 @@ public class MetastoreUtil {
     return valueStr.toString();
   }
 
-  public static Set<String> getColumnNames(AbstractCubeTable table) {
-    List<FieldSchema> fields = table.getColumns();
-    Set<String> columns = new HashSet<String>(fields.size());
-    for (FieldSchema f : fields) {
-      columns.add(f.getName().toLowerCase());
-    }
-    return columns;
-  }
-
   public static void addColumnNames(CubeDimAttribute dim, Set<String> cols) {
     if (dim instanceof HierarchicalDimAttribute) {
       HierarchicalDimAttribute h = (HierarchicalDimAttribute) dim;
@@ -486,6 +473,16 @@ public class MetastoreUtil {
     return STORAGE_PFX + PARTITION_TIMELINE_CACHE + "present";
   }
 
+  public static void filterPartitionsByUpdatePeriod(List<Partition> partitions, UpdatePeriod updatePeriod) {
+    Iterator<Partition> iter = partitions.iterator();
+    while (iter.hasNext()) {
+      Partition part = iter.next();
+      if (!UpdatePeriod.valueOf(part.getParameters().get(PARTITION_UPDATE_PERIOD)).equals(updatePeriod)) {
+        iter.remove();
+      }
+    }
+  }
+
   public static List<Partition> filterPartitionsByNonTimeParts(List<Partition> partitions,
     Map<String, String> nonTimePartSpec,
     String latestPartCol) {
@@ -497,7 +494,7 @@ public class MetastoreUtil {
         if ((nonTimePartSpec == null || !nonTimePartSpec.containsKey(entry1.getKey()))
           && !entry1.getKey().equals(latestPartCol)) {
           try {
-            UpdatePeriod.valueOf(part.getParameters().get(MetastoreConstants.PARTITION_UPDATE_PERIOD))
+            UpdatePeriod.valueOf(part.getParameters().get(PARTITION_UPDATE_PERIOD))
               .format()
               .parse(entry1.getValue());
           } catch (ParseException e) {
@@ -505,7 +502,6 @@ public class MetastoreUtil {
           }
         }
       }
-
       if (ignore) {
         iter.remove();
       }
@@ -513,13 +509,13 @@ public class MetastoreUtil {
     return partitions;
   }
 
-  public static Date getLatestTimeStampOfDimtable(Partition part, String partCol) throws HiveException {
+  public static Date getLatestTimeStampFromPartition(Partition part, String partCol) throws HiveException {
     if (part != null) {
       String latestTimeStampStr = part.getParameters().get(MetastoreUtil.getLatestPartTimestampKey(partCol));
-      String latestPartUpdatePeriod = part.getParameters().get(MetastoreConstants.PARTITION_UPDATE_PERIOD);
+      String latestPartUpdatePeriod = part.getParameters().get(PARTITION_UPDATE_PERIOD);
       UpdatePeriod latestUpdatePeriod = UpdatePeriod.valueOf(latestPartUpdatePeriod.toUpperCase());
       try {
-        return latestUpdatePeriod.format().parse(latestTimeStampStr);
+        return latestUpdatePeriod.parse(latestTimeStampStr);
       } catch (ParseException e) {
         throw new HiveException(e);
       }
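
The new filterPartitionsByUpdatePeriod above removes, in place, every partition whose PARTITION_UPDATE_PERIOD parameter differs from the requested period, so callers such as getNextLatestOfDimtable only consider candidates at the right granularity (note it assumes each partition carries that parameter). A hedged usage sketch:

    // Illustrative: keep only partitions registered with the DAILY update period.
    List<Partition> parts = client.getPartitionsByFilter(table, filter); // source assumed
    MetastoreUtil.filterPartitionsByUpdatePeriod(parts, UpdatePeriod.DAILY);
    // 'parts' now contains only DAILY partitions; the rest were removed in place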

http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java
index 437227c..9318603 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java
@@ -300,7 +300,7 @@ public abstract class Storage extends AbstractCubeTable implements PartitionMeta
             .get(addPartitionDesc.getNonTimePartSpec()).latestParts.entrySet()) {
             if (addPartitionDesc.getTimePartSpec().containsKey(entry.getKey())
               && entry.getValue().get(MetastoreUtil.getLatestPartTimestampKey(entry.getKey())).equals(
-                updatePeriod.format().format(addPartitionDesc.getTimePartSpec().get(entry.getKey())))) {
+                updatePeriod.format(addPartitionDesc.getTimePartSpec().get(entry.getKey())))) {
               if (latestPartIndexForPartCols.get(addPartitionDesc.getNonTimePartSpec()) == null) {
                 latestPartIndexForPartCols.put(addPartitionDesc.getNonTimePartSpec(),
                   Maps.<String, Integer>newHashMap());

http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/metadata/StoragePartitionDesc.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/StoragePartitionDesc.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/StoragePartitionDesc.java
index 044425b..b99fef2 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/StoragePartitionDesc.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/StoragePartitionDesc.java
@@ -73,7 +73,7 @@ public class StoragePartitionDesc extends AddPartitionDesc.OnePartitionDesc {
     if (fullPartSpec == null) {
       fullPartSpec = new HashMap<String, String>();
       for (Map.Entry<String, Date> entry : timePartSpec.entrySet()) {
-        fullPartSpec.put(entry.getKey(), updatePeriod.format().format(entry.getValue()));
+        fullPartSpec.put(entry.getKey(), updatePeriod.format(entry.getValue()));
       }
       if (nonTimePartSpec != null) {
         fullPartSpec.putAll(nonTimePartSpec);

http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/metadata/StorageTableDesc.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/StorageTableDesc.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/StorageTableDesc.java
index 0a2c5df..1e276df 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/StorageTableDesc.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/StorageTableDesc.java
@@ -19,10 +19,12 @@
 
 package org.apache.lens.cube.metadata;
 
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
 
 public class StorageTableDesc extends CreateTableDesc {
@@ -45,6 +47,25 @@ public class StorageTableDesc extends CreateTableDesc {
     super.getTblProps().put(MetastoreConstants.TIME_PART_COLUMNS, StringUtils.join(this.timePartCols, ','));
   }
 
+  public StorageTableDesc() {
+  }
+
+  public StorageTableDesc(Class<?> inputFormatClass, Class<?> outputFormatClass,
+    ArrayList<FieldSchema> partCols, List<String> timePartCols) {
+    if (inputFormatClass != null) {
+      setInputFormat(inputFormatClass.getCanonicalName());
+    }
+    if (outputFormatClass != null) {
+      setOutputFormat(outputFormatClass.getCanonicalName());
+    }
+    if (partCols != null) {
+      setPartCols(partCols);
+    }
+    if (timePartCols != null) {
+      setTimePartCols(timePartCols);
+    }
+  }
+
   /**
    * @deprecated
    */

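A hedged usage sketch of the new convenience constructor, assuming lens-cube plus the usual Hive text formats (TextInputFormat, HiveIgnoreKeyTextOutputFormat) are on the classpath; all four arguments are null-tolerant, so callers pass only what they have:

import java.util.ArrayList;
import java.util.Arrays;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;

import org.apache.lens.cube.metadata.StorageTableDesc;

public class StorageTableDescSketch {
  public static void main(String[] args) {
    ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>(
      Arrays.asList(new FieldSchema("dt", "string", "time partition column")));

    // Input format, output format, partition columns and time partition
    // columns in one call; null arguments are simply skipped.
    StorageTableDesc desc = new StorageTableDesc(
      TextInputFormat.class, HiveIgnoreKeyTextOutputFormat.class,
      partCols, Arrays.asList("dt"));
    System.out.println(desc.getPartCols());
  }
}
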
http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
index 0026262..80295b1 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
@@ -38,7 +38,7 @@ public class TimePartition implements Comparable<TimePartition>, Named {
   private TimePartition(@NonNull UpdatePeriod updatePeriod, @NonNull Date date) {
     this.updatePeriod = updatePeriod;
     this.date = updatePeriod.truncate(date);
-    this.dateString = updatePeriod.format().format(this.date);
+    this.dateString = updatePeriod.format(this.date);
   }
 
   public static TimePartition of(UpdatePeriod updatePeriod, Date date) throws LensException {
@@ -56,7 +56,7 @@ public class TimePartition implements Comparable<TimePartition>, Named {
         throw new LensException(getWrongUpdatePeriodMessage(updatePeriod, dateString));
       }
       try {
-        return TimePartition.of(updatePeriod, updatePeriod.format().parse(dateString));
+        return TimePartition.of(updatePeriod, updatePeriod.parse(dateString));
       } catch (ParseException e) {
         throw new LensException(getWrongUpdatePeriodMessage(updatePeriod, dateString), e);
       }

http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java
index f192463..4c76a69 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java
@@ -22,17 +22,22 @@ package org.apache.lens.cube.metadata;
 import static java.util.Calendar.*;
 
 import java.text.DateFormat;
+import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
 import java.util.Comparator;
 import java.util.Date;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
-import org.apache.lens.cube.parse.DateUtil;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang3.time.DateUtils;
 
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
 import lombok.Getter;
 
 public enum UpdatePeriod implements Named {
@@ -181,10 +186,6 @@ public enum UpdatePeriod implements Named {
     return this.weight;
   }
 
-  public long monthWeight(Date date) {
-    return DateUtil.getNumberofDaysInMonth(date) * DAILY.weight();
-  }
-
   public static UpdatePeriod fromUnitName(String unitName) throws LensException {
     for (UpdatePeriod up : values()) {
       if (up.getUnitName().equals(unitName)) {
@@ -219,6 +220,37 @@ public enum UpdatePeriod implements Named {
     }
   }
 
+  Cache<Date, String> dateToStringCache = CacheBuilder.newBuilder()
+    .expireAfterWrite(2, TimeUnit.HOURS).maximumSize(100).build();
+  Cache<String, Date> stringToDateCache = CacheBuilder.newBuilder()
+    .expireAfterWrite(2, TimeUnit.HOURS).maximumSize(100).build();
+
+  public String format(final Date date) {
+    try {
+      return dateToStringCache.get(date, new Callable<String>() {
+        @Override
+        public String call() {
+          return format().format(date);
+        }
+      });
+    } catch (ExecutionException e) {
+      return format().format(date);
+    }
+  }
+
+  public Date parse(final String dateString) throws ParseException {
+    try {
+      return stringToDateCache.get(dateString, new Callable<Date>() {
+        @Override
+        public Date call() throws Exception {
+          return format().parse(dateString);
+        }
+      });
+    } catch (ExecutionException e) {
+      return format().parse(dateString);
+    }
+  }
+
   public String formatStr() {
     return this.format;
   }

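The memoization added above, shown in isolation: a bounded, expiring Guava cache in front of SimpleDateFormat, which is relatively costly to construct and not thread-safe. A minimal sketch assuming only Guava on the classpath; the pattern string is a stand-in for an UpdatePeriod's format, and the parse-side cache is symmetric:

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;

public class CachedDateFormatter {
  // Stand-in for an UpdatePeriod's format string.
  static final String PATTERN = "yyyy-MM-dd";

  // Bounded and expiring, mirroring the sizes used in UpdatePeriod above.
  final Cache<Date, String> dateToString = CacheBuilder.newBuilder()
    .expireAfterWrite(2, TimeUnit.HOURS).maximumSize(100).build();

  public String format(final Date date) {
    try {
      return dateToString.get(date, new Callable<String>() {
        @Override
        public String call() {
          // A fresh SimpleDateFormat per miss; instances are not thread-safe.
          return new SimpleDateFormat(PATTERN).format(date);
        }
      });
    } catch (ExecutionException e) {
      return new SimpleDateFormat(PATTERN).format(date);
    }
  }

  public static void main(String[] args) {
    CachedDateFormatter formatter = new CachedDateFormatter();
    Date now = new Date();
    System.out.println(formatter.format(now)); // miss: computes and caches
    System.out.println(formatter.format(now)); // hit: served from the cache
  }
}
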
http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
index 4690d1d..5e17eac 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
@@ -45,7 +45,6 @@ public final class DateUtil {
   private DateUtil() {
 
   }
-
   /*
    * NOW -> new java.util.Date() NOW-7DAY -> a date one week earlier NOW (+-)
    * <NUM>UNIT or Hardcoded dates in DD-MM-YYYY hh:mm:ss,sss
@@ -68,7 +67,6 @@ public final class DateUtil {
 
   public static final String WSPACE = "\\s+";
   public static final String OPTIONAL_WSPACE = "\\s*";
-  public static final Pattern P_WSPACE = Pattern.compile(WSPACE);
 
   public static final String SIGNAGE = "\\+|\\-";
   public static final Pattern P_SIGNAGE = Pattern.compile(SIGNAGE);
@@ -108,10 +106,6 @@ public final class DateUtil {
       }
     };
 
-  public static String formatDate(Date dt) {
-    return ABSDATE_PARSER.get().format(dt);
-  }
-
   public static String getAbsDateFormatString(String str) {
     if (str.matches(YEAR_FMT)) {
       return str + "-01-01-00:00:00,000";
@@ -178,7 +172,6 @@ public final class DateUtil {
 
       Matcher granularityMatcher = P_UNIT.matcher(nowWithGranularity);
       if (granularityMatcher.find()) {
-        String unit = granularityMatcher.group().toLowerCase();
         calendar = UpdatePeriod.fromUnitName(granularityMatcher.group().toLowerCase()).truncate(calendar);
       }
     }
@@ -269,12 +262,6 @@ public final class DateUtil {
     return cal.getTime();
   }
 
-  public static int getNumberofDaysInMonth(Date date) {
-    Calendar calendar = Calendar.getInstance();
-    calendar.setTime(date);
-    return calendar.getActualMaximum(DAY_OF_MONTH);
-  }
-
   public static CoveringInfo getMonthlyCoveringInfo(Date from, Date to) {
     // Move 'from' to end of month, unless its the first day of month
     boolean coverable = true;


[17/50] [abbrv] lens git commit: LENS-851 : Disable union for multi fact queries

Posted by sh...@apache.org.
LENS-851 : Disable union for multi fact queries


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/7a3a1734
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/7a3a1734
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/7a3a1734

Branch: refs/heads/LENS-581
Commit: 7a3a1734e17898240f9be3812ab886d46718f0e3
Parents: 73716cb
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Wed Nov 25 10:20:16 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Wed Nov 25 10:20:16 2015 +0530

----------------------------------------------------------------------
 .../java/org/apache/lens/cube/parse/MultiFactHQLContext.java   | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/7a3a1734/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
index 2fcea8b..113d8de 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
@@ -94,7 +94,7 @@ class MultiFactHQLContext extends SimpleHQLContext {
     for (int i = 0; i < query.getSelectAST().getChildCount(); i++) {
       if (selectToFactIndex.get(i) == null) {
         throw new LensException(LensCubeErrorCode.EXPRESSION_NOT_IN_ANY_FACT.getLensErrorInfo(),
-            HQLParser.getString((ASTNode) query.getSelectAST().getChild(i)));
+          HQLParser.getString((ASTNode) query.getSelectAST().getChild(i)));
       }
       if (selectToFactIndex.get(i).size() == 1) {
         select.append("mq").append(selectToFactIndex.get(i).get(0)).append(".")
@@ -130,6 +130,10 @@ class MultiFactHQLContext extends SimpleHQLContext {
     Iterator<CandidateFact> iter = facts.iterator();
     while (iter.hasNext()) {
       CandidateFact fact = iter.next();
+      if (fact.getStorageTables().size() > 1) {
+        // Not supported right now.
+        throw new LensException(LensCubeErrorCode.STORAGE_UNION_DISABLED.getLensErrorInfo());
+      }
       FactHQLContext facthql = new FactHQLContext(fact, dimsToQuery, factDimMap.get(fact), query);
       fromBuilder.append("(");
       fromBuilder.append(facthql.toHQL());

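The guard added above, in isolation: a candidate fact that resolves to more than one storage table would need a UNION in the FROM clause, so it is rejected before any HQL is generated. A self-contained sketch; Candidate and the unchecked exception are stand-ins for CandidateFact and LensException:

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class UnionGuardSketch {
  // Minimal stand-in for CandidateFact: only the storage-table count matters here.
  static class Candidate {
    final List<String> storageTables;
    Candidate(String... tables) { this.storageTables = Arrays.asList(tables); }
  }

  public static void main(String[] args) {
    List<Candidate> facts = Arrays.asList(
      new Candidate("fact1_daily"),
      new Candidate("fact2_daily", "fact2_hourly")); // would require a UNION

    Iterator<Candidate> iter = facts.iterator();
    while (iter.hasNext()) {
      Candidate fact = iter.next();
      if (fact.storageTables.size() > 1) {
        // Mirrors the new check: fail fast instead of generating a union query.
        throw new IllegalStateException("storage union disabled for multi-fact queries");
      }
    }
  }
}
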

[37/50] [abbrv] lens git commit: LENS-885: Cleanup of Cube test cases

Posted by sh...@apache.org.
LENS-885: Cleanup of Cube test cases


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/7c7c86da
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/7c7c86da
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/7c7c86da

Branch: refs/heads/LENS-581
Commit: 7c7c86daed2e9907bda92f5ed29e73ed99a9a726
Parents: 7e9e47e
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Fri Dec 11 18:40:59 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Fri Dec 11 18:41:00 2015 +0530

----------------------------------------------------------------------
 .../apache/lens/cube/metadata/CubeColumn.java   |   1 -
 .../lens/cube/metadata/CubeFactTable.java       |   1 -
 .../org/apache/lens/cube/metadata/DateUtil.java | 396 ++++++++++++++++
 .../lens/cube/metadata/TimePartitionRange.java  |   1 -
 .../apache/lens/cube/metadata/TimeRange.java    | 219 +++++++++
 .../apache/lens/cube/metadata/UpdatePeriod.java |  84 +++-
 .../timeline/EndsAndHolesPartitionTimeline.java |   2 +-
 .../apache/lens/cube/parse/CandidateFact.java   |   5 +-
 .../cube/parse/CandidateTablePruneCause.java    |   2 +
 .../org/apache/lens/cube/parse/DateUtil.java    | 456 ------------------
 .../lens/cube/parse/ExpressionResolver.java     |  11 +-
 .../lens/cube/parse/SingleFactHQLContext.java   |   2 +-
 .../lens/cube/parse/StorageTableResolver.java   |   2 +-
 .../org/apache/lens/cube/parse/TimeRange.java   | 220 ---------
 .../lens/cube/parse/TimerangeResolver.java      |   5 +-
 .../lens/cube/metadata/CubeFactTableTest.java   |   1 -
 .../apache/lens/cube/metadata/DateFactory.java  | 196 ++++++++
 .../cube/metadata/TestCubeMetastoreClient.java  | 115 ++---
 .../apache/lens/cube/metadata/TestDateUtil.java | 297 ++++++++++++
 .../apache/lens/cube/parse/CubeTestSetup.java   | 191 ++------
 .../FieldsCannotBeQueriedTogetherTest.java      |   8 +-
 .../lens/cube/parse/TestAggregateResolver.java  |   1 +
 .../lens/cube/parse/TestBaseCubeQueries.java    |   5 +-
 .../cube/parse/TestBetweenTimeRangeWriter.java  |  25 +-
 .../lens/cube/parse/TestCubeRewriter.java       | 459 +++++++++----------
 .../apache/lens/cube/parse/TestDateUtil.java    | 299 ------------
 .../cube/parse/TestDenormalizationResolver.java |  28 +-
 .../lens/cube/parse/TestExpressionContext.java  |   4 +-
 .../lens/cube/parse/TestExpressionResolver.java |   1 +
 .../lens/cube/parse/TestJoinResolver.java       |   1 +
 .../lens/cube/parse/TestORTimeRangeWriter.java  |  40 +-
 .../lens/cube/parse/TestQueryMetrics.java       |   2 +-
 .../lens/cube/parse/TestRewriterPlan.java       |   2 +-
 .../apache/lens/cube/parse/TestStorageUtil.java |  98 ++--
 .../lens/cube/parse/TestTimeRangeExtractor.java |  33 +-
 .../lens/cube/parse/TestTimeRangeResolver.java  |   2 +-
 .../lens/cube/parse/TestTimeRangeWriter.java    |  48 +-
 .../parse/TestTimeRangeWriterWithQuery.java     | 134 +++---
 .../lens/server/query/QueryResultPurger.java    |   2 +-
 39 files changed, 1693 insertions(+), 1706 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeColumn.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeColumn.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeColumn.java
index a2a00d2..b04532f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeColumn.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeColumn.java
@@ -24,7 +24,6 @@ import java.util.Date;
 import java.util.Map;
 import java.util.TimeZone;
 
-import org.apache.lens.cube.parse.TimeRange;
 
 import com.google.common.base.Optional;
 

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
index d6bfb79..dd0adb7 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
@@ -21,7 +21,6 @@ package org.apache.lens.cube.metadata;
 import java.util.*;
 
 import org.apache.lens.cube.metadata.UpdatePeriod.UpdatePeriodComparator;
-import org.apache.lens.cube.parse.DateUtil;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/metadata/DateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/DateUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/DateUtil.java
new file mode 100644
index 0000000..b76c567
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/DateUtil.java
@@ -0,0 +1,396 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.metadata;
+
+import static java.util.Calendar.MONTH;
+
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.server.api.error.LensException;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.time.DateUtils;
+
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
+import lombok.Data;
+import lombok.EqualsAndHashCode;
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public final class DateUtil {
+  private DateUtil() {
+
+  }
+
+  /*
+   * NOW -> new java.util.Date(); NOW-7DAY -> a date one week earlier; NOW (+-)
+   * <NUM>UNIT; or hardcoded dates in yyyy-MM-dd-HH:mm:ss,SSS
+   */
+  public static final String UNIT;
+
+  static {
+    StringBuilder sb = new StringBuilder();
+    String sep = "";
+    for (UpdatePeriod up : UpdatePeriod.values()) {
+      sb.append(sep).append(up.getUnitName());
+      sep = "|";
+    }
+    UNIT = sb.toString();
+  }
+
+  public static final String GRANULARITY = "\\.(" + UNIT + ")";
+  public static final String RELATIVE = "(now)(" + GRANULARITY + ")?";
+  public static final Pattern P_RELATIVE = Pattern.compile(RELATIVE, Pattern.CASE_INSENSITIVE);
+
+  public static final String WSPACE = "\\s+";
+  public static final String OPTIONAL_WSPACE = "\\s*";
+
+  public static final String SIGNAGE = "\\+|\\-";
+  public static final Pattern P_SIGNAGE = Pattern.compile(SIGNAGE);
+
+  public static final String QUANTITY = "\\d+";
+  public static final Pattern P_QUANTITY = Pattern.compile(QUANTITY);
+
+  public static final Pattern P_UNIT = Pattern.compile(UNIT, Pattern.CASE_INSENSITIVE);
+
+  public static final String RELDATE_VALIDATOR_STR = RELATIVE + OPTIONAL_WSPACE + "((" + SIGNAGE + ")" + "("
+    + WSPACE + ")?" + "(" + QUANTITY + ")" + OPTIONAL_WSPACE + "(" + UNIT + "))?" + "(s?)";
+
+  public static final Pattern RELDATE_VALIDATOR = Pattern.compile(RELDATE_VALIDATOR_STR, Pattern.CASE_INSENSITIVE);
+
+  public static final String YEAR_FMT = "[0-9]{4}";
+  public static final String MONTH_FMT = YEAR_FMT + "-[0-9]{2}";
+  public static final String DAY_FMT = MONTH_FMT + "-[0-9]{2}";
+  public static final String HOUR_FMT = DAY_FMT + "-[0-9]{2}";
+  public static final String MINUTE_FMT = HOUR_FMT + ":[0-9]{2}";
+  public static final String SECOND_FMT = MINUTE_FMT + ":[0-9]{2}";
+  public static final String MILLISECOND_FMT = SECOND_FMT + ",[0-9]{3}";
+  public static final String ABSDATE_FMT = "yyyy-MM-dd-HH:mm:ss,SSS";
+  public static final String HIVE_QUERY_DATE_FMT = "yyyy-MM-dd HH:mm:ss";
+
+  public static final ThreadLocal<DateFormat> ABSDATE_PARSER =
+    new ThreadLocal<DateFormat>() {
+      @Override
+      protected SimpleDateFormat initialValue() {
+        return new SimpleDateFormat(ABSDATE_FMT);
+      }
+    };
+  public static final ThreadLocal<DateFormat> HIVE_QUERY_DATE_PARSER =
+    new ThreadLocal<DateFormat>() {
+      @Override
+      protected SimpleDateFormat initialValue() {
+        return new SimpleDateFormat(HIVE_QUERY_DATE_FMT);
+      }
+    };
+
+  public static String getAbsDateFormatString(String str) {
+    if (str.matches(YEAR_FMT)) {
+      return str + "-01-01-00:00:00,000";
+    } else if (str.matches(MONTH_FMT)) {
+      return str + "-01-00:00:00,000";
+    } else if (str.matches(DAY_FMT)) {
+      return str + "-00:00:00,000";
+    } else if (str.matches(HOUR_FMT)) {
+      return str + ":00:00,000";
+    } else if (str.matches(MINUTE_FMT)) {
+      return str + ":00,000";
+    } else if (str.matches(SECOND_FMT)) {
+      return str + ",000";
+    } else if (str.matches(MILLISECOND_FMT)) {
+      return str;
+    }
+    throw new IllegalArgumentException("Unsupported formatting for date " + str);
+  }
+
+  public static Date resolveDate(String str, Date now) throws LensException {
+    if (RELDATE_VALIDATOR.matcher(str).matches()) {
+      return resolveRelativeDate(str, now);
+    } else {
+      return resolveAbsoluteDate(str);
+    }
+  }
+
+  public static String relativeToAbsolute(String relative) throws LensException {
+    return relativeToAbsolute(relative, new Date());
+  }
+
+  public static String relativeToAbsolute(String relative, Date now) throws LensException {
+    if (RELDATE_VALIDATOR.matcher(relative).matches()) {
+      return ABSDATE_PARSER.get().format(resolveRelativeDate(relative, now));
+    } else {
+      return relative;
+    }
+  }
+
+  static Cache<String, Date> stringToDateCache = CacheBuilder.newBuilder()
+    .expireAfterWrite(2, TimeUnit.HOURS).maximumSize(100).build();
+
+  public static Date resolveAbsoluteDate(final String str) throws LensException {
+    try {
+      return stringToDateCache.get(str, new Callable<Date>() {
+        @Override
+        public Date call() throws ParseException {
+          return ABSDATE_PARSER.get().parse(getAbsDateFormatString(str));
+        }
+      });
+    } catch (Exception e) {
+      log.error("Invalid date format. expected only {} date provided:{}", ABSDATE_FMT, str, e);
+      throw new LensException(LensCubeErrorCode.WRONG_TIME_RANGE_FORMAT.getLensErrorInfo(), ABSDATE_FMT, str);
+    }
+  }
+
+  public static Date resolveRelativeDate(String str, Date now) throws LensException {
+    if (StringUtils.isBlank(str)) {
+      throw new LensException(LensCubeErrorCode.NULL_DATE_VALUE.getLensErrorInfo());
+    }
+
+    // Resolve NOW with proper granularity
+    Calendar calendar = Calendar.getInstance();
+    calendar.setTime(now);
+
+    str = str.toLowerCase();
+    Matcher relativeMatcher = P_RELATIVE.matcher(str);
+    if (relativeMatcher.find()) {
+      String nowWithGranularity = relativeMatcher.group();
+      nowWithGranularity = nowWithGranularity.replaceAll("now", "");
+      nowWithGranularity = nowWithGranularity.replaceAll("\\.", "");
+
+      Matcher granularityMatcher = P_UNIT.matcher(nowWithGranularity);
+      if (granularityMatcher.find()) {
+        calendar = UpdatePeriod.fromUnitName(granularityMatcher.group().toLowerCase()).truncate(calendar);
+      }
+    }
+
+    // Get rid of 'now' part and whitespace
+    String diffStr = str.replaceAll(RELATIVE, "").replace(WSPACE, "");
+    TimeDiff diff = TimeDiff.parseFrom(diffStr);
+    return diff.offsetFrom(calendar.getTime());
+  }
+
+  public static Date getCeilDate(Date date, UpdatePeriod interval) {
+    return interval.getCeilDate(date);
+  }
+
+  public static Date getFloorDate(Date date, UpdatePeriod interval) {
+    return interval.getFloorDate(date);
+  }
+
+  public static CoveringInfo getMonthlyCoveringInfo(Date from, Date to) {
+    // Move 'from' to the end of the month, unless it's the first day of the month
+    boolean coverable = true;
+    if (!from.equals(DateUtils.truncate(from, MONTH))) {
+      from = DateUtils.addMonths(DateUtils.truncate(from, MONTH), 1);
+      coverable = false;
+    }
+
+    // Move 'to' to the beginning of the next month, unless it's the first day of the month
+    if (!to.equals(DateUtils.truncate(to, MONTH))) {
+      to = DateUtils.truncate(to, MONTH);
+      coverable = false;
+    }
+
+    int months = 0;
+    while (from.before(to)) {
+      from = DateUtils.addMonths(from, 1);
+      months++;
+    }
+    return new CoveringInfo(months, coverable);
+  }
+
+  public static CoveringInfo getQuarterlyCoveringInfo(Date from, Date to) {
+    CoveringInfo monthlyCoveringInfo = getMonthlyCoveringInfo(from, to);
+    if (monthlyCoveringInfo.getCountBetween() < 3) {
+      return new CoveringInfo(0, false);
+    }
+    boolean coverable = monthlyCoveringInfo.isCoverable();
+    if (!from.equals(DateUtils.truncate(from, MONTH))) {
+      from = DateUtils.addMonths(DateUtils.truncate(from, MONTH), 1);
+      coverable = false;
+    }
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(from);
+    int fromMonth = cal.get(MONTH);
+
+    // Get the start date of the quarter
+    int beginOffset = (3 - fromMonth % 3) % 3;
+    int endOffset = (monthlyCoveringInfo.getCountBetween() - beginOffset) % 3;
+    if (beginOffset > 0 || endOffset > 0) {
+      coverable = false;
+    }
+    return new CoveringInfo((monthlyCoveringInfo.getCountBetween() - beginOffset - endOffset) / 3, coverable);
+  }
+
+
+  public static CoveringInfo getYearlyCoveringInfo(Date from, Date to) {
+    CoveringInfo monthlyCoveringInfo = getMonthlyCoveringInfo(from, to);
+    if (monthlyCoveringInfo.getCountBetween() < 12) {
+      return new CoveringInfo(0, false);
+    }
+    boolean coverable = monthlyCoveringInfo.isCoverable();
+    if (!from.equals(DateUtils.truncate(from, MONTH))) {
+      from = DateUtils.addMonths(DateUtils.truncate(from, MONTH), 1);
+      coverable = false;
+    }
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(from);
+    int fromMonth = cal.get(MONTH);
+    int beginOffset = (12 - fromMonth % 12) % 12;
+    int endOffset = (monthlyCoveringInfo.getCountBetween() - beginOffset) % 12;
+    if (beginOffset > 0 || endOffset > 0) {
+      coverable = false;
+    }
+    return new CoveringInfo((monthlyCoveringInfo.getCountBetween() - beginOffset - endOffset) / 12, coverable);
+  }
+
+  public static CoveringInfo getWeeklyCoveringInfo(Date from, Date to) {
+    int dayDiff = 0;
+    Date tmpFrom = from;
+    while (tmpFrom.before(to)) {
+      tmpFrom = DateUtils.addDays(tmpFrom, 1);
+      dayDiff++;
+    }
+
+    if (dayDiff < 7) {
+      return new CoveringInfo(0, false);
+    }
+
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(from);
+    int fromDay = cal.get(Calendar.DAY_OF_WEEK);
+    cal.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY);
+    Date fromWeekStartDate = cal.getTime();
+    boolean coverable = dayDiff % 7 == 0;
+    if (fromWeekStartDate.before(from)) {
+      // Count from the start of next week
+      dayDiff -= (cal.getActualMaximum(Calendar.DAY_OF_WEEK) - (fromDay - Calendar.SUNDAY));
+      coverable = false;
+    }
+
+    return new CoveringInfo(dayDiff / 7, coverable);
+  }
+
+  static CoveringInfo getCoveringInfo(Date from, Date to, UpdatePeriod interval) {
+    switch (interval) {
+    case SECONDLY:
+    case CONTINUOUS:
+      return getMilliSecondCoveringInfo(from, to, 1000);
+    case MINUTELY:
+    case HOURLY:
+    case DAILY:
+      return getMilliSecondCoveringInfo(from, to, interval.weight());
+    case WEEKLY:
+      return getWeeklyCoveringInfo(from, to);
+    case MONTHLY:
+      return getMonthlyCoveringInfo(from, to);
+    case QUARTERLY:
+      return getQuarterlyCoveringInfo(from, to);
+    case YEARLY:
+      return getYearlyCoveringInfo(from, to);
+    default:
+      return new CoveringInfo(0, false);
+    }
+  }
+
+  private static CoveringInfo getMilliSecondCoveringInfo(Date from, Date to, long millisInInterval) {
+    long diff = to.getTime() - from.getTime();
+    return new CoveringInfo((int) (diff / millisInInterval), diff % millisInInterval == 0);
+  }
+
+  static boolean isCoverableBy(Date from, Date to, Set<UpdatePeriod> intervals) {
+    for (UpdatePeriod period : intervals) {
+      if (getCoveringInfo(from, to, period).isCoverable()) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  public static int getTimeDiff(Date fromDate, Date toDate, UpdatePeriod updatePeriod) {
+    if (fromDate.before(toDate)) {
+      return getCoveringInfo(fromDate, toDate, updatePeriod).getCountBetween();
+    } else {
+      return -getCoveringInfo(toDate, fromDate, updatePeriod).getCountBetween();
+    }
+  }
+
+  @Data
+  public static class CoveringInfo {
+    int countBetween;
+    boolean coverable;
+
+    public CoveringInfo(int countBetween, boolean coverable) {
+      this.countBetween = countBetween;
+      this.coverable = coverable;
+    }
+  }
+
+  @EqualsAndHashCode
+  public static class TimeDiff {
+    int quantity;
+    UpdatePeriod updatePeriod;
+
+    private TimeDiff(int quantity, UpdatePeriod updatePeriod) {
+      this.quantity = quantity;
+      this.updatePeriod = updatePeriod;
+    }
+
+    public static TimeDiff parseFrom(String diffStr) throws LensException {
+      // Get the relative diff part to get eventual date based on now.
+      Matcher qtyMatcher = P_QUANTITY.matcher(diffStr);
+      int qty = 1;
+      if (qtyMatcher.find()) {
+        qty = Integer.parseInt(qtyMatcher.group());
+      }
+
+      Matcher signageMatcher = P_SIGNAGE.matcher(diffStr);
+      if (signageMatcher.find()) {
+        String sign = signageMatcher.group();
+        if ("-".equals(sign)) {
+          qty = -qty;
+        }
+      }
+
+      Matcher unitMatcher = P_UNIT.matcher(diffStr);
+      if (unitMatcher.find()) {
+        return new TimeDiff(qty, UpdatePeriod.fromUnitName(unitMatcher.group().toLowerCase()));
+      }
+      return new TimeDiff(0, UpdatePeriod.CONTINUOUS);
+    }
+
+    public Date offsetFrom(Date time) {
+      return DateUtils.add(time, updatePeriod.calendarField(), quantity);
+    }
+
+    public Date negativeOffsetFrom(Date time) {
+      return DateUtils.add(time, updatePeriod.calendarField(), -quantity);
+    }
+  }
+
+}

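A usage sketch for the relocated DateUtil, assuming lens-cube is on the classpath and that DAILY's unit name is "day" (per UpdatePeriod.fromUnitName): relative expressions anchor at the supplied now, truncate to the named granularity, then apply the signed offset.

import java.util.Date;

import org.apache.lens.cube.metadata.DateUtil;
import org.apache.lens.server.api.error.LensException;

public class DateUtilSketch {
  public static void main(String[] args) throws LensException {
    Date now = new Date();

    // Absolute: partial dates are padded down to milliseconds before parsing.
    Date day = DateUtil.resolveDate("2015-11-09", now);

    // Relative: truncate 'now' to the day, then step back one week.
    Date weekAgo = DateUtil.resolveDate("now.day -7day", now);

    System.out.println(day);
    System.out.println(weekAgo);
  }
}
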
http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRange.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRange.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRange.java
index 01069a5..2e85111 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRange.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRange.java
@@ -21,7 +21,6 @@ package org.apache.lens.cube.metadata;
 import java.util.Date;
 import java.util.Iterator;
 
-import org.apache.lens.cube.parse.DateUtil;
 import org.apache.lens.server.api.error.LensException;
 
 import lombok.Data;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimeRange.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimeRange.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimeRange.java
new file mode 100644
index 0000000..bf6cc5c
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimeRange.java
@@ -0,0 +1,219 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.metadata;
+
+import static org.apache.lens.cube.metadata.DateUtil.ABSDATE_PARSER;
+
+import java.util.Calendar;
+import java.util.Date;
+import java.util.TreeSet;
+
+import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.server.api.error.LensException;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+
+import lombok.Data;
+import lombok.Getter;
+
+/**
+ * Timerange data structure
+ */
+@JsonIgnoreProperties({"astNode", "parent"})
+@Data
+public class TimeRange {
+  private String partitionColumn;
+  private Date toDate;
+  private Date fromDate;
+  private ASTNode astNode;
+  private ASTNode parent;
+  private int childIndex;
+
+  public boolean isCoverableBy(TreeSet<UpdatePeriod> updatePeriods) {
+    return DateUtil.isCoverableBy(fromDate, toDate, updatePeriods);
+  }
+
+
+  public static class TimeRangeBuilder {
+    private final TimeRange range;
+
+    public TimeRangeBuilder() {
+      this.range = new TimeRange();
+    }
+
+    public TimeRangeBuilder partitionColumn(String col) {
+      range.partitionColumn = col;
+      return this;
+    }
+
+    public TimeRangeBuilder toDate(Date to) {
+      range.toDate = to;
+      return this;
+    }
+
+    public TimeRangeBuilder fromDate(Date from) {
+      range.fromDate = from;
+      return this;
+    }
+
+    public TimeRangeBuilder astNode(ASTNode node) {
+      range.astNode = node;
+      return this;
+    }
+
+    public TimeRangeBuilder parent(ASTNode parent) {
+      range.parent = parent;
+      return this;
+    }
+
+    public TimeRangeBuilder childIndex(int childIndex) {
+      range.childIndex = childIndex;
+      return this;
+    }
+
+    public TimeRange build() {
+      return range;
+    }
+  }
+
+  public static TimeRangeBuilder getBuilder() {
+    return new TimeRangeBuilder();
+  }
+
+  private TimeRange() {
+
+  }
+
+  public void validate() throws LensException {
+    if (partitionColumn == null || fromDate == null || toDate == null || fromDate.equals(toDate)) {
+      throw new LensException(LensCubeErrorCode.INVALID_TIME_RANGE.getLensErrorInfo());
+    }
+
+    if (fromDate.after(toDate)) {
+      throw new LensException(LensCubeErrorCode.FROM_AFTER_TO.getLensErrorInfo(),
+          fromDate.toString(), toDate.toString());
+    }
+  }
+
+  public String toTimeDimWhereClause() {
+    return toTimeDimWhereClause(null, partitionColumn);
+  }
+
+  public String toTimeDimWhereClause(String prefix, String column) {
+    if (StringUtils.isNotBlank(column)) {
+      column = prefix + "." + column;
+    }
+    return new StringBuilder()
+      .append(column).append(" >= '").append(DateUtil.HIVE_QUERY_DATE_PARSER.get().format(fromDate)).append("'")
+      .append(" AND ")
+      .append(column).append(" < '").append(DateUtil.HIVE_QUERY_DATE_PARSER.get().format(toDate)).append("'")
+      .toString();
+  }
+
+  @Override
+  public String toString() {
+    return partitionColumn + " [" + ABSDATE_PARSER.get().format(fromDate) + " to "
+      + ABSDATE_PARSER.get().format(toDate) + ")";
+  }
+
+  /** Iterable from fromDate (inclusive) to toDate (exclusive), stepping by increment units of updatePeriod. */
+  public static Iterable iterable(Date fromDate, Date toDate, UpdatePeriod updatePeriod, int increment) {
+    return TimeRange.getBuilder().fromDate(fromDate).toDate(toDate).build().iterable(updatePeriod, increment);
+  }
+
+  /** Iterable from fromDate (inclusive), stepping by increment units of updatePeriod, numIters times. */
+  public static Iterable iterable(Date fromDate, int numIters, UpdatePeriod updatePeriod, int increment) {
+    return TimeRange.getBuilder().fromDate(fromDate).build().iterable(updatePeriod, numIters, increment);
+  }
+
+  private Iterable iterable(UpdatePeriod updatePeriod, int numIters, int increment) {
+    return new Iterable(updatePeriod, numIters, increment);
+  }
+
+  public Iterable iterable(UpdatePeriod updatePeriod, int increment) {
+    if (increment == 0) {
+      throw new UnsupportedOperationException("Can't iterate if iteration increment is zero");
+    }
+    long numIters = DateUtil.getTimeDiff(fromDate, toDate, updatePeriod) / increment;
+    return new Iterable(updatePeriod, numIters, increment);
+  }
+
+  /** Iterable so that foreach is supported */
+  public class Iterable implements java.lang.Iterable<Date> {
+    private UpdatePeriod updatePeriod;
+    private long numIters;
+    private int increment;
+
+    public Iterable(UpdatePeriod updatePeriod, long numIters, int increment) {
+      this.updatePeriod = updatePeriod;
+      this.numIters = numIters;
+      if (this.numIters < 0) {
+        this.numIters = 0;
+      }
+      this.increment = increment;
+    }
+
+    @Override
+    public Iterator iterator() {
+      return new Iterator();
+    }
+
+    public class Iterator implements java.util.Iterator<Date> {
+      Calendar calendar;
+      // Tracks the index of the item returned after the last next() call.
+      // Index here refers to the index if the iterator were iterated and converted into a list.
+      @Getter
+      int counter = -1;
+
+      public Iterator() {
+        calendar = Calendar.getInstance();
+        calendar.setTime(fromDate);
+      }
+
+      @Override
+      public boolean hasNext() {
+        return counter < numIters - 1;
+      }
+
+      @Override
+      public Date next() {
+        Date cur = calendar.getTime();
+        updatePeriod.increment(calendar, increment);
+        counter++;
+        return cur;
+      }
+
+      public Date peekNext() {
+        return calendar.getTime();
+      }
+
+      @Override
+      public void remove() {
+        throw new UnsupportedOperationException("remove from timerange iterator");
+      }
+
+      public long getNumIters() {
+        return numIters;
+      }
+    }
+  }
+}

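A usage sketch for the relocated TimeRange, assuming lens-cube on the classpath; the static iterable() helpers turn iteration over [fromDate, toDate) into a plain for-each:

import java.util.Calendar;
import java.util.Date;

import org.apache.lens.cube.metadata.TimeRange;
import org.apache.lens.cube.metadata.UpdatePeriod;

public class TimeRangeSketch {
  public static void main(String[] args) {
    Calendar cal = Calendar.getInstance();
    cal.set(2015, Calendar.NOVEMBER, 1, 0, 0, 0);
    cal.set(Calendar.MILLISECOND, 0);
    Date from = cal.getTime();
    cal.add(Calendar.DAY_OF_MONTH, 7);
    Date to = cal.getTime();

    // Seven iterations: from is included, to is excluded.
    for (Date d : TimeRange.iterable(from, to, UpdatePeriod.DAILY, 1)) {
      System.out.println(d);
    }
  }
}
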
http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java
index 4c76a69..4238066 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java
@@ -278,7 +278,7 @@ public enum UpdatePeriod implements Named {
       return cal.getTime();
     case QUARTERLY:
       Date dt = DateUtils.truncate(date, this.calendarField());
-      dt.setMonth(dt.getMonth() - dt.getMonth() % 3);
+      dt.setMonth(dt.getMonth() - (dt.getMonth() % 3));
       return dt;
     default:
       return DateUtils.truncate(date, this.calendarField());
@@ -299,6 +299,86 @@ public enum UpdatePeriod implements Named {
     calendar.add(calendarField(), increment);
   }
 
+  public Date getCeilDate(Date date) {
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(date);
+    boolean hasFraction = false;
+    switch (this) {
+    case YEARLY:
+      if (cal.get(MONTH) != 0) {
+        hasFraction = true;
+        break;
+      }
+    case MONTHLY:
+      if (cal.get(DAY_OF_MONTH) != 1) {
+        hasFraction = true;
+        break;
+      }
+    case DAILY:
+      if (cal.get(Calendar.HOUR_OF_DAY) != 0) {
+        hasFraction = true;
+        break;
+      }
+    case HOURLY:
+      if (cal.get(Calendar.MINUTE) != 0) {
+        hasFraction = true;
+        break;
+      }
+    case MINUTELY:
+      if (cal.get(Calendar.SECOND) != 0) {
+        hasFraction = true;
+        break;
+      }
+    case SECONDLY:
+    case CONTINUOUS:
+      if (cal.get(Calendar.MILLISECOND) != 0) {
+        hasFraction = true;
+      }
+      break;
+    case WEEKLY:
+      if (cal.get(Calendar.DAY_OF_WEEK) != 1) {
+        hasFraction = true;
+        break;
+      }
+    }
+
+    if (hasFraction) {
+      cal.add(this.calendarField(), 1);
+      return getFloorDate(cal.getTime());
+    } else {
+      return date;
+    }
+  }
+
+  public Date getFloorDate(Date date) {
+    Calendar cal = Calendar.getInstance();
+    cal.setTime(date);
+    switch(this) {
+    case WEEKLY:
+      cal.set(Calendar.DAY_OF_WEEK, 1);
+      break;
+    }
+    switch (this) {
+    case YEARLY:
+      cal.set(MONTH, 0);
+    case MONTHLY:
+      cal.set(DAY_OF_MONTH, 1);
+    case WEEKLY:
+      // Already covered, only here for fall through cases
+    case DAILY:
+      cal.set(Calendar.HOUR_OF_DAY, 0);
+    case HOURLY:
+      cal.set(Calendar.MINUTE, 0);
+    case MINUTELY:
+      cal.set(Calendar.SECOND, 0);
+    case SECONDLY:
+    case CONTINUOUS:
+      cal.set(Calendar.MILLISECOND, 0);
+      break;
+    }
+    return cal.getTime();
+  }
+
   public static class UpdatePeriodComparator implements Comparator<UpdatePeriod> {
     @Override
     public int compare(UpdatePeriod o1, UpdatePeriod o2) {
@@ -306,7 +386,7 @@ public enum UpdatePeriod implements Named {
         return -1;
       } else if (o1 != null && o2 == null) {
         return 1;
-      } else if (o1 == null && o2 == null) {
+      } else if (o1 == null) {
         return 0;
       } else {
         if (o1.weight > o2.weight) {

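The new getFloorDate/getCeilDate lean on deliberate switch fall-through: flooring to MONTHLY also zeroes the day, hour, minute, second and millisecond fields, while ceiling rounds up whenever any finer-grained field is non-zero. A usage sketch, assuming lens-cube on the classpath:

import java.util.Calendar;
import java.util.Date;

import org.apache.lens.cube.metadata.UpdatePeriod;

public class FloorCeilSketch {
  public static void main(String[] args) {
    Calendar cal = Calendar.getInstance();
    cal.set(2015, Calendar.NOVEMBER, 9, 16, 27, 43);
    Date d = cal.getTime();

    // Floor truncates everything below the period's granularity (here: Nov 1, midnight).
    System.out.println(UpdatePeriod.MONTHLY.getFloorDate(d));
    // Ceil rounds up to the next boundary when any finer field is set (here: Dec 1, midnight).
    System.out.println(UpdatePeriod.MONTHLY.getCeilDate(d));
  }
}
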
http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java
index 9d5e264..c588dc7 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/EndsAndHolesPartitionTimeline.java
@@ -23,8 +23,8 @@ import java.util.*;
 
 import org.apache.lens.cube.metadata.MetastoreUtil;
 import org.apache.lens.cube.metadata.TimePartition;
+import org.apache.lens.cube.metadata.TimeRange;
 import org.apache.lens.cube.metadata.UpdatePeriod;
-import org.apache.lens.cube.parse.TimeRange;
 import org.apache.lens.server.api.error.LensException;
 
 import com.google.common.base.Strings;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
index 7f81461..1884bde 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
@@ -22,10 +22,7 @@ import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 
 import java.util.*;
 
-import org.apache.lens.cube.metadata.AbstractCubeTable;
-import org.apache.lens.cube.metadata.CubeFactTable;
-import org.apache.lens.cube.metadata.CubeInterface;
-import org.apache.lens.cube.metadata.FactPartition;
+import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.HQLParser.ASTNodeVisitor;
 import org.apache.lens.cube.parse.HQLParser.TreeNode;
 import org.apache.lens.server.api.error.LensException;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
index 9c8b5b9..78fb21d 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
@@ -22,6 +22,8 @@ import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTable
 
 import java.util.*;
 
+import org.apache.lens.cube.metadata.TimeRange;
+
 import org.codehaus.jackson.annotate.JsonWriteNullProperties;
 
 import com.google.common.collect.Lists;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
deleted file mode 100644
index cd05c68..0000000
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
+++ /dev/null
@@ -1,456 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import static java.util.Calendar.*;
-
-import java.text.DateFormat;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.Set;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.apache.lens.cube.error.LensCubeErrorCode;
-import org.apache.lens.cube.metadata.UpdatePeriod;
-import org.apache.lens.server.api.error.LensException;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.time.DateUtils;
-
-import lombok.Data;
-import lombok.EqualsAndHashCode;
-import lombok.extern.slf4j.Slf4j;
-
-@Slf4j
-public final class DateUtil {
-  private DateUtil() {
-
-  }
-  /*
-   * NOW -> new java.util.Date() NOW-7DAY -> a date one week earlier NOW (+-)
-   * <NUM>UNIT or Hardcoded dates in DD-MM-YYYY hh:mm:ss,sss
-   */
-  public static final String UNIT;
-
-  static {
-    StringBuilder sb = new StringBuilder();
-    String sep = "";
-    for (UpdatePeriod up : UpdatePeriod.values()) {
-      sb.append(sep).append(up.getUnitName());
-      sep = "|";
-    }
-    UNIT = sb.toString();
-  }
-
-  public static final String GRANULARITY = "\\.(" + UNIT + ")";
-  public static final String RELATIVE = "(now)(" + GRANULARITY + ")?";
-  public static final Pattern P_RELATIVE = Pattern.compile(RELATIVE, Pattern.CASE_INSENSITIVE);
-
-  public static final String WSPACE = "\\s+";
-  public static final String OPTIONAL_WSPACE = "\\s*";
-
-  public static final String SIGNAGE = "\\+|\\-";
-  public static final Pattern P_SIGNAGE = Pattern.compile(SIGNAGE);
-
-  public static final String QUANTITY = "\\d+";
-  public static final Pattern P_QUANTITY = Pattern.compile(QUANTITY);
-
-  public static final Pattern P_UNIT = Pattern.compile(UNIT, Pattern.CASE_INSENSITIVE);
-
-  public static final String RELDATE_VALIDATOR_STR = RELATIVE + OPTIONAL_WSPACE + "((" + SIGNAGE + ")" + "("
-    + WSPACE + ")?" + "(" + QUANTITY + ")" + OPTIONAL_WSPACE + "(" + UNIT + "))?" + "(s?)";
-
-  public static final Pattern RELDATE_VALIDATOR = Pattern.compile(RELDATE_VALIDATOR_STR, Pattern.CASE_INSENSITIVE);
-
-  public static final String YEAR_FMT = "[0-9]{4}";
-  public static final String MONTH_FMT = YEAR_FMT + "-[0-9]{2}";
-  public static final String DAY_FMT = MONTH_FMT + "-[0-9]{2}";
-  public static final String HOUR_FMT = DAY_FMT + "-[0-9]{2}";
-  public static final String MINUTE_FMT = HOUR_FMT + ":[0-9]{2}";
-  public static final String SECOND_FMT = MINUTE_FMT + ":[0-9]{2}";
-  public static final String MILLISECOND_FMT = SECOND_FMT + ",[0-9]{3}";
-  public static final String ABSDATE_FMT = "yyyy-MM-dd-HH:mm:ss,SSS";
-  public static final String HIVE_QUERY_DATE_FMT = "yyyy-MM-dd HH:mm:ss";
-
-  public static final ThreadLocal<DateFormat> ABSDATE_PARSER =
-    new ThreadLocal<DateFormat>() {
-      @Override
-      protected SimpleDateFormat initialValue() {
-        return new SimpleDateFormat(ABSDATE_FMT);
-      }
-    };
-  public static final ThreadLocal<DateFormat> HIVE_QUERY_DATE_PARSER =
-    new ThreadLocal<DateFormat>() {
-      @Override
-      protected SimpleDateFormat initialValue() {
-        return new SimpleDateFormat(HIVE_QUERY_DATE_FMT);
-      }
-    };
-
-  public static String getAbsDateFormatString(String str) {
-    if (str.matches(YEAR_FMT)) {
-      return str + "-01-01-00:00:00,000";
-    } else if (str.matches(MONTH_FMT)) {
-      return str + "-01-00:00:00,000";
-    } else if (str.matches(DAY_FMT)) {
-      return str + "-00:00:00,000";
-    } else if (str.matches(HOUR_FMT)) {
-      return str + ":00:00,000";
-    } else if (str.matches(MINUTE_FMT)) {
-      return str + ":00,000";
-    } else if (str.matches(SECOND_FMT)) {
-      return str + ",000";
-    } else if (str.matches(MILLISECOND_FMT)) {
-      return str;
-    }
-    throw new IllegalArgumentException("Unsupported formatting for date" + str);
-  }
-
-  public static Date resolveDate(String str, Date now) throws LensException {
-    if (RELDATE_VALIDATOR.matcher(str).matches()) {
-      return resolveRelativeDate(str, now);
-    } else {
-      return resolveAbsoluteDate(str);
-    }
-  }
-
-  public static String relativeToAbsolute(String relative) throws LensException {
-    return relativeToAbsolute(relative, new Date());
-  }
-
-  public static String relativeToAbsolute(String relative, Date now) throws LensException {
-    if (RELDATE_VALIDATOR.matcher(relative).matches()) {
-      return ABSDATE_PARSER.get().format(resolveRelativeDate(relative, now));
-    } else {
-      return relative;
-    }
-  }
-
-  public static Date resolveAbsoluteDate(String str) throws LensException {
-    try {
-      return ABSDATE_PARSER.get().parse(getAbsDateFormatString(str));
-    } catch (ParseException e) {
-      log.error("Invalid date format. expected only {} date provided:{}", ABSDATE_FMT, str, e);
-      throw new LensException(LensCubeErrorCode.WRONG_TIME_RANGE_FORMAT.getLensErrorInfo(), ABSDATE_FMT, str);
-    }
-  }
-
-  public static Date resolveRelativeDate(String str, Date now) throws LensException {
-    if (StringUtils.isBlank(str)) {
-      throw new LensException(LensCubeErrorCode.NULL_DATE_VALUE.getLensErrorInfo());
-    }
-
-    // Resolve NOW with proper granularity
-    Calendar calendar = Calendar.getInstance();
-    calendar.setTime(now);
-
-    str = str.toLowerCase();
-    Matcher relativeMatcher = P_RELATIVE.matcher(str);
-    if (relativeMatcher.find()) {
-      String nowWithGranularity = relativeMatcher.group();
-      nowWithGranularity = nowWithGranularity.replaceAll("now", "");
-      nowWithGranularity = nowWithGranularity.replaceAll("\\.", "");
-
-      Matcher granularityMatcher = P_UNIT.matcher(nowWithGranularity);
-      if (granularityMatcher.find()) {
-        calendar = UpdatePeriod.fromUnitName(granularityMatcher.group().toLowerCase()).truncate(calendar);
-      }
-    }
-
-    // Get rid of 'now' part and whitespace
-    String diffStr = str.replaceAll(RELATIVE, "").replace(WSPACE, "");
-    TimeDiff diff = TimeDiff.parseFrom(diffStr);
-    return diff.offsetFrom(calendar.getTime());
-  }
-
-  public static Date getCeilDate(Date fromDate, UpdatePeriod interval) {
-    Calendar cal = Calendar.getInstance();
-    cal.setTime(fromDate);
-    boolean hasFraction = false;
-    switch (interval) {
-    case YEARLY:
-      if (cal.get(MONTH) != 0) {
-        hasFraction = true;
-        break;
-      }
-    case MONTHLY:
-      if (cal.get(DAY_OF_MONTH) != 1) {
-        hasFraction = true;
-        break;
-      }
-    case DAILY:
-      if (cal.get(Calendar.HOUR_OF_DAY) != 0) {
-        hasFraction = true;
-        break;
-      }
-    case HOURLY:
-      if (cal.get(Calendar.MINUTE) != 0) {
-        hasFraction = true;
-        break;
-      }
-    case MINUTELY:
-      if (cal.get(Calendar.SECOND) != 0) {
-        hasFraction = true;
-        break;
-      }
-    case SECONDLY:
-    case CONTINUOUS:
-      if (cal.get(Calendar.MILLISECOND) != 0) {
-        hasFraction = true;
-      }
-      break;
-    case WEEKLY:
-      if (cal.get(Calendar.DAY_OF_WEEK) != 1) {
-        hasFraction = true;
-        break;
-      }
-    }
-
-    if (hasFraction) {
-      cal.add(interval.calendarField(), 1);
-      return getFloorDate(cal.getTime(), interval);
-    } else {
-      return fromDate;
-    }
-  }
-
-  public static Date getFloorDate(Date toDate, UpdatePeriod interval) {
-    Calendar cal = Calendar.getInstance();
-    cal.setTime(toDate);
-    switch (interval) {
-    case YEARLY:
-      cal.set(MONTH, 0);
-    case MONTHLY:
-      cal.set(DAY_OF_MONTH, 1);
-    case DAILY:
-      cal.set(Calendar.HOUR_OF_DAY, 0);
-    case HOURLY:
-      cal.set(Calendar.MINUTE, 0);
-    case MINUTELY:
-      cal.set(Calendar.SECOND, 0);
-    case SECONDLY:
-    case CONTINUOUS:
-      cal.set(Calendar.MILLISECOND, 0);
-      break;
-    case WEEKLY:
-      cal.set(Calendar.DAY_OF_WEEK, 1);
-      cal.set(Calendar.HOUR_OF_DAY, 0);
-      cal.set(Calendar.MINUTE, 0);
-      cal.set(Calendar.SECOND, 0);
-      cal.set(Calendar.MILLISECOND, 0);
-      break;
-    }
-    return cal.getTime();
-  }
-
-  public static CoveringInfo getMonthlyCoveringInfo(Date from, Date to) {
-    // Move 'from' to end of month, unless its the first day of month
-    boolean coverable = true;
-    if (!from.equals(DateUtils.truncate(from, MONTH))) {
-      from = DateUtils.addMonths(DateUtils.truncate(from, MONTH), 1);
-      coverable = false;
-    }
-
-    // Move 'to' to beginning of next month, unless its the first day of the month
-    if (!to.equals(DateUtils.truncate(to, MONTH))) {
-      to = DateUtils.truncate(to, MONTH);
-      coverable = false;
-    }
-
-    int months = 0;
-    while (from.before(to)) {
-      from = DateUtils.addMonths(from, 1);
-      months++;
-    }
-    return new CoveringInfo(months, coverable);
-  }
-
-  public static CoveringInfo getQuarterlyCoveringInfo(Date from, Date to) {
-    CoveringInfo monthlyCoveringInfo = getMonthlyCoveringInfo(from, to);
-    if (monthlyCoveringInfo.getCountBetween() < 3) {
-      return new CoveringInfo(0, false);
-    }
-    boolean coverable = monthlyCoveringInfo.isCoverable();
-    if (!from.equals(DateUtils.truncate(from, MONTH))) {
-      from = DateUtils.addMonths(DateUtils.truncate(from, MONTH), 1);
-      coverable = false;
-    }
-    Calendar cal = Calendar.getInstance();
-    cal.setTime(from);
-    int fromMonth = cal.get(MONTH);
-
-    // Offset needed to align 'from' with the next quarter boundary
-    int beginOffset = (3 - fromMonth % 3) % 3;
-    int endOffset = (monthlyCoveringInfo.getCountBetween() - beginOffset) % 3;
-    if (beginOffset > 0 || endOffset > 0) {
-      coverable = false;
-    }
-    return new CoveringInfo((monthlyCoveringInfo.getCountBetween() - beginOffset - endOffset) / 3, coverable);
-  }
-
-
-  public static CoveringInfo getYearlyCoveringInfo(Date from, Date to) {
-    CoveringInfo monthlyCoveringInfo = getMonthlyCoveringInfo(from, to);
-    if (monthlyCoveringInfo.getCountBetween() < 12) {
-      return new CoveringInfo(0, false);
-    }
-    boolean coverable = monthlyCoveringInfo.isCoverable();
-    if (!from.equals(DateUtils.truncate(from, MONTH))) {
-      from = DateUtils.addMonths(DateUtils.truncate(from, MONTH), 1);
-      coverable = false;
-    }
-    Calendar cal = Calendar.getInstance();
-    cal.setTime(from);
-    int fromMonth = cal.get(MONTH);
-    int beginOffset = (12 - fromMonth % 12) % 12;
-    int endOffset = (monthlyCoveringInfo.getCountBetween() - beginOffset) % 12;
-    if (beginOffset > 0 || endOffset > 0) {
-      coverable = false;
-    }
-    return new CoveringInfo((monthlyCoveringInfo.getCountBetween() - beginOffset - endOffset) / 12, coverable);
-  }
-
-  public static CoveringInfo getWeeklyCoveringInfo(Date from, Date to) {
-    int dayDiff = 0;
-    Date tmpFrom = from;
-    while (tmpFrom.before(to)) {
-      tmpFrom = DateUtils.addDays(tmpFrom, 1);
-      dayDiff++;
-    }
-
-    if (dayDiff < 7) {
-      return new CoveringInfo(0, false);
-    }
-
-    Calendar cal = Calendar.getInstance();
-    cal.setTime(from);
-    int fromDay = cal.get(Calendar.DAY_OF_WEEK);
-    cal.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY);
-    Date fromWeekStartDate = cal.getTime();
-    boolean coverable = dayDiff % 7 == 0;
-    if (fromWeekStartDate.before(from)) {
-      // Count from the start of next week
-      dayDiff -= (cal.getActualMaximum(Calendar.DAY_OF_WEEK) - (fromDay - Calendar.SUNDAY));
-      coverable = false;
-    }
-
-    return new CoveringInfo(dayDiff / 7, coverable);
-  }
-
-  static CoveringInfo getCoveringInfo(Date from, Date to, UpdatePeriod interval) {
-    switch (interval) {
-    case SECONDLY:
-    case CONTINUOUS:
-      return getMilliSecondCoveringInfo(from, to, 1000);
-    case MINUTELY:
-    case HOURLY:
-    case DAILY:
-      return getMilliSecondCoveringInfo(from, to, interval.weight());
-    case WEEKLY:
-      return getWeeklyCoveringInfo(from, to);
-    case MONTHLY:
-      return getMonthlyCoveringInfo(from, to);
-    case QUARTERLY:
-      return getQuarterlyCoveringInfo(from, to);
-    case YEARLY:
-      return getYearlyCoveringInfo(from, to);
-    default:
-      return new CoveringInfo(0, false);
-    }
-  }
-
-  private static CoveringInfo getMilliSecondCoveringInfo(Date from, Date to, long millisInInterval) {
-    long diff = to.getTime() - from.getTime();
-    return new CoveringInfo((int) (diff / millisInInterval), diff % millisInInterval == 0);
-  }
-
-  static boolean isCoverableBy(Date from, Date to, Set<UpdatePeriod> intervals) {
-    for (UpdatePeriod period : intervals) {
-      if (getCoveringInfo(from, to, period).isCoverable()) {
-        return true;
-      }
-    }
-    return false;
-  }
-
-  public static int getTimeDiff(Date fromDate, Date toDate, UpdatePeriod updatePeriod) {
-    if (fromDate.before(toDate)) {
-      return getCoveringInfo(fromDate, toDate, updatePeriod).getCountBetween();
-    } else {
-      return -getCoveringInfo(toDate, fromDate, updatePeriod).getCountBetween();
-    }
-  }
-
-  @Data
-  public static class CoveringInfo {
-    int countBetween;
-    boolean coverable;
-
-    public CoveringInfo(int countBetween, boolean coverable) {
-      this.countBetween = countBetween;
-      this.coverable = coverable;
-    }
-  }
-
-  @EqualsAndHashCode
-  public static class TimeDiff {
-    int quantity;
-    UpdatePeriod updatePeriod;
-
-    private TimeDiff(int quantity, UpdatePeriod updatePeriod) {
-      this.quantity = quantity;
-      this.updatePeriod = updatePeriod;
-    }
-
-    public static TimeDiff parseFrom(String diffStr) throws LensException {
-      // Parse quantity, sign and unit from the relative diff string.
-      Matcher qtyMatcher = P_QUANTITY.matcher(diffStr);
-      int qty = 1;
-      if (qtyMatcher.find()) {
-        qty = Integer.parseInt(qtyMatcher.group());
-      }
-
-      Matcher signageMatcher = P_SIGNAGE.matcher(diffStr);
-      if (signageMatcher.find()) {
-        String sign = signageMatcher.group();
-        if ("-".equals(sign)) {
-          qty = -qty;
-        }
-      }
-
-      Matcher unitMatcher = P_UNIT.matcher(diffStr);
-      if (unitMatcher.find()) {
-        return new TimeDiff(qty, UpdatePeriod.fromUnitName(unitMatcher.group().toLowerCase()));
-      }
-      return new TimeDiff(0, UpdatePeriod.CONTINUOUS);
-    }
-
-    public Date offsetFrom(Date time) {
-      return DateUtils.add(time, updatePeriod.calendarField(), quantity);
-    }
-
-    public Date negativeOffsetFrom(Date time) {
-      return DateUtils.add(time, updatePeriod.calendarField(), -quantity);
-    }
-  }
-
-}
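
For orientation: the block deleted above implements Lens's relative-date grammar - a
"now" anchor, an optional ".granularity" truncation, and an optional signed offset.
A minimal sketch of the intended semantics, assuming DateUtil.resolveDate(String, Date)
(the helper StorageTableResolver calls further down) and its post-refactor package:

    // Hedged sketch; method location and exception behavior assumed from the diffs below.
    Date now = new Date();
    // "now.day" truncates to the start of the current day; "- 2 days" then offsets back.
    Date start = DateUtil.resolveDate("now.day - 2 days", now);  // may throw LensException
    Date end = DateUtil.resolveDate("now.day", now);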

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
index 200a48c..776021d 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
@@ -19,18 +19,11 @@
 
 package org.apache.lens.cube.parse;
 
-import static org.apache.hadoop.hive.ql.parse.HiveParser.DOT;
-import static org.apache.hadoop.hive.ql.parse.HiveParser.Identifier;
-import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_TABLE_OR_COL;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 
 import java.util.*;
 
-import org.apache.lens.cube.metadata.AbstractBaseTable;
-import org.apache.lens.cube.metadata.AbstractCubeTable;
-import org.apache.lens.cube.metadata.CubeColumn;
-import org.apache.lens.cube.metadata.CubeInterface;
-import org.apache.lens.cube.metadata.Dimension;
-import org.apache.lens.cube.metadata.ExprColumn;
+import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.ExprColumn.ExprSpec;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.cube.parse.HQLParser.ASTNodeVisitor;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
index 60b2dde..f7271e5 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
@@ -21,7 +21,7 @@ package org.apache.lens.cube.parse;
 import java.util.Map;
 
 import org.apache.lens.cube.metadata.Dimension;
-
+import org.apache.lens.cube.metadata.TimeRange;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index cc8e68c..62cc071 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -18,6 +18,7 @@
  */
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.metadata.DateUtil.WSPACE;
 import static org.apache.lens.cube.metadata.MetastoreUtil.getFactOrDimtableStorageTableName;
 import static org.apache.lens.cube.metadata.MetastoreUtil.getStoragetableEndTimesKey;
 import static org.apache.lens.cube.metadata.MetastoreUtil.getStoragetableStartTimesKey;
@@ -25,7 +26,6 @@ import static org.apache.lens.cube.parse.CandidateTablePruneCause.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode.PART_COL_DOES_NOT_EXIST;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode.RANGE_NOT_ANSWERABLE;
-import static org.apache.lens.cube.parse.DateUtil.WSPACE;
 import static org.apache.lens.cube.parse.StorageUtil.joinWithAnd;
 
 import java.text.DateFormat;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
deleted file mode 100644
index 7be7ace..0000000
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
+++ /dev/null
@@ -1,220 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import static org.apache.lens.cube.parse.DateUtil.ABSDATE_PARSER;
-
-import java.util.Calendar;
-import java.util.Date;
-import java.util.TreeSet;
-
-import org.apache.lens.cube.error.LensCubeErrorCode;
-import org.apache.lens.cube.metadata.UpdatePeriod;
-import org.apache.lens.server.api.error.LensException;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
-
-import lombok.Data;
-import lombok.Getter;
-
-/**
- * Timerange data structure
- */
-@JsonIgnoreProperties({"astNode", "parent"})
-@Data
-public class TimeRange {
-  private String partitionColumn;
-  private Date toDate;
-  private Date fromDate;
-  private ASTNode astNode;
-  private ASTNode parent;
-  private int childIndex;
-
-  public boolean isCoverableBy(TreeSet<UpdatePeriod> updatePeriods) {
-    return DateUtil.isCoverableBy(fromDate, toDate, updatePeriods);
-  }
-
-
-  public static class TimeRangeBuilder {
-    private final TimeRange range;
-
-    public TimeRangeBuilder() {
-      this.range = new TimeRange();
-    }
-
-    public TimeRangeBuilder partitionColumn(String col) {
-      range.partitionColumn = col;
-      return this;
-    }
-
-    public TimeRangeBuilder toDate(Date to) {
-      range.toDate = to;
-      return this;
-    }
-
-    public TimeRangeBuilder fromDate(Date from) {
-      range.fromDate = from;
-      return this;
-    }
-
-    public TimeRangeBuilder astNode(ASTNode node) {
-      range.astNode = node;
-      return this;
-    }
-
-    public TimeRangeBuilder parent(ASTNode parent) {
-      range.parent = parent;
-      return this;
-    }
-
-    public TimeRangeBuilder childIndex(int childIndex) {
-      range.childIndex = childIndex;
-      return this;
-    }
-
-    public TimeRange build() {
-      return range;
-    }
-  }
-
-  public static TimeRangeBuilder getBuilder() {
-    return new TimeRangeBuilder();
-  }
-
-  private TimeRange() {
-
-  }
-
-  public void validate() throws LensException {
-    if (partitionColumn == null || fromDate == null || toDate == null || fromDate.equals(toDate)) {
-      throw new LensException(LensCubeErrorCode.INVALID_TIME_RANGE.getLensErrorInfo());
-    }
-
-    if (fromDate.after(toDate)) {
-      throw new LensException(LensCubeErrorCode.FROM_AFTER_TO.getLensErrorInfo(),
-          fromDate.toString(), toDate.toString());
-    }
-  }
-
-  public String toTimeDimWhereClause() {
-    return toTimeDimWhereClause(null, partitionColumn);
-  }
-
-  public String toTimeDimWhereClause(String prefix, String column) {
-    if (StringUtils.isNotBlank(column)) {
-      column = prefix + "." + column;
-    }
-    return new StringBuilder()
-      .append(column).append(" >= '").append(DateUtil.HIVE_QUERY_DATE_PARSER.get().format(fromDate)).append("'")
-      .append(" AND ")
-      .append(column).append(" < '").append(DateUtil.HIVE_QUERY_DATE_PARSER.get().format(toDate)).append("'")
-      .toString();
-  }
-
-  @Override
-  public String toString() {
-    return partitionColumn + " [" + ABSDATE_PARSER.get().format(fromDate) + " to "
-      + ABSDATE_PARSER.get().format(toDate) + ")";
-  }
-
-  /** Iterable from fromDate (inclusive) to toDate (exclusive), stepping by increment units of updatePeriod */
-  public static Iterable iterable(Date fromDate, Date toDate, UpdatePeriod updatePeriod, int increment) {
-    return TimeRange.getBuilder().fromDate(fromDate).toDate(toDate).build().iterable(updatePeriod, increment);
-  }
-
-  /** Iterable from fromDate (inclusive), stepping numIters times by increment units of updatePeriod */
-  public static Iterable iterable(Date fromDate, int numIters, UpdatePeriod updatePeriod, int increment) {
-    return TimeRange.getBuilder().fromDate(fromDate).build().iterable(updatePeriod, numIters, increment);
-  }
-
-  private Iterable iterable(UpdatePeriod updatePeriod, int numIters, int increment) {
-    return new Iterable(updatePeriod, numIters, increment);
-  }
-
-  public Iterable iterable(UpdatePeriod updatePeriod, int increment) {
-    if (increment == 0) {
-      throw new UnsupportedOperationException("Can't iterate if iteration increment is zero");
-    }
-    long numIters = DateUtil.getTimeDiff(fromDate, toDate, updatePeriod) / increment;
-    return new Iterable(updatePeriod, numIters, increment);
-  }
-
-  /** Iterable so that foreach is supported */
-  public class Iterable implements java.lang.Iterable<Date> {
-    private UpdatePeriod updatePeriod;
-    private long numIters;
-    private int increment;
-
-    public Iterable(UpdatePeriod updatePeriod, long numIters, int increment) {
-      this.updatePeriod = updatePeriod;
-      this.numIters = numIters;
-      if (this.numIters < 0) {
-        this.numIters = 0;
-      }
-      this.increment = increment;
-    }
-
-    @Override
-    public Iterator iterator() {
-      return new Iterator();
-    }
-
-    public class Iterator implements java.util.Iterator<Date> {
-      Calendar calendar;
-      // Tracks the index of the item returned by the last next() call, i.e. the
-      // position that item would have if the iteration were materialized as a list.
-      @Getter
-      int counter = -1;
-
-      public Iterator() {
-        calendar = Calendar.getInstance();
-        calendar.setTime(fromDate);
-      }
-
-      @Override
-      public boolean hasNext() {
-        return counter < numIters - 1;
-      }
-
-      @Override
-      public Date next() {
-        Date cur = calendar.getTime();
-        updatePeriod.increment(calendar, increment);
-        counter++;
-        return cur;
-      }
-
-      public Date peekNext() {
-        return calendar.getTime();
-      }
-
-      @Override
-      public void remove() {
-        throw new UnsupportedOperationException("remove from timerange iterator");
-      }
-
-      public long getNumIters() {
-        return numIters;
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
index f772279..1a83d09 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
@@ -27,10 +27,7 @@ import java.util.*;
 import org.apache.lens.cube.error.ColUnAvailableInTimeRange;
 import org.apache.lens.cube.error.ColUnAvailableInTimeRangeException;
 import org.apache.lens.cube.error.LensCubeErrorCode;
-import org.apache.lens.cube.metadata.AbstractCubeTable;
-import org.apache.lens.cube.metadata.CubeColumn;
-import org.apache.lens.cube.metadata.Dimension;
-import org.apache.lens.cube.metadata.SchemaGraph;
+import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.DenormalizationResolver.ReferencedQueriedColumn;
 import org.apache.lens.server.api.error.LensException;
 

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/metadata/CubeFactTableTest.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/CubeFactTableTest.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/CubeFactTableTest.java
index 2abc6d0..25eaaef 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/CubeFactTableTest.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/CubeFactTableTest.java
@@ -26,7 +26,6 @@ import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.lens.cube.parse.DateUtil;
 import org.apache.lens.server.api.error.LensException;
 
 import org.testng.annotations.DataProvider;

http://git-wip-us.apache.org/repos/asf/lens/blob/7c7c86da/lens-cube/src/test/java/org/apache/lens/cube/metadata/DateFactory.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/DateFactory.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/DateFactory.java
new file mode 100644
index 0000000..87e4ce3
--- /dev/null
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/DateFactory.java
@@ -0,0 +1,196 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.metadata;
+
+
+import static org.apache.lens.cube.metadata.UpdatePeriod.*;
+
+import java.text.DateFormat;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.HashMap;
+
+public class DateFactory {
+  private DateFactory() {
+
+  }
+
+  public static class DateOffsetProvider extends HashMap<Integer, Date> {
+    private final UpdatePeriod updatePeriod;
+    Calendar calendar = Calendar.getInstance();
+
+    public DateOffsetProvider(UpdatePeriod updatePeriod) {
+      this(updatePeriod, false);
+    }
+
+    public DateOffsetProvider(UpdatePeriod updatePeriod, boolean truncate) {
+      this.updatePeriod = updatePeriod;
+      Date date = calendar.getTime();
+      if (truncate) {
+        date = updatePeriod.truncate(date);
+        calendar.setTime(date);
+      }
+      put(0, date);
+    }
+
+    @Override
+    public Date get(Object key) {
+      if (!containsKey(key) && key instanceof Integer) {
+        calendar.setTime(super.get(0));
+        calendar.add(updatePeriod.calendarField(), (Integer) key);
+        put((Integer) key, calendar.getTime());
+      }
+      return super.get(key);
+    }
+  }
+
+  public static class GeneralDateOffsetProvider extends HashMap<UpdatePeriod, DateOffsetProvider> {
+    @Override
+    public DateOffsetProvider get(Object key) {
+      if (!containsKey(key) && key instanceof UpdatePeriod) {
+        UpdatePeriod up = (UpdatePeriod) key;
+        put(up, new DateOffsetProvider(up));
+      }
+      return super.get(key);
+    }
+
+    public Date get(UpdatePeriod updatePeriod, int offset) {
+      return get(updatePeriod).get(offset);
+    }
+  }
+
+  public static final GeneralDateOffsetProvider GENERAL_DATE_OFFSET_PROVIDER = new GeneralDateOffsetProvider();
+
+
+  public static Date getDateWithOffset(UpdatePeriod up, int offset) {
+    return GENERAL_DATE_OFFSET_PROVIDER.get(up, offset);
+  }
+
+  public static String getDateStringWithOffset(UpdatePeriod up, int offset) {
+    return getDateStringWithOffset(up, offset, up);
+  }
+
+  public static String getDateStringWithOffset(UpdatePeriod up, int offset, UpdatePeriod formatWith) {
+    return formatWith.format(GENERAL_DATE_OFFSET_PROVIDER.get(up, offset));
+  }
+
+  public static String getTimeRangeString(final String timeDim, final String startDate, final String endDate) {
+    return "time_range_in(" + timeDim + ", '" + startDate + "','" + endDate + "')";
+  }
+
+  public static String getTimeRangeString(final String timeDim, final UpdatePeriod updatePeriod,
+    final int startOffset, final int endOffset) {
+    return getTimeRangeString(timeDim,
+      getDateStringWithOffset(updatePeriod, startOffset), getDateStringWithOffset(updatePeriod, endOffset));
+  }
+
+  public static String getTimeRangeString(final String startDate, final String endDate) {
+    return getTimeRangeString("d_time", startDate, endDate);
+  }
+
+  public static String getTimeRangeString(final UpdatePeriod updatePeriod,
+    final int startOffset, final int endOffset) {
+    return getTimeRangeString("d_time", updatePeriod, startOffset, endOffset);
+  }
+
+  public static String getTimeRangeString(String partCol, UpdatePeriod updatePeriod, int startOffset, int endOffset,
+    UpdatePeriod formatWith) {
+    return getTimeRangeString(partCol,
+      formatWith.format(getDateWithOffset(updatePeriod, startOffset)),
+      formatWith.format(getDateWithOffset(updatePeriod, endOffset)));
+  }
+
+  public static String getTimeRangeString(String partCol, UpdatePeriod updatePeriod, int startOffset, int endOffset,
+    DateFormat formatWith) {
+    return getTimeRangeString(partCol,
+      formatWith.format(getDateWithOffset(updatePeriod, startOffset)),
+      formatWith.format(getDateWithOffset(updatePeriod, endOffset)));
+  }
+
+  public static String getTimeRangeString(UpdatePeriod updatePeriod, int startOffset, int endOffset,
+    UpdatePeriod formatWith) {
+    return getTimeRangeString("d_time", updatePeriod, startOffset, endOffset, formatWith);
+  }
+
+  public static String getTimeRangeString(UpdatePeriod updatePeriod, int startOffset, int endOffset,
+    DateFormat formatWith) {
+    return getTimeRangeString("d_time", updatePeriod, startOffset, endOffset, formatWith);
+  }
+
+  // Time Instances as Date Type
+  public static final Date NOW;
+  public static final Date TWODAYS_BACK;
+  public static final Date TWO_MONTHS_BACK;
+  public static final Date BEFORE_6_DAYS;
+  public static final Date BEFORE_4_DAYS;
+
+  // Time Ranges
+  public static final String LAST_HOUR_TIME_RANGE;
+  public static final String TWO_DAYS_RANGE;
+  public static final String TWO_DAYS_RANGE_TTD;
+  public static final String TWO_DAYS_RANGE_TTD_BEFORE_4_DAYS;
+  public static final String TWO_DAYS_RANGE_TTD2;
+  public static final String TWO_DAYS_RANGE_TTD2_BEFORE_4_DAYS;
+  public static final String TWO_DAYS_RANGE_IT;
+  public static final String THIS_YEAR_RANGE;
+  public static final String LAST_YEAR_RANGE;
+  public static final String TWO_MONTHS_RANGE_UPTO_MONTH;
+  public static final String TWO_MONTHS_RANGE_UPTO_HOURS;
+  public static final String TWO_DAYS_RANGE_BEFORE_4_DAYS;
+  private static boolean zerothHour;
+
+
+  public static boolean isZerothHour() {
+    return zerothHour;
+  }
+
+  static {
+    NOW = getDateWithOffset(HOURLY, 0);
+
+    // Figure out if current hour is 0th hour
+    zerothHour = getDateStringWithOffset(HOURLY, 0).endsWith("-00");
+
+    TWODAYS_BACK = getDateWithOffset(DAILY, -2);
+    System.out.println("Test TWODAYS_BACK:" + TWODAYS_BACK);
+
+    // two months back
+    TWO_MONTHS_BACK = getDateWithOffset(MONTHLY, -2);
+    System.out.println("Test TWO_MONTHS_BACK:" + TWO_MONTHS_BACK);
+
+    // Before 4days
+    BEFORE_4_DAYS = getDateWithOffset(DAILY, -4);
+    BEFORE_6_DAYS = getDateWithOffset(DAILY, -6);
+
+    TWO_DAYS_RANGE_BEFORE_4_DAYS = getTimeRangeString(DAILY, -6, -4, HOURLY);
+
+    TWO_DAYS_RANGE = getTimeRangeString(HOURLY, -48, 0);
+    TWO_DAYS_RANGE_TTD = getTimeRangeString("test_time_dim", DAILY, -2, 0, HOURLY);
+    TWO_DAYS_RANGE_TTD_BEFORE_4_DAYS = getTimeRangeString("test_time_dim", DAILY, -6, -4, HOURLY);
+    TWO_DAYS_RANGE_TTD2 = getTimeRangeString("test_time_dim2", DAILY, -2, 0, HOURLY);
+    TWO_DAYS_RANGE_TTD2_BEFORE_4_DAYS = getTimeRangeString("test_time_dim2", DAILY, -6, -4, HOURLY);
+    TWO_DAYS_RANGE_IT = getTimeRangeString("it", DAILY, -2, 0, HOURLY);
+    THIS_YEAR_RANGE = getTimeRangeString(YEARLY, 0, 1);
+    LAST_YEAR_RANGE = getTimeRangeString(YEARLY, -1, 0);
+    TWO_MONTHS_RANGE_UPTO_MONTH = getTimeRangeString(MONTHLY, -2, 0);
+    TWO_MONTHS_RANGE_UPTO_HOURS = getTimeRangeString(MONTHLY, -2, 0, HOURLY);
+
+    // calculate LAST_HOUR_TIME_RANGE
+    LAST_HOUR_TIME_RANGE = getTimeRangeString(HOURLY, -1, 0);
+  }
+}
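
A quick usage sketch of the helpers above (a sketch, assuming DAILY formats dates as
yyyy-MM-dd; actual values depend on when the tests run):

    // Builds a time_range_in clause covering the last two days.
    String range = DateFactory.getTimeRangeString(UpdatePeriod.DAILY, -2, 0);
    // e.g. time_range_in(d_time, '2015-11-14','2015-11-16')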


[04/50] [abbrv] lens git commit: LENS-719: Allow fact start time to be specified for a storage and an update period

Posted by sh...@apache.org.
LENS-719: Allow fact start time to be specified for a storage and an update period


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/3ed191ac
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/3ed191ac
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/3ed191ac

Branch: refs/heads/LENS-581
Commit: 3ed191aca6c18be7c53afefdea28a6d08ae1fd07
Parents: 10dcebb
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Mon Nov 16 11:41:05 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Mon Nov 16 11:41:05 2015 +0530

----------------------------------------------------------------------
 lens-api/src/main/resources/cube-0.1.xsd        |  9 ++++
 .../lens/cube/metadata/MetastoreUtil.java       |  6 +++
 .../cube/parse/CandidateTablePruneCause.java    |  2 +
 .../org/apache/lens/cube/parse/DateUtil.java    |  4 +-
 .../lens/cube/parse/StorageTableResolver.java   | 51 +++++++++++++++++---
 .../apache/lens/cube/parse/CubeTestSetup.java   | 13 +++--
 .../lens/cube/parse/TestCubeRewriter.java       |  5 +-
 7 files changed, 76 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/3ed191ac/lens-api/src/main/resources/cube-0.1.xsd
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/cube-0.1.xsd b/lens-api/src/main/resources/cube-0.1.xsd
index 5d7630d..4092133 100644
--- a/lens-api/src/main/resources/cube-0.1.xsd
+++ b/lens-api/src/main/resources/cube-0.1.xsd
@@ -828,6 +828,15 @@
             The following properties can be specified for Elastic search tables :
             1. lens.metastore.es.index.name : The underlying ES index name.
             2. lens.metastore.es.type.name : The underlying ES type name.
+            Start and end times for storage tables:
+            1. cube.storagetable.start.times: Comma-separated list of start times for this table.
+               Start times can be relative times (e.g. now.day - 1 month) or absolute times (e.g. 2014-02).
+               The max of the start times will be considered as the final start time. This storage table will
+               not be a candidate for answering time ranges completely before its start time.
+            2. cube.storagetable.end.times: Comma-separated list of end times for this table.
+               End times can be relative times (e.g. now.day - 1 month) or absolute times (e.g. 2014-02).
+               The min of the end times will be considered as the final end time. This storage table will not be
+               a candidate for answering time ranges completely after its end time.
           </xs:documentation>
         </xs:annotation>
       </xs:element>
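
The same commit wires these properties up in CubeTestSetup (see further down); as a
minimal sketch, setting them on a storage table descriptor looks like this, with
illustrative values mirroring the test:

    StorageTableDesc s1 = new StorageTableDesc();
    s1.setTblProps(new HashMap<String, String>());
    // Max of the start times and min of the end times bound the validity window.
    s1.getTblProps().put(MetastoreUtil.getStoragetableStartTimesKey(), "2000, now - 10 years");
    s1.getTblProps().put(MetastoreUtil.getStoragetableEndTimesKey(), "now - 5 years, 2010");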

http://git-wip-us.apache.org/repos/asf/lens/blob/3ed191ac/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
index 2796cd9..e5cf468 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
@@ -150,6 +150,12 @@ public class MetastoreUtil {
   public static String getCubeColEndTimePropertyKey(String colName) {
     return getColumnKeyPrefix(colName) + END_TIME_SFX;
   }
+  public static String getStoragetableStartTimesKey() {
+    return STORAGE_PFX + "start.times";
+  }
+  public static String getStoragetableEndTimesKey() {
+    return STORAGE_PFX + "end.times";
+  }
 
   public static String getCubeColCostPropertyKey(String colName) {
     return getColumnKeyPrefix(colName) + COST_SFX;

http://git-wip-us.apache.org/repos/asf/lens/blob/3ed191ac/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
index 9ea43bb..9c8b5b9 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
@@ -180,6 +180,8 @@ public class CandidateTablePruneCause {
     NO_PARTITIONS,
     // partition column does not exist
     PART_COL_DOES_NOT_EXIST,
+    // Range is not supported by this storage table
+    RANGE_NOT_ANSWERABLE,
     // storage is not supported by execution engine
     UNSUPPORTED
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/3ed191ac/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
index 67932da..4690d1d 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
@@ -63,7 +63,7 @@ public final class DateUtil {
   }
 
   public static final String GRANULARITY = "\\.(" + UNIT + ")";
-  public static final String RELATIVE = "(now){1}(" + GRANULARITY + "){0,1}";
+  public static final String RELATIVE = "(now)(" + GRANULARITY + ")?";
   public static final Pattern P_RELATIVE = Pattern.compile(RELATIVE, Pattern.CASE_INSENSITIVE);
 
   public static final String WSPACE = "\\s+";
@@ -79,7 +79,7 @@ public final class DateUtil {
   public static final Pattern P_UNIT = Pattern.compile(UNIT, Pattern.CASE_INSENSITIVE);
 
   public static final String RELDATE_VALIDATOR_STR = RELATIVE + OPTIONAL_WSPACE + "((" + SIGNAGE + ")" + "("
-    + WSPACE + ")?" + "(" + QUANTITY + ")" + OPTIONAL_WSPACE + "(" + UNIT + ")){0,1}" + "(s?)";
+    + WSPACE + ")?" + "(" + QUANTITY + ")" + OPTIONAL_WSPACE + "(" + UNIT + "))?" + "(s?)";
 
   public static final Pattern RELDATE_VALIDATOR = Pattern.compile(RELDATE_VALIDATOR_STR, Pattern.CASE_INSENSITIVE);
 

http://git-wip-us.apache.org/repos/asf/lens/blob/3ed191ac/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index f67fc26..4db1626 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -19,9 +19,12 @@
 package org.apache.lens.cube.parse;
 
 import static org.apache.lens.cube.metadata.MetastoreUtil.getFactOrDimtableStorageTableName;
+import static org.apache.lens.cube.metadata.MetastoreUtil.getStoragetableEndTimesKey;
+import static org.apache.lens.cube.metadata.MetastoreUtil.getStoragetableStartTimesKey;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode.PART_COL_DOES_NOT_EXIST;
+import static org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode.RANGE_NOT_ANSWERABLE;
 import static org.apache.lens.cube.parse.DateUtil.WSPACE;
 import static org.apache.lens.cube.parse.StorageUtil.joinWithAnd;
 
@@ -69,6 +72,7 @@ class StorageTableResolver implements ContextRewriter {
   private TimeRangeWriter rangeWriter;
   private DateFormat partWhereClauseFormat = null;
   private PHASE phase;
+  private HashMap<CubeFactTable, Map<String, SkipStorageCause>> skipStorageCausesPerFact;
 
   enum PHASE {
     FACT_TABLES, FACT_PARTITIONS, DIM_TABLE_AND_PARTITIONS;
@@ -179,7 +183,7 @@ class StorageTableResolver implements ContextRewriter {
         Set<String> storageTables = new HashSet<String>();
         Map<String, String> whereClauses = new HashMap<String, String>();
         boolean foundPart = false;
-        Map<String, SkipStorageCause> skipStorageCauses = new HashMap<String, SkipStorageCause>();
+        Map<String, SkipStorageCause> skipStorageCauses = new HashMap<>();
         for (String storage : dimtable.getStorages()) {
           if (isStorageSupported(storage)) {
             String tableName = getFactOrDimtableStorageTableName(dimtable.getName(), storage).toLowerCase();
@@ -235,6 +239,7 @@ class StorageTableResolver implements ContextRewriter {
   // Resolves all the storage table names, which are valid for each updatePeriod
   private void resolveFactStorageTableNames(CubeQueryContext cubeql) throws LensException {
     Iterator<CandidateFact> i = cubeql.getCandidateFacts().iterator();
+    skipStorageCausesPerFact = new HashMap<>();
     while (i.hasNext()) {
       CubeFactTable fact = i.next().fact;
       if (fact.getUpdatePeriods().isEmpty()) {
@@ -247,7 +252,7 @@ class StorageTableResolver implements ContextRewriter {
       String str = conf.get(CubeQueryConfUtil.getValidStorageTablesKey(fact.getName()));
       List<String> validFactStorageTables =
         StringUtils.isBlank(str) ? null : Arrays.asList(StringUtils.split(str.toLowerCase(), ","));
-      Map<String, SkipStorageCause> skipStorageCauses = new HashMap<String, SkipStorageCause>();
+      Map<String, SkipStorageCause> skipStorageCauses = new HashMap<>();
 
       for (Map.Entry<String, Set<UpdatePeriod>> entry : fact.getUpdatePeriods().entrySet()) {
         String storage = entry.getKey();
@@ -281,7 +286,7 @@ class StorageTableResolver implements ContextRewriter {
           }
           Set<String> storageTables = storageTableMap.get(updatePeriod);
           if (storageTables == null) {
-            storageTables = new LinkedHashSet<String>();
+            storageTables = new LinkedHashSet<>();
             storageTableMap.put(updatePeriod, storageTables);
           }
           isStorageAdded = true;
@@ -292,6 +297,7 @@ class StorageTableResolver implements ContextRewriter {
           skipStorageCauses.put(storage, SkipStorageCause.noCandidateUpdatePeriod(skipUpdatePeriodCauses));
         }
       }
+      skipStorageCausesPerFact.put(fact, skipStorageCauses);
       if (storageTableMap.isEmpty()) {
         log.info("Not considering fact table:{} as it does not have any storage tables", fact);
         cubeql.addFactPruningMsgs(fact, noCandidateStorages(skipStorageCauses));
@@ -359,7 +365,10 @@ class StorageTableResolver implements ContextRewriter {
     while (i.hasNext()) {
       CandidateFact cfact = i.next();
       List<FactPartition> answeringParts = new ArrayList<>();
-      HashMap<String, SkipStorageCause> skipStorageCauses = new HashMap<>();
+      Map<String, SkipStorageCause> skipStorageCauses = skipStorageCausesPerFact.get(cfact.fact);
+      if (skipStorageCauses == null) {
+        skipStorageCauses = new HashMap<>();
+      }
       PartitionRangesForPartitionColumns missingParts = new PartitionRangesForPartitionColumns();
       boolean noPartsForRange = false;
       Set<String> unsupportedTimeDims = Sets.newHashSet();
@@ -506,7 +515,7 @@ class StorageTableResolver implements ContextRewriter {
   }
 
   private Set<FactPartition> getPartitions(CubeFactTable fact, TimeRange range,
-    HashMap<String, SkipStorageCause> skipStorageCauses,
+    Map<String, SkipStorageCause> skipStorageCauses,
     PartitionRangesForPartitionColumns missingPartitions) throws LensException {
     try {
       return getPartitions(fact, range, getValidUpdatePeriods(fact), true, failOnPartialData, skipStorageCauses,
@@ -564,11 +573,13 @@ class StorageTableResolver implements ContextRewriter {
     Iterator<String> it = storageTbls.iterator();
     while (it.hasNext()) {
       String storageTableName = it.next();
-      if (!client.partColExists(storageTableName, partCol)) {
+      if (!isStorageTableCandidateForRange(storageTableName, fromDate, toDate)) {
+        skipStorageCauses.put(storageTableName, new SkipStorageCause(RANGE_NOT_ANSWERABLE));
+        it.remove();
+      } else if (!client.partColExists(storageTableName, partCol)) {
         log.info("{} does not exist in {}", partCol, storageTableName);
         skipStorageCauses.put(storageTableName, SkipStorageCause.partColDoesNotExist(partCol));
         it.remove();
-        continue;
       }
     }
 
@@ -683,6 +694,32 @@ class StorageTableResolver implements ContextRewriter {
         updatePeriods, addNonExistingParts, failOnPartialData, skipStorageCauses, missingPartitions);
   }
 
+  private boolean isStorageTableCandidateForRange(String storageTableName, Date fromDate, Date toDate) throws
+    HiveException, LensException {
+    Date now = new Date();
+    String startProperty = client.getTable(storageTableName).getProperty(getStoragetableStartTimesKey());
+    if (StringUtils.isNotBlank(startProperty)) {
+      for (String timeStr : startProperty.split("\\s*,\\s*")) {
+        if (toDate.before(DateUtil.resolveDate(timeStr, now))) {
+          log.info("from date {} is before validity start time: {}, hence discarding {}",
+            toDate, timeStr, storageTableName);
+          return false;
+        }
+      }
+    }
+    String endProperty = client.getTable(storageTableName).getProperty(getStoragetableEndTimesKey());
+    if (StringUtils.isNotBlank(endProperty)) {
+      for (String timeStr : endProperty.split("\\s*,\\s*")) {
+        if (fromDate.after(DateUtil.resolveDate(timeStr, now))) {
+          log.info("to date {} is after validity end time: {}, hence discarding {}",
+            fromDate, timeStr, storageTableName);
+          return false;
+        }
+      }
+    }
+    return true;
+  }
+
   private void updateFactPartitionStorageTablesFrom(CubeFactTable fact,
     FactPartition part, Set<String> storageTableNames) throws LensException, HiveException, ParseException {
     for (String storageTableName : storageTableNames) {
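
Net effect of isStorageTableCandidateForRange: a storage table is skipped with
RANGE_NOT_ANSWERABLE only when the queried range lies wholly outside its validity
window. A hedged restatement of the rule (after reducing the configured lists to the
max start and the min end), not the actual Lens API:

    // Candidate iff the range is neither entirely before the start nor entirely after the end.
    static boolean isCandidateForRange(Date from, Date to, Date start, Date end) {
      return !to.before(start) && !from.after(end);
    }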

http://git-wip-us.apache.org/repos/asf/lens/blob/3ed191ac/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index dc11b4c..826f6b6 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -1309,8 +1309,8 @@ public class CubeTestSetup {
     updates.add(QUARTERLY);
     updates.add(YEARLY);
 
-    ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
-    List<String> timePartCols = new ArrayList<String>();
+    ArrayList<FieldSchema> partCols = new ArrayList<>();
+    List<String> timePartCols = new ArrayList<>();
     partCols.add(TestCubeMetastoreClient.getDatePartition());
     timePartCols.add(TestCubeMetastoreClient.getDatePartitionKey());
 
@@ -1319,20 +1319,25 @@ public class CubeTestSetup {
     s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
     s1.setPartCols(partCols);
     s1.setTimePartCols(timePartCols);
+    s1.setTblProps(new HashMap<String, String>());
+    s1.getTblProps().put(MetastoreUtil.getStoragetableStartTimesKey(), "2000, now - 10 years");
+    s1.getTblProps().put(MetastoreUtil.getStoragetableEndTimesKey(), "now - 5 years, 2010");
 
     StorageTableDesc s2 = new StorageTableDesc();
     s2.setInputFormat(TextInputFormat.class.getCanonicalName());
     s2.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
-    ArrayList<FieldSchema> s2PartCols = new ArrayList<FieldSchema>();
+    ArrayList<FieldSchema> s2PartCols = new ArrayList<>();
     s2PartCols.add(new FieldSchema("ttd", serdeConstants.STRING_TYPE_NAME, "test date partition"));
     s2PartCols.add(new FieldSchema("ttd2", serdeConstants.STRING_TYPE_NAME, "test date partition"));
     s2.setPartCols(s2PartCols);
     s2.setTimePartCols(Arrays.asList("ttd", "ttd2"));
 
     storageAggregatePeriods.put(c99, updates);
+    storageAggregatePeriods.put(c0, updates);
 
-    Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
+    Map<String, StorageTableDesc> storageTables = new HashMap<>();
     storageTables.put(c99, s2);
+    storageTables.put(c0, s1);
     // create cube fact
     client.createCubeFactTable(TEST_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 0L,
       factValidityProperties, storageTables);

http://git-wip-us.apache.org/repos/asf/lens/blob/3ed191ac/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index d7484d8..4acd063 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -463,7 +463,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
   @Test
   public void testCubeWhereQueryWithMultipleTablesForMonth() throws Exception {
     Configuration conf = getConf();
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "");
+    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C0,C1,C2,C3,C4,C5");
     conf.set(CubeQueryConfUtil.getValidStorageTablesKey("testfact"), "");
     conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C1"), "HOURLY");
     conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact2", "C1"), "YEARLY");
@@ -990,6 +990,9 @@ public class TestCubeRewriter extends TestQueryRewrite {
       MISSING_PARTITIONS);
     assertEquals(pruneCauses.getDetails().get("cheapfact").iterator().next().getCause(),
       NO_CANDIDATE_STORAGES);
+    CandidateTablePruneCause cheapFactPruneCauses = pruneCauses.getDetails().get("cheapfact").iterator().next();
+    assertEquals(cheapFactPruneCauses.getStorageCauses().get("c0").getCause(), SkipStorageCode.RANGE_NOT_ANSWERABLE);
+    assertEquals(cheapFactPruneCauses.getStorageCauses().get("c99").getCause(), SkipStorageCode.UNSUPPORTED);
     assertEquals(pruneCauses.getDetails().get("summary4").iterator().next().getCause(), TIMEDIM_NOT_SUPPORTED);
     assertTrue(pruneCauses.getDetails().get("summary4").iterator().next().getUnsupportedTimeDims().contains("d_time"));
   }


[39/50] [abbrv] lens git commit: LENS-851 : Replace columns with aliases in where clause of the inner query

Posted by sh...@apache.org.
LENS-851 : Replace columns with aliases in where clause of the inner query


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/bf4c0bec
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/bf4c0bec
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/bf4c0bec

Branch: refs/heads/LENS-581
Commit: bf4c0bec023307417de75f4c13ed1c344fc1f06e
Parents: ff891e2
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Sat Dec 12 15:30:23 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Sat Dec 12 15:30:23 2015 +0530

----------------------------------------------------------------------
 lens-api/src/main/resources/lens-errors.conf    | 25 +++++--
 .../lens/cube/error/LensCubeErrorCode.java      |  6 +-
 .../apache/lens/cube/metadata/ExprColumn.java   | 60 ++++++++++-------
 .../lens/cube/metadata/MetastoreUtil.java       | 16 +++++
 .../apache/lens/cube/parse/CandidateFact.java   |  8 +--
 .../apache/lens/cube/parse/GroupbyResolver.java | 15 +----
 .../org/apache/lens/cube/parse/HQLParser.java   | 11 +++-
 .../lens/cube/parse/SingleFactHQLContext.java   |  8 +--
 .../parse/SingleFactMultiStorageHQLContext.java | 68 +++++++++++++++-----
 .../apache/lens/cube/parse/UnionHQLContext.java |  2 +-
 .../cube/metadata/TestCubeMetastoreClient.java  | 10 +--
 .../lens/cube/metadata/TestExprColumn.java      | 20 +++---
 .../apache/lens/cube/parse/CubeTestSetup.java   |  2 +-
 .../lens/cube/parse/TestCubeRewriter.java       |  4 +-
 .../apache/lens/server/metastore/JAXBUtils.java |  8 +--
 15 files changed, 161 insertions(+), 102 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-api/src/main/resources/lens-errors.conf
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/lens-errors.conf b/lens-api/src/main/resources/lens-errors.conf
index ca8562f..c880543 100644
--- a/lens-api/src/main/resources/lens-errors.conf
+++ b/lens-api/src/main/resources/lens-errors.conf
@@ -284,9 +284,15 @@ lensCubeErrorsForQuery = [
   }
 
   {
-      errorCode = 3031
-      httpStatusCode = ${BAD_REQUEST}
-      errorMsg = "The query is answerable from two storages but union is disabled."
+    errorCode = 3031
+    httpStatusCode = ${BAD_REQUEST}
+    errorMsg = "The query is answerable from two storages but union is disabled."
+  }
+
+  {
+    errorCode = 3032
+    httpStatusCode = ${INTERNAL_SERVER_ERROR}
+    errorMsg = "Could not parse expression %s"
   }
 ]
 
@@ -298,10 +304,17 @@ lensCubeErrorsForMetastore = [
   }
 
   {
-      errorCode = 3102
-      httpStatusCode = ${BAD_REQUEST}
-      errorMsg = "No timeline found for fact=%s, storage=%s, update period=%s, partition column=%s."
+    errorCode = 3102
+    httpStatusCode = ${BAD_REQUEST}
+    errorMsg = "No timeline found for fact=%s, storage=%s, update period=%s, partition column=%s."
   }
+
+  {
+    errorCode = 3103
+    httpStatusCode = ${BAD_REQUEST}
+    errorMsg = "The Expression %s is Not Parsable."
+  }
+
 ]
 
 lensCubeErrors = ${lensCubeErrorsForQuery}${lensCubeErrorsForMetastore}

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
index 6c5dc2f..68cd80b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
@@ -54,10 +54,12 @@ public enum LensCubeErrorCode {
   NO_CANDIDATE_FACT_AVAILABLE(3028, 1200),
   NO_CANDIDATE_DIM_STORAGE_TABLES(3029, 1300),
   NO_STORAGE_TABLE_AVAIABLE(3030, 1400),
-  STORAGE_UNION_DISABLED(3031, 100),
+  STORAGE_UNION_DISABLED(3031, 1500),
+  COULD_NOT_PARSE_EXPRESSION(3032, 1500),
   // Error codes greater than 3100 are errors while doing a metastore operation.
   ERROR_IN_ENTITY_DEFINITION(3101, 100),
-  TIMELINE_ABSENT(3102, 100);
+  TIMELINE_ABSENT(3102, 100),
+  EXPRESSION_NOT_PARSABLE(3103, 1500);
 
   public LensErrorInfo getLensErrorInfo() {
     return this.errorInfo;

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/main/java/org/apache/lens/cube/metadata/ExprColumn.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/ExprColumn.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/ExprColumn.java
index b418517..da87e31 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/ExprColumn.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/ExprColumn.java
@@ -23,17 +23,14 @@ import java.io.UnsupportedEncodingException;
 import java.util.*;
 
 import org.apache.lens.cube.parse.HQLParser;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.ParseException;
 
-import lombok.Getter;
-import lombok.NoArgsConstructor;
-import lombok.Setter;
-import lombok.ToString;
+import lombok.*;
 
 public class ExprColumn extends CubeColumn {
   public static final char EXPRESSION_DELIMITER = '|';
@@ -46,11 +43,11 @@ public class ExprColumn extends CubeColumn {
   private int hashCode;
 
   // for backward compatibility
-  public ExprColumn(FieldSchema column, String displayString, String expression) {
+  public ExprColumn(FieldSchema column, String displayString, String expression) throws LensException {
     this(column, displayString, new ExprSpec(expression, null, null));
   }
 
-  public ExprColumn(FieldSchema column, String displayString, ExprSpec... expressions) {
+  public ExprColumn(FieldSchema column, String displayString, ExprSpec... expressions) throws LensException {
     super(column.getName(), column.getComment(), displayString, null, null, 0.0);
 
     if (expressions == null || expressions.length == 0) {
@@ -124,6 +121,7 @@ public class ExprColumn extends CubeColumn {
   public static class ExprSpec {
     @Getter
     @Setter
+    @NonNull
     private String expr;
     @Getter
     @Setter
@@ -136,20 +134,18 @@ public class ExprColumn extends CubeColumn {
     private boolean hasHashCode = false;
     private transient int hashCode;
 
-    public ExprSpec(String expr, Date startTime, Date endTime) {
+    public ExprSpec(@NonNull String expr, Date startTime, Date endTime) throws LensException {
       this.expr = expr;
       this.startTime = startTime;
       this.endTime = endTime;
+      // validate eagerly: fails with LensException if the expression is not parsable
+      getASTNode();
     }
 
-    public synchronized ASTNode getASTNode() {
+    public synchronized ASTNode getASTNode() throws LensException {
       if (astNode == null) {
-        try {
-          if (StringUtils.isNotBlank(expr)) {
-            astNode = HQLParser.parseExpr(getExpr());
-          }
-        } catch (ParseException e) {
-          throw new IllegalArgumentException("Expression can't be parsed: " + getExpr(), e);
+        if (StringUtils.isNotBlank(expr)) {
+          astNode = MetastoreUtil.parseExpr(getExpr());
         }
       }
       return astNode;
@@ -160,8 +156,14 @@ public class ExprColumn extends CubeColumn {
       if (!hasHashCode) {
         final int prime = 31;
         int result = 1;
-        if (getASTNode() != null) {
-          String exprNormalized = HQLParser.getString(getASTNode());
+        ASTNode astNode;
+        try {
+          astNode = getASTNode();
+        } catch (LensException e) {
+          throw new IllegalArgumentException(e);
+        }
+        if (astNode != null) {
+          String exprNormalized = HQLParser.getString(astNode);
           result = prime * result + exprNormalized.hashCode();
         }
         result = prime * result + ((getStartTime() == null) ? 0 : COLUMN_TIME_FORMAT.get().format(
@@ -262,9 +264,17 @@ public class ExprColumn extends CubeColumn {
       return false;
     }
     // Compare expressions for both - compare ASTs
-    List<ASTNode> myExpressions = getExpressionASTList();
-    List<ASTNode> otherExpressions = other.getExpressionASTList();
-
+    List<ASTNode> myExpressions, otherExpressions;
+    try {
+      myExpressions = getExpressionASTList();
+    } catch (LensException e) {
+      throw new IllegalArgumentException(e);
+    }
+    try {
+      otherExpressions = other.getExpressionASTList();
+    } catch (LensException e) {
+      throw new IllegalArgumentException(e);
+    }
     for (int i = 0; i < myExpressions.size(); i++) {
       if (!HQLParser.equalsAST(myExpressions.get(i), otherExpressions.get(i))) {
         return false;
@@ -316,11 +326,11 @@ public class ExprColumn extends CubeColumn {
    *
    * @return the ast
    */
-  public ASTNode getAst() {
+  public ASTNode getAst() throws LensException {
     return getExpressionASTList().get(0);
   }
 
-  public List<ASTNode> getExpressionASTList() {
+  public List<ASTNode> getExpressionASTList() throws LensException {
     synchronized (expressionSet) {
       if (astNodeList.isEmpty()) {
         for (ExprSpec expr : expressionSet) {
@@ -366,15 +376,15 @@ public class ExprColumn extends CubeColumn {
    * Add an expression to existing set of expressions for this column
    *
    * @param expression
-   * @throws ParseException
+   * @throws LensException
    */
-  public void addExpression(ExprSpec expression) throws ParseException {
+  public void addExpression(ExprSpec expression) throws LensException {
     if (expression == null || expression.getExpr().isEmpty()) {
       throw new IllegalArgumentException("Empty expression not allowed");
     }
 
     // Validate if expression can be correctly parsed
-    HQLParser.parseExpr(expression.getExpr());
+    MetastoreUtil.parseExpr(expression.getExpr());
     synchronized (expressionSet) {
       expressionSet.add(expression);
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
index 4ec049c..deb5368 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
@@ -19,14 +19,20 @@
 
 package org.apache.lens.cube.metadata;
 
+import static org.apache.lens.cube.error.LensCubeErrorCode.EXPRESSION_NOT_PARSABLE;
 import static org.apache.lens.cube.metadata.MetastoreConstants.*;
 
 import java.text.ParseException;
 import java.util.*;
 
+import org.apache.lens.server.api.error.LensException;
+
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.ParseDriver;
+import org.apache.hadoop.hive.ql.parse.ParseUtils;
 
 import com.google.common.collect.Sets;
 
@@ -536,4 +542,14 @@ public class MetastoreUtil {
     }
     return null;
   }
+  public static ASTNode parseExpr(String expr) throws LensException {
+    ParseDriver driver = new ParseDriver();
+    ASTNode tree;
+    try {
+      tree = driver.parseExpression(expr);
+    } catch (org.apache.hadoop.hive.ql.parse.ParseException e) {
+      throw new LensException(EXPRESSION_NOT_PARSABLE.getLensErrorInfo(), e, e.getMessage(), expr);
+    }
+    return ParseUtils.findRootNonNullToken(tree);
+  }
 }
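
A quick caller-side sketch of the helper above (assuming Hive's ParseDriver behaviour as used in the diff; the returned node is the root non-null token, so a binary expression comes back rooted at its operator):

    // Parses fine; root token is "+", children are TOK_TABLE_OR_COL nodes.
    ASTNode plus = MetastoreUtil.parseExpr("a + b");
    // Malformed input such as "(a + b" throws LensException carrying
    // LensCubeErrorCode.EXPRESSION_NOT_PARSABLE.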

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
index 1884bde..2338ba7 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
@@ -32,7 +32,6 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.ParseException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.antlr.runtime.CommonToken;
@@ -159,12 +158,7 @@ public class CandidateFact implements CandidateTable {
       TimeRange range = cubeql.getTimeRanges().get(i);
       String rangeWhere = rangeToWhereClause.get(range);
       if (!StringUtils.isBlank(rangeWhere)) {
-        ASTNode rangeAST;
-        try {
-          rangeAST = HQLParser.parseExpr(rangeWhere);
-        } catch (ParseException e) {
-          throw new LensException(e);
-        }
+        ASTNode rangeAST = HQLParser.parseExpr(rangeWhere);
         rangeAST.setParent(timenodes.get(i).parent);
         timenodes.get(i).parent.setChild(timenodes.get(i).childIndex, rangeAST);
       }

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
index 97088a1..da74713 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
@@ -31,7 +31,6 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.ParseException;
 
 import org.antlr.runtime.CommonToken;
 import org.antlr.runtime.tree.Tree;
@@ -73,12 +72,7 @@ class GroupbyResolver implements ContextRewriter {
 
         if (!groupByExprs.contains(expr)) {
           if (!cubeql.isAggregateExpr(expr)) {
-            ASTNode exprAST;
-            try {
-              exprAST = HQLParser.parseExpr(expr);
-            } catch (ParseException e) {
-              throw new LensException(e);
-            }
+            ASTNode exprAST = HQLParser.parseExpr(expr);
             ASTNode groupbyAST = cubeql.getGroupByAST();
             if (!isConstantsUsed(exprAST)) {
               if (groupbyAST != null) {
@@ -140,12 +134,7 @@ class GroupbyResolver implements ContextRewriter {
     int index = 0;
     for (String expr : groupByExprs) {
       if (!contains(cubeql, selectExprs, expr)) {
-        ASTNode exprAST;
-        try {
-          exprAST = HQLParser.parseExpr(expr);
-        } catch (ParseException e) {
-          throw new LensException(e);
-        }
+        ASTNode exprAST = HQLParser.parseExpr(expr);
         addChildAtIndex(index, cubeql.getSelectAST(), exprAST);
         index++;
       }
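
The two hunks above cover both promotion directions, which are driven by configuration. A hedged sketch using the property keys documented in olap-query-conf.apt (see the [08/50] commit later in this archive):

    Configuration conf = new Configuration();
    // Promote non-aggregate select expressions into GROUP BY ...
    conf.setBoolean("lens.cube.query.promote.select.togroupby", true);
    // ... and promote GROUP BY expressions into SELECT when not projected.
    conf.setBoolean("lens.cube.query.promote.groupby.toselect", true);
    // With both enabled, "select cityid, sum(msr2) from testcube where <range>"
    // gets "group by testcube.cityid" added during rewrite.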

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
index 9a9d134..7cea7d5 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
@@ -18,10 +18,10 @@
  */
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.error.LensCubeErrorCode.COULD_NOT_PARSE_EXPRESSION;
 import static org.apache.lens.cube.error.LensCubeErrorCode.SYNTAX_ERROR;
 
 import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
-import static org.apache.hadoop.hive.ql.parse.HiveParser.Number;
 
 import java.io.IOException;
 import java.lang.reflect.Field;
@@ -170,9 +170,14 @@ public final class HQLParser {
     return tree;
   }
 
-  public static ASTNode parseExpr(String expr) throws ParseException {
+  public static ASTNode parseExpr(String expr) throws LensException {
     ParseDriver driver = new ParseDriver();
-    ASTNode tree = driver.parseExpression(expr);
+    ASTNode tree;
+    try {
+      tree = driver.parseExpression(expr);
+    } catch (ParseException e) {
+      throw new LensException(COULD_NOT_PARSE_EXPRESSION.getLensErrorInfo(), e, e.getMessage());
+    }
     return ParseUtils.findRootNonNullToken(tree);
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
index f7271e5..de52b0a 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
@@ -26,7 +26,6 @@ import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.ParseException;
 
 /**
  * HQL context class which passes down all query strings to come from DimOnlyHQLContext and works with fact being
@@ -67,12 +66,7 @@ class SingleFactHQLContext extends DimOnlyHQLContext {
           String rangeWhere = entry.getKey();
 
           if (!StringUtils.isBlank(rangeWhere)) {
-            ASTNode rangeAST;
-            try {
-              rangeAST = HQLParser.parseExpr(rangeWhere);
-            } catch (ParseException e) {
-              throw new LensException(e);
-            }
+            ASTNode rangeAST = HQLParser.parseExpr(rangeWhere);
             rangeAST.setParent(range.getParent());
             range.getParent().setChild(range.getChildIndex(), rangeAST);
           }

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
index 418ef5a..96b1d05 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
@@ -19,7 +19,7 @@
 
 package org.apache.lens.cube.parse;
 
-import static org.apache.lens.cube.parse.HQLParser.getString;
+import static org.apache.lens.cube.parse.HQLParser.*;
 
 import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 
@@ -66,7 +66,7 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
     @Override
     public int hashCode() {
       if (!hashCodeComputed) {
-        hashCode = HQLParser.getString(ast).hashCode();
+        hashCode = getString(ast).hashCode();
         hashCodeComputed = true;
       }
       return hashCode;
@@ -74,8 +74,8 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
 
     @Override
     public boolean equals(Object o) {
-      return o instanceof HashableASTNode && this.hashCode() == o.hashCode() && HQLParser.getString(this.getAST())
-        .trim().equalsIgnoreCase(HQLParser.getString(((HashableASTNode) o).getAST()).trim());
+      return o instanceof HashableASTNode && this.hashCode() == o.hashCode() && getString(this.getAST())
+        .trim().equalsIgnoreCase(getString(((HashableASTNode) o).getAST()).trim());
     }
   }
 
@@ -86,6 +86,7 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
     super(query, fact);
     processSelectAST();
     processGroupByAST();
+    processWhereAST();
     processHavingAST();
     processOrderByAST();
     processLimit();
@@ -94,10 +95,10 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
 
   private void processSelectAST() {
     query.getSelectFinalAliases().clear();
-    ASTNode originalSelectAST = HQLParser.copyAST(query.getSelectAST());
+    ASTNode originalSelectAST = copyAST(query.getSelectAST());
     query.setSelectAST(new ASTNode(originalSelectAST.getToken()));
     ASTNode outerSelectAST = processExpression(originalSelectAST);
-    setSelect(HQLParser.getString(outerSelectAST));
+    setSelect(getString(outerSelectAST));
   }
 
   private void processGroupByAST() {
@@ -106,16 +107,25 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
     }
   }
 
+  private void processWhereAST() throws LensException {
+    for (String storageTable : fact.getStorgeWhereClauseMap().keySet()) {
+      ASTNode tree = parseExpr(fact.getStorgeWhereClauseMap().get(storageTable));
+      ASTNode replaced = replaceAST(tree);
+      fact.getStorgeWhereClauseMap().put(storageTable, getString(replaced));
+    }
+  }
+
   private void processHavingAST() throws LensException {
     if (query.getHavingAST() != null) {
-      setHaving(HQLParser.getString(processExpression(query.getHavingAST())));
+      setHaving(getString(processExpression(query.getHavingAST())));
       query.setHavingAST(null);
     }
   }
 
+
   private void processOrderByAST() {
     if (query.getOrderByAST() != null) {
-      setOrderby(HQLParser.getString(processExpression(query.getOrderByAST())));
+      setOrderby(getString(processExpression(query.getOrderByAST())));
       query.setOrderByAST(null);
     }
   }
@@ -124,6 +134,7 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
     setLimit(query.getLimitValue());
     query.setLimitValue(null);
   }
+
   /*
   Perform a DFS on the provided AST, and Create an AST of similar structure with changes specific to the
   inner query - outer query dynamics. The resultant AST is supposed to be used in outer query.
@@ -147,11 +158,11 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
     if (astNode == null) {
       return null;
     }
-    if (innerToOuterASTs.containsKey(new HashableASTNode(astNode))) {
-      return innerToOuterASTs.get(new HashableASTNode(astNode));
-    }
-    if (HQLParser.isAggregateAST(astNode)) {
-      ASTNode innerSelectASTWithoutAlias = HQLParser.copyAST(astNode);
+    if (isAggregateAST(astNode)) {
+      if (innerToOuterASTs.containsKey(new HashableASTNode(astNode))) {
+        return innerToOuterASTs.get(new HashableASTNode(astNode));
+      }
+      ASTNode innerSelectASTWithoutAlias = copyAST(astNode);
       ASTNode innerSelectExprAST = new ASTNode(new CommonToken(HiveParser.TOK_SELEXPR));
       innerSelectExprAST.addChild(innerSelectASTWithoutAlias);
       String alias = decideAlias(astNode);
@@ -164,8 +175,11 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
       outerAST.addChild(dotAST);
       innerToOuterASTs.put(new HashableASTNode(innerSelectASTWithoutAlias), outerAST);
       return outerAST;
-    } else if (HQLParser.isTableColumnAST(astNode)) {
-      ASTNode innerSelectASTWithoutAlias = HQLParser.copyAST(astNode);
+    } else if (isTableColumnAST(astNode)) {
+      if (innerToOuterASTs.containsKey(new HashableASTNode(astNode))) {
+        return innerToOuterASTs.get(new HashableASTNode(astNode));
+      }
+      ASTNode innerSelectASTWithoutAlias = copyAST(astNode);
       ASTNode innerSelectExprAST = new ASTNode(new CommonToken(HiveParser.TOK_SELEXPR));
       innerSelectExprAST.addChild(innerSelectASTWithoutAlias);
       String alias = decideAlias(astNode);
@@ -186,6 +200,30 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
     }
   }
 
+  /**
+   * Transforms the inner query's AST so that aliases are used instead of the original column names.
+   * Does so in-place, without creating new ASTNode instances.
+   * @param astNode inner query's AST Node to transform
+   * @return Transformed AST Node.
+   */
+  private ASTNode replaceAST(ASTNode astNode) {
+    if (astNode == null) {
+      return null;
+    }
+    if (isAggregateAST(astNode) || isTableColumnAST(astNode)) {
+      if (innerToOuterASTs.containsKey(new HashableASTNode(astNode))) {
+        ASTNode ret = innerToOuterASTs.get(new HashableASTNode(astNode));
+        // Set parent null for quicker GC
+        astNode.setParent(null);
+        return ret;
+      }
+    }
+    for (int i = 0; i < astNode.getChildCount(); i++) {
+      astNode.setChild(i, replaceAST((ASTNode) astNode.getChild(i)));
+    }
+    return astNode;
+  }
+
   private void addToInnerSelectAST(ASTNode selectExprAST) {
     if (query.getSelectAST() == null) {
       query.setSelectAST(new ASTNode(new CommonToken(TOK_SELECT)));
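
Taken together, processSelectAST and processExpression give the inner per-storage queries aliased projections, while processWhereAST/replaceAST rewrite the pushed-down storage where clauses in place to use those aliases. A hedged sketch of the resulting shape (storage table names and aliases illustrative; the TestCubeRewriter change below exercises the real rewrite):

    -- per-storage inner branches, combined with UNION ALL:
    SELECT testcube.cityid AS alias0, sum(testcube.msr2) AS alias1 FROM c1_fact ...
    UNION ALL
    SELECT testcube.cityid AS alias0, sum(testcube.msr2) AS alias1 FROM c2_fact ...
    -- outer query re-aggregates over the aliases:
    SELECT testcube.alias0, sum(testcube.alias1) FROM ( ... ) testcube GROUP BY testcube.alias0

A filter written against a projected column, e.g. cityid = 'a', is rewritten in place to testcube.alias0 = 'a'; columns that are not in the inner select (testcube.zipcode = 'b' in the test below) are left untouched.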

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
index c9ba561..e6ee989 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
@@ -33,7 +33,7 @@ import lombok.RequiredArgsConstructor;
 @RequiredArgsConstructor
 public abstract class UnionHQLContext extends SimpleHQLContext {
   protected final CubeQueryContext query;
-  private final CandidateFact fact;
+  protected final CandidateFact fact;
 
   List<HQLContextInterface> hqlContexts = new ArrayList<>();
 

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
index c6ce6ad..0fef13f 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
@@ -43,11 +43,7 @@ import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.Partition;
-import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.parse.ParseException;
+import org.apache.hadoop.hive.ql.metadata.*;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
@@ -305,7 +301,7 @@ public class TestCubeMetastoreClient {
       new DerivedCube(derivedCubeNameWithProps, measures, dimensions, CUBE_PROPERTIES, 0L, cubeWithProps);
   }
 
-  private static void defineUberDims() {
+  private static void defineUberDims() throws LensException {
     // Define zip dimension
     zipAttrs.add(new BaseDimAttribute(new FieldSchema("zipcode", "int", "code")));
     zipAttrs.add(new BaseDimAttribute(new FieldSchema("f1", "string", "field1")));
@@ -411,7 +407,7 @@ public class TestCubeMetastoreClient {
       expr1.setExpr("contact(countrydim.name");
       stateCountryExpr.addExpression(expr1);
       fail("Expected add expression to fail because of syntax error");
-    } catch (ParseException exc) {
+    } catch (LensException exc) {
       // Pass
     }
     city.alterExpression(stateCountryExpr);

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestExprColumn.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestExprColumn.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestExprColumn.java
index 8770f1a..0153b2d 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestExprColumn.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestExprColumn.java
@@ -28,7 +28,9 @@ import java.util.Iterator;
 import java.util.Map;
 import java.util.TimeZone;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.ExprColumn.ExprSpec;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 
@@ -142,7 +144,7 @@ public class TestExprColumn {
   }
 
   @Test
-  public void testExprColumnCreationErrors() {
+  public void testExprColumnCreationErrors() throws LensException {
     FieldSchema colSchema = new FieldSchema("errorColumn", "double", "multi exprcol");
 
     // no expression spec passed
@@ -157,16 +159,16 @@ public class TestExprColumn {
     try {
       ExprColumn col1 = new ExprColumn(colSchema, "NoExprInExprSpec", new ExprSpec(null, null, null));
       fail(col1 + " should not be created");
-    } catch (IllegalArgumentException e) {
-      assertTrue(e.getMessage().contains("No expression string specified for column errorColumn at index:0"));
+    } catch (NullPointerException e) {
+      // pass
     }
 
     // Parse error in expr passed in exprspec
     try {
       ExprColumn col1 = new ExprColumn(colSchema, "NoExprInExprSpec", new ExprSpec("(a+b", null, null));
       fail(col1 + " should not be created");
-    } catch (IllegalArgumentException e) {
-      assertTrue(e.getMessage().contains("Expression can't be parsed: (a+b"), e.getMessage());
+    } catch (LensException e) {
+      assertEquals(e.getErrorCode(), LensCubeErrorCode.EXPRESSION_NOT_PARSABLE.getLensErrorInfo().getErrorCode());
     }
 
     // Parse error in expr passed in exprspec
@@ -174,8 +176,8 @@ public class TestExprColumn {
       ExprColumn col1 = new ExprColumn(colSchema, "NoExprInExprSpec", new ExprSpec("a + b", null, null),
         new ExprSpec("(a+b", null, null));
       fail(col1 + " should not be created");
-    } catch (IllegalArgumentException e) {
-      assertTrue(e.getMessage().contains("Expression can't be parsed: (a+b"));
+    } catch (LensException e) {
+      assertEquals(e.getErrorCode(), LensCubeErrorCode.EXPRESSION_NOT_PARSABLE.getLensErrorInfo().getErrorCode());
     }
 
     // no expression passed in exprspec
@@ -183,8 +185,8 @@ public class TestExprColumn {
       ExprColumn col1 = new ExprColumn(colSchema, "NoExprInExprSpecAt1", new ExprSpec("a + b", null, null),
         new ExprSpec(null, null, null));
       fail(col1 + " should not be created");
-    } catch (IllegalArgumentException e) {
-      assertTrue(e.getMessage().contains("No expression string specified for column errorColumn at index:1"));
+    } catch (NullPointerException e) {
+      // pass
     }
 
     // startTime after endTime

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 2a50d74..3f01dbe 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -1561,7 +1561,7 @@ public class CubeTestSetup {
   }
 
   // DimWithTwoStorages
-  private void createCityTable(CubeMetastoreClient client) throws HiveException, ParseException {
+  private void createCityTable(CubeMetastoreClient client) throws HiveException, ParseException, LensException {
     Set<CubeDimAttribute> cityAttrs = new HashSet<CubeDimAttribute>();
     cityAttrs.add(new BaseDimAttribute(new FieldSchema("id", "int", "code")));
     cityAttrs.add(new BaseDimAttribute(new FieldSchema("name", "string", "city name")));

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 3be9406..0f05556 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -416,7 +416,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
       conf.setBoolean(CubeQueryConfUtil.ENABLE_STORAGES_UNION, true);
 
       hqlQuery = rewrite("select cityid as `City ID`, msr8, msr7 as `Third measure` "
-        + "from testCube where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
+        + "from testCube where cityid = 'a' and zipcode = 'b' and " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
 
       expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
         "SELECT testcube.alias0 as `City ID`, sum(testcube.alias1) + max(testcube.alias2), "
@@ -426,7 +426,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
         "select testcube.cityid as `alias0`, sum(testcube.msr2) as `alias1`, "
           + "max(testcube.msr3) as `alias2`, "
           + "sum(case when testcube.cityid = 'x' then testcube.msr21 else testcube.msr22 end) as `alias3`",
-        null, "group by testcube.cityid");
+        "testcube.alias0 = 'a' and testcube.zipcode = 'b'", "group by testcube.cityid");
 
       compareQueries(hqlQuery, expected);
 

http://git-wip-us.apache.org/repos/asf/lens/blob/bf4c0bec/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
index a5883f7..817c84c 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
@@ -316,7 +316,7 @@ public final class JAXBUtils {
     return xes;
   }
 
-  private static ExprSpec[] exprSpecFromXExprColumn(Collection<XExprSpec> xesList) {
+  private static ExprSpec[] exprSpecFromXExprColumn(Collection<XExprSpec> xesList) throws LensException {
     List<ExprSpec> esArray = new ArrayList<ExprSpec>(xesList.size());
     for (XExprSpec xes : xesList) {
       esArray.add(new ExprSpec(xes.getExpr(), getDateFromXML(xes.getStartTime()), getDateFromXML(xes.getEndTime())));
@@ -478,7 +478,7 @@ public final class JAXBUtils {
     return jc;
   }
 
-  public static ExprColumn hiveExprColumnFromXExprColumn(XExprColumn xe) {
+  public static ExprColumn hiveExprColumnFromXExprColumn(XExprColumn xe) throws LensException {
     ExprColumn ec = new ExprColumn(new FieldSchema(xe.getName(), xe.getType().toLowerCase(),
       xe.getDescription()),
       xe.getDisplayString(),
@@ -598,7 +598,7 @@ public final class JAXBUtils {
       return null;
     }
 
-    Storage storage = null;
+    Storage storage;
     try {
       Class<?> clazz = Class.forName(xs.getClassname());
       Constructor<?> constructor = clazz.getConstructor(String.class);
@@ -924,7 +924,7 @@ public final class JAXBUtils {
     return ret;
   }
 
-  public static Dimension dimensionFromXDimension(XDimension dimension) {
+  public static Dimension dimensionFromXDimension(XDimension dimension) throws LensException {
     Set<CubeDimAttribute> dims = new LinkedHashSet<CubeDimAttribute>();
     for (XDimAttribute xd : dimension.getAttributes().getDimAttribute()) {
       dims.add(hiveDimAttrFromXDimAttr(xd));


[11/50] [abbrv] lens git commit: LENS-870 : Expressions in multi-fact query are not rewritten properly

Posted by sh...@apache.org.
LENS-870 : Expressions in multi-fact query are not rewritten properly


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/09baa128
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/09baa128
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/09baa128

Branch: refs/heads/LENS-581
Commit: 09baa12880ba27c00323bbf881e9c69acb580117
Parents: d820c32
Author: Sushil Mohanty <su...@gmail.com>
Authored: Tue Nov 24 12:05:05 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Tue Nov 24 12:05:05 2015 +0530

----------------------------------------------------------------------
 .../lens/cube/parse/CandidateTableResolver.java |  3 ++-
 .../lens/cube/parse/ExpressionResolver.java     |  6 ++---
 .../apache/lens/cube/parse/CubeTestSetup.java   |  2 ++
 .../lens/cube/parse/TestBaseCubeQueries.java    | 27 ++++++++++++++++++++
 4 files changed, 34 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/09baa128/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
index 0ad7610..38ff5a4 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
@@ -272,7 +272,8 @@ class CandidateTableResolver implements ContextRewriter {
         if (!checkForColumnExists(cfact, queriedMsrs)
           && (cubeql.getQueriedExprsWithMeasures().isEmpty()
             || cubeql.getExprCtx().allNotEvaluable(cubeql.getQueriedExprsWithMeasures(), cfact))) {
-          log.info("Not considering fact table:{} as columns {} is not available", cfact, queriedMsrs);
+          log.info("Not considering fact table:{} as columns {},{} are not available", cfact, queriedMsrs,
+                  cubeql.getQueriedExprsWithMeasures());
           cubeql.addFactPruningMsgs(cfact.fact, CandidateTablePruneCause.columnNotFound(queriedMsrs,
             cubeql.getQueriedExprsWithMeasures()));
           toRemove = true;

http://git-wip-us.apache.org/repos/asf/lens/blob/09baa128/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
index 1a347b2..200a48c 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
@@ -405,11 +405,11 @@ class ExpressionResolver implements ContextRewriter {
      */
     public boolean allNotEvaluable(Set<String> exprs, CandidateTable cTable) {
       for (String expr : exprs) {
-        if (!isEvaluable(expr, cTable)) {
-          return true;
+        if (isEvaluable(expr, cTable)) {
+          return false;
         }
       }
-      return false;
+      return true;
     }
 
     public Collection<String> coveringExpressions(Set<String> exprs, CandidateTable cTable) {
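
The fix above inverts the short-circuit: previously the method returned true as soon as a single expression was not evaluable, wrongly pruning facts that could still cover the query. A self-contained analogue of the corrected predicate (names hypothetical, not from the patch):

    import java.util.Set;
    import java.util.function.Predicate;

    static boolean allNotEvaluable(Set<String> exprs, Predicate<String> isEvaluable) {
      for (String expr : exprs) {
        if (isEvaluable.test(expr)) {
          return false; // at least one expression is evaluable on this table
        }
      }
      return true;      // none are evaluable: the table can be pruned
    }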

http://git-wip-us.apache.org/repos/asf/lens/blob/09baa128/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index aa15a2c..999faa0 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -732,6 +732,8 @@ public class CubeTestSetup {
       "round(msr1/1000)"));
     exprs.add(new ExprColumn(new FieldSchema("roundedmsr2", "double", "rounded measure2"), "Rounded msr2",
       "round(msr2/1000)"));
+    exprs.add(new ExprColumn(new FieldSchema("flooredmsr12", "double", "floored measure12"), "Floored msr12",
+            "floor(msr12)"));
     exprs.add(new ExprColumn(new FieldSchema("nestedexpr", "double", "nested expr"), "Nested expr",
       new ExprSpec("avg(roundedmsr2)", null, null), new ExprSpec("avg(equalsums)", null, null),
       new ExprSpec("case when substrexpr = 'xyz' then avg(msr5) when substrexpr = 'abc' then avg(msr4)/100 end",

http://git-wip-us.apache.org/repos/asf/lens/blob/09baa128/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index 548bf5c..1ea22b7 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -226,6 +226,33 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
   }
 
   @Test
+  public void testMultiFactQueryWithExpressionsFromMultipleFacts() throws Exception {
+    Configuration tConf = new Configuration(conf);
+    tConf.setBoolean(CubeQueryConfUtil.LIGHTEST_FACT_FIRST, true);
+    String hqlQuery = rewrite("select  dim1, roundedmsr2, flooredmsr12 from basecube" + " where "
+            + TWO_DAYS_RANGE, tConf);
+    String expected1 =
+            getExpectedQuery(cubeName, "select basecube.dim1 as `dim1`, "
+                            + "floor(sum(( basecube . msr12 ))) as `flooredmsr12` FROM ", null,
+                    " group by basecube.dim1", getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+    String expected2 = getExpectedQuery(cubeName,
+            "select basecube.dim1 as `dim1`, round(sum(basecube.msr2)/1000) as `roundedmsr2` FROM ", null,
+            " group by basecube.dim1", getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
+    TestCubeRewriter.compareContains(expected1, hqlQuery);
+    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    String lower = hqlQuery.toLowerCase();
+    assertTrue(
+            lower.startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, mq2.roundedmsr2 roundedmsr2, "
+                    + "mq1.flooredmsr12 flooredmsr12 from ")
+                    || lower.startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, mq1.roundedmsr2 roundedmsr2, "
+                    + "mq2.flooredmsr12 flooredmsr12"
+                    + " from "), hqlQuery);
+
+    assertTrue(hqlQuery.contains("mq1 full outer join ") && hqlQuery.endsWith("mq2 on mq1.dim1 <=> mq2.dim1"),
+            hqlQuery);
+  }
+
+  @Test
   public void testMultiFactQueryWithSingleCommonDimensionWithColumnsSwapped() throws Exception {
     // columns in select interchanged
     String hqlQuery = rewrite("select dim1, msr12, roundedmsr2 from basecube" + " where " + TWO_DAYS_RANGE, conf);


[08/50] [abbrv] lens git commit: LENS-851 : queries where results of two storage tables of the same fact are unioned, the rows should be aggregated

Posted by sh...@apache.org.
http://git-wip-us.apache.org/repos/asf/lens/blob/c445730c/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQuery.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQuery.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQuery.java
index 8e35ea9..db3ba9b 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQuery.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQuery.java
@@ -19,22 +19,28 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.parse.HQLParser.equalsAST;
+
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.lens.server.api.error.LensException;
+
 import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
 
 import com.google.common.base.Objects;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
-
-import lombok.Getter;
-import lombok.Setter;
-
+import lombok.Data;
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
 public class TestQuery {
+  private static HiveConf conf = new HiveConf();
+  private ASTNode ast;
 
   private String actualQuery;
   private String joinQueryPart = null;
@@ -46,6 +52,7 @@ public class TestQuery {
   private String preJoinQueryPart = null;
 
   private String postJoinQueryPart = null;
+  private boolean processed = false;
 
   public enum JoinType {
     INNERJOIN,
@@ -54,32 +61,45 @@ public class TestQuery {
     FULLOUTERJOIN,
     UNIQUE,
     LEFTSEMIJOIN,
-    JOIN;
+    JOIN
   }
 
   public enum Clause {
     WHERE,
     GROUPBY,
     HAVING,
-    ORDEREDBY;
+    ORDEREDBY
   }
 
   public TestQuery(String query) {
     this.actualQuery = query;
-    this.trimmedQuery = getTrimmedQuery(query);
-    this.joinQueryPart = extractJoinStringFromQuery(trimmedQuery);
-    /**
-     * Get the join query part, pre-join query and post-join query part from the trimmed query.
-     *
-     */
-    if (StringUtils.isNotBlank(joinQueryPart)) {
-      this.preJoinQueryPart = trimmedQuery.substring(0, trimmedQuery.indexOf(joinQueryPart));
-      this.postJoinQueryPart = trimmedQuery.substring(getMinIndexOfClause());
-      prepareJoinStrings(trimmedQuery);
-    } else {
-      int minIndex = getMinIndexOfClause();
-      this.preJoinQueryPart = trimmedQuery.substring(0, minIndex);
-      this.postJoinQueryPart = trimmedQuery.substring(minIndex);
+  }
+
+  public ASTNode getAST() throws LensException {
+    if (this.ast == null) {
+      ast = HQLParser.parseHQL(this.actualQuery, conf);
+    }
+    return ast;
+  }
+
+  public void processQueryAsString() {
+    if (!processed) {
+      processed = true;
+      this.trimmedQuery = getTrimmedQuery(actualQuery);
+      this.joinQueryPart = extractJoinStringFromQuery(trimmedQuery);
+      /**
+       * Get the join query part, pre-join query and post-join query part from the trimmed query.
+       *
+       */
+      if (StringUtils.isNotBlank(joinQueryPart)) {
+        this.preJoinQueryPart = trimmedQuery.substring(0, trimmedQuery.indexOf(joinQueryPart));
+        this.postJoinQueryPart = trimmedQuery.substring(getMinIndexOfClause());
+        prepareJoinStrings(trimmedQuery);
+      } else {
+        int minIndex = getMinIndexOfClause();
+        this.preJoinQueryPart = trimmedQuery.substring(0, minIndex);
+        this.postJoinQueryPart = trimmedQuery.substring(minIndex);
+      }
     }
   }
 
@@ -105,11 +125,11 @@ public class TestQuery {
       query = query.substring(nextJoinIndex + joinDetails.getJoinType().name().length());
     }
   }
-
+  @Data
   private class JoinDetails {
-    @Setter @Getter private JoinType joinType;
-    @Setter @Getter private int index;
-    @Setter @Getter private String joinString;
+    private JoinType joinType;
+    private int index;
+    private String joinString;
   }
 
   /**
@@ -129,7 +149,7 @@ public class TestQuery {
     joinDetails.setIndex(nextJoinIndex);
     if (nextJoinIndex != Integer.MAX_VALUE) {
       joinDetails.setJoinString(
-          getJoinString(query.substring(nextJoinIndex + nextJoinTypePart.name().length())));
+        getJoinString(query.substring(nextJoinIndex + nextJoinTypePart.name().length())));
     }
     joinDetails.setJoinType(nextJoinTypePart);
     return joinDetails;
@@ -164,7 +184,7 @@ public class TestQuery {
       }
       minClauseIndex = clauseIndex < minClauseIndex ? clauseIndex : minClauseIndex;
     }
-    return (minClauseIndex == Integer.MAX_VALUE || minClauseIndex == -1) ? query.length() : minClauseIndex;
+    return (minClauseIndex == Integer.MAX_VALUE) ? query.length() : minClauseIndex;
   }
 
   private int getMinIndexOfJoinType() {
@@ -190,6 +210,9 @@ public class TestQuery {
 
   @Override
   public boolean equals(Object query) {
+    if (!(query instanceof TestQuery)) {
+      return false;
+    }
     TestQuery expected = (TestQuery) query;
     if (this == expected) {
       return true;
@@ -201,9 +224,23 @@ public class TestQuery {
     } else if (expected.actualQuery == null) {
       return false;
     }
-    return Objects.equal(this.joinTypeStrings, expected.joinTypeStrings)
-        && Objects.equal(this.preJoinQueryPart, expected.preJoinQueryPart)
-        && Objects.equal(this.postJoinQueryPart, expected.postJoinQueryPart);
+    boolean equals = false;
+    try {
+      equals = equalsAST(this.getAST(), expected.getAST());
+    } catch (LensException e) {
+      log.error("AST not valid", e);
+    }
+    return equals || stringEquals(expected);
+  }
+
+  private boolean stringEquals(TestQuery expected) {
+    processQueryAsString();
+    expected.processQueryAsString();
+    return new EqualsBuilder()
+      .append(this.joinTypeStrings, expected.joinTypeStrings)
+      .append(this.preJoinQueryPart, expected.preJoinQueryPart)
+      .append(this.postJoinQueryPart, expected.postJoinQueryPart)
+      .build();
   }
 
   @Override
@@ -212,9 +249,6 @@ public class TestQuery {
   }
 
   public String toString() {
-    StringBuilder sb = new StringBuilder();
-    sb.append("Actual Query: " + actualQuery).append("\n");
-    sb.append("JoinQueryString: " + joinTypeStrings);
-    return sb.toString();
+    return "Actual Query: " + actualQuery + "\n" + "JoinQueryString: " + joinTypeStrings;
   }
 }
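
TestQuery equality is now AST-first: two query strings that differ only in whitespace or formatting compare equal via equalsAST, and the older string-segmentation comparison is kept as a fallback. A minimal sketch (assuming parseHQL succeeds under the default HiveConf):

    TestQuery a = new TestQuery("SELECT cityid FROM testcube");
    TestQuery b = new TestQuery("select  cityid   from testcube");
    // a.equals(b) -> true, decided by equalsAST(a.getAST(), b.getAST());
    // stringEquals(...) is consulted only when AST comparison does not match.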

http://git-wip-us.apache.org/repos/asf/lens/blob/c445730c/src/site/apt/user/olap-query-conf.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/user/olap-query-conf.apt b/src/site/apt/user/olap-query-conf.apt
index 6606d42..6f84869 100644
--- a/src/site/apt/user/olap-query-conf.apt
+++ b/src/site/apt/user/olap-query-conf.apt
@@ -36,36 +36,42 @@ OLAP query configuration
 *--+--+---+--+
 |6|lens.cube.query.enable.flattening.bridge.tables|false|Flag specifies if fields selected have to be flattened or not, if they are coming from tables with many to many relationship in join. If false, field selection will be simple join and selecting the field. If true, the fields from bridge tables will be aggregated grouped by join key.|
 *--+--+---+--+
-|7|lens.cube.query.fail.if.data.partial|false|Whether to fail the query if data is partial|
+|7|lens.cube.query.enable.storages.union|false|Sometimes One storage table doesn't contain all required partitions, and the query needs to be answered from two storage tables. Enabling this (make value = <true>) allows rewrite of such queries. If it's <false>, then such queries will fail in rewrite phase. The feature should only be enabled when all the aggregate functions used in the query (explicitly or implicitly picked from default aggregates of used measures) are transitive. Transitive aggregate functions are those that follow the following property:\ |
+| |                                     |     |                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                 \ |
+| |                                     |     |<<<f(a, b, c, d) = f(f(a, b), f(c, d)) for all possible values of a,b,c,d.>>>                                                                                                                                                                                                                                                                                                                                                                                                                                                    \ |
+| |                                     |     |                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                 \ |
+| |                                     |     |e.g. SUM, MAX, MIN etc are transitive aggregate functions, while AVG, COUNT etc are not.                                                                                                                                                                                                                                                                                                                                                                                                                                           |
 *--+--+---+--+
-|8|lens.cube.query.join.type|INNER|Tells what the join type is, in-case of automatic resolution of joins is enabled|
+|8|lens.cube.query.fail.if.data.partial|false|Whether to fail the query if data is partial|
 *--+--+---+--+
-|9|lens.cube.query.lookahead.ptparts.forinterval.${interval}|1|The value of number of lookahead process time partitions for interval specified. Interval can be any Update period.|
+|9|lens.cube.query.join.type|INNER|Tells what the join type is, in-case of automatic resolution of joins is enabled|
 *--+--+---+--+
-|10|lens.cube.query.max.interval| |Maximum value of the update period that the query timed dimensions can take values of. For example, if query involves month ranges, user can say query maximum interval is daily, then no monthly partitions will be picked.|
+|10|lens.cube.query.lookahead.ptparts.forinterval.${interval}|1|The value of number of lookahead process time partitions for interval specified. Interval can be any Update period.|
 *--+--+---+--+
-|11|lens.cube.query.nonexisting.partitions| |The list of comma separated non existing partitions, if query can run with partial data. The value will be set by the cube query rewriter|
+|11|lens.cube.query.max.interval| |Maximum value of the update period that the query timed dimensions can take values of. For example, if query involves month ranges, user can say query maximum interval is daily, then no monthly partitions will be picked.|
 *--+--+---+--+
-|12|lens.cube.query.partition.where.clause.format| |The simple date format of how the queried partition should be put in where clause. If nothing is specified, it will use the format from org.apache.lens.cube.metadata.UpdatePeriod for each type of partition|
+|12|lens.cube.query.nonexisting.partitions| |The list of comma separated non existing partitions, if query can run with partial data. The value will be set by the cube query rewriter|
 *--+--+---+--+
-|13|lens.cube.query.pick.lightest.fact.first|false|If set to true, lightest fact will be resolved first than resolving storages. Otherwise, storages will be resolved to check all partitions exist and then pick lightest fact among candidates|
+|13|lens.cube.query.partition.where.clause.format| |The simple date format of how the queried partition should be put in where clause. If nothing is specified, it will use the format from org.apache.lens.cube.metadata.UpdatePeriod for each type of partition|
 *--+--+---+--+
-|14|lens.cube.query.process.time.partition.column| |The column name which is a process time column. If process time column is specified, query rewriter will look ahead the partitions of other timed dimensions inside this column.|
+|14|lens.cube.query.pick.lightest.fact.first|false|If set to true, lightest fact will be resolved first than resolving storages. Otherwise, storages will be resolved to check all partitions exist and then pick lightest fact among candidates|
 *--+--+---+--+
-|15|lens.cube.query.promote.groupby.toselect|false|Tells whether to promote group by clauses to be promoted to select expressions if they are already not projected. To enable automatic promotion, this value should be true.|
+|15|lens.cube.query.process.time.partition.column| |The column name which is a process time column. If process time column is specified, query rewriter will look ahead the partitions of other timed dimensions inside this column.|
 *--+--+---+--+
-|16|lens.cube.query.promote.select.togroupby|false|Tells whether to promote select expressions which is not inside any aggregate, to be promoted to groupby clauses, if they are already not part of groupby clauses. To enable automatic promotion, this value should be true.|
+|16|lens.cube.query.promote.groupby.toselect|false|Tells whether to promote group by clauses to be promoted to select expressions if they are already not projected. To enable automatic promotion, this value should be true.|
 *--+--+---+--+
-|17|lens.cube.query.replace.timedim|true|Tells whether timedim attribute queried in the time range should be replaced with its corresponding partition column name.|
+|17|lens.cube.query.promote.select.togroupby|false|Tells whether to promote select expressions which is not inside any aggregate, to be promoted to groupby clauses, if they are already not part of groupby clauses. To enable automatic promotion, this value should be true.|
 *--+--+---+--+
-|18|lens.cube.query.time.range.writer.class|org.apache.lens.cube.parse.ORTimeRangeWriter|The timerange writer class which specifies how the resolved partitions in timeranges should be written in final query. Available writers are org.apache.lens.cube.parse.ORTimeRangeWriter and org.apache.lens.cube.parse.BetweenTimeRangeWriter|
+|18|lens.cube.query.replace.timedim|true|Tells whether timedim attribute queried in the time range should be replaced with its corresponding partition column name.|
 *--+--+---+--+
-|19|lens.cube.query.valid.${cubename}.facttables| |List of comma separated fact tables that are valid for cube. If no value is specified, all fact tables are valid|
+|19|lens.cube.query.time.range.writer.class|org.apache.lens.cube.parse.ORTimeRangeWriter|The timerange writer class which specifies how the resolved partitions in timeranges should be written in final query. Available writers are org.apache.lens.cube.parse.ORTimeRangeWriter and org.apache.lens.cube.parse.BetweenTimeRangeWriter|
 *--+--+---+--+
-|20|lens.cube.query.valid.dim.storgaetables| |List of comma separated dimension storage tables that are valid. If no value is specified, all tables are valid|
+|20|lens.cube.query.valid.${cubename}.facttables| |List of comma separated fact tables that are valid for cube. If no value is specified, all fact tables are valid|
 *--+--+---+--+
-|21|lens.cube.query.valid.fact.${facttable}.storage.${storagename}.updateperiods| |List of comma separated update periods that are valid for a fact on a storage. If no value is specified, all update periods are valid|
+|21|lens.cube.query.valid.dim.storgaetables| |List of comma separated dimension storage tables that are valid. If no value is specified, all tables are valid|
 *--+--+---+--+
-|22|lens.cube.query.valid.fact.${facttable}.storagetables| |List of comma separated storage tables that are valid for a fact. If no value is specified, all storage tables are valid|
+|22|lens.cube.query.valid.fact.${facttable}.storage.${storagename}.updateperiods| |List of comma separated update periods that are valid for a fact on a storage. If no value is specified, all update periods are valid|
+*--+--+---+--+
+|23|lens.cube.query.valid.fact.${facttable}.storagetables| |List of comma separated storage tables that are valid for a fact. If no value is specified, all storage tables are valid|
 *--+--+---+--+
 The configuration parameters and their default values
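
The transitivity property documented above is easiest to see with concrete values (illustrative arithmetic, not from the patch):

    sum(1, 2, 3, 4)  = 10 = sum(sum(1, 2), sum(3, 4))               -- transitive
    max(3, 7, 2, 9)  =  9 = max(max(3, 7), max(2, 9))               -- transitive
    count(a, b, c, d) = 4, but count(count(a, b), count(c, d)) = 2  -- not
    avg(1, 2, 3)      = 2, but avg(avg(1, 2), avg(3))      = 2.25   -- not

So enabling lens.cube.query.enable.storages.union for queries that use AVG or COUNT can silently produce wrong aggregates across the unioned storage tables.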


[09/50] [abbrv] lens git commit: LENS-851 : queries where results of two storage tables of the same fact are unioned, the rows should be aggregated

Posted by sh...@apache.org.
LENS-851 : queries where results of two storage tables of the same fact are unioned, the rows should be aggregated


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/c445730c
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/c445730c
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/c445730c

Branch: refs/heads/LENS-581
Commit: c445730c40533b5f51d239dc501a631416a54979
Parents: 2870be7
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Thu Nov 19 10:46:50 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Thu Nov 19 10:46:50 2015 +0530

----------------------------------------------------------------------
 lens-api/src/main/resources/lens-errors.conf    |   6 +
 .../lens/cube/error/LensCubeErrorCode.java      |   1 +
 .../lens/cube/parse/CubeQueryConfUtil.java      |   2 +
 .../lens/cube/parse/CubeQueryContext.java       |  23 +-
 .../org/apache/lens/cube/parse/HQLParser.java   |  14 +-
 .../lens/cube/parse/SimpleHQLContext.java       |  55 +---
 .../parse/SingleFactMultiStorageHQLContext.java | 193 ++++++++++++--
 .../apache/lens/cube/parse/UnionHQLContext.java |  60 +----
 .../src/main/resources/olap-query-conf.xml      |  15 ++
 .../apache/lens/cube/parse/CubeTestSetup.java   |  88 +++++--
 .../lens/cube/parse/TestAggregateResolver.java  |  72 +++---
 .../lens/cube/parse/TestCubeRewriter.java       | 252 ++++++++++++-------
 .../cube/parse/TestDenormalizationResolver.java |  30 +--
 .../lens/cube/parse/TestExpressionResolver.java |  12 +-
 .../org/apache/lens/cube/parse/TestQuery.java   | 100 +++++---
 src/site/apt/user/olap-query-conf.apt           |  38 +--
 16 files changed, 617 insertions(+), 344 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/c445730c/lens-api/src/main/resources/lens-errors.conf
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/lens-errors.conf b/lens-api/src/main/resources/lens-errors.conf
index 7526456..f50433a 100644
--- a/lens-api/src/main/resources/lens-errors.conf
+++ b/lens-api/src/main/resources/lens-errors.conf
@@ -282,6 +282,12 @@ lensCubeErrorsForQuery = [
     httpStatusCode = ${BAD_REQUEST}
     errorMsg = "No storage table available for candidate fact: %s"
   }
+
+  {
+      errorCode = 3031
+      httpStatusCode = ${BAD_REQUEST}
+      errorMsg = "The query is answerable from two storages but union is disabled."
+  }
 ]
 
 lensCubeErrorsForMetastore = [

http://git-wip-us.apache.org/repos/asf/lens/blob/c445730c/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
index 2119b64..24fb80b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
@@ -53,6 +53,7 @@ public enum LensCubeErrorCode {
   NO_CANDIDATE_FACT_AVAILABLE(3028, 1200),
   NO_CANDIDATE_DIM_STORAGE_TABLES(3029, 1300),
   NO_STORAGE_TABLE_AVAIABLE(3030, 1400),
+  STORAGE_UNION_DISABLED(3031, 100),
   ERROR_IN_ENTITY_DEFINITION(3101, 100);
 
   public LensErrorInfo getLensErrorInfo() {

http://git-wip-us.apache.org/repos/asf/lens/blob/c445730c/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
index 87972c8..d96b567 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
@@ -51,6 +51,8 @@ public final class CubeQueryConfUtil {
   public static final String ENABLE_SELECT_TO_GROUPBY = "lens.cube.query.promote.select.togroupby";
   public static final String ENABLE_ATTRFIELDS_ADD_DISTINCT = "lens.cube.query.enable.attrfields.add.distinct";
   public static final boolean DEFAULT_ATTR_FIELDS_ADD_DISTINCT = true;
+  public static final String ENABLE_STORAGES_UNION = "lens.cube.query.enable.storages.union";
+  public static final boolean DEFAULT_ENABLE_STORAGES_UNION = false;
 
   public static final String REPLACE_TIMEDIM_WITH_PART_COL = "lens.cube.query.replace.timedim";
   public static final boolean DEFAULT_MULTI_TABLE_SELECT = true;
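
A hedged usage sketch of the new flag (constant and default from the hunk above; behaviour per the CubeQueryContext change below):

    Configuration conf = new Configuration();
    conf.setBoolean(CubeQueryConfUtil.ENABLE_STORAGES_UNION, true);
    // Default is false: without this, a plan that needs two storage tables of
    // the same fact fails at rewrite time with STORAGE_UNION_DISABLED (3031).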

http://git-wip-us.apache.org/repos/asf/lens/blob/c445730c/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index 450d172..a660133 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -19,6 +19,8 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.parse.CubeQueryConfUtil.*;
+
 import static org.apache.hadoop.hive.ql.parse.HiveParser.Identifier;
 import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_TABLE_OR_COL;
 import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_TMP_FILE;
@@ -132,9 +134,9 @@ public class CubeQueryContext implements TrackQueriedColumns {
   protected final Map<Dimension, Set<CandidateDim>> candidateDims = new HashMap<Dimension, Set<CandidateDim>>();
 
   // query trees
-  @Getter
+  @Getter @Setter
   private ASTNode havingAST;
-  @Getter
+  @Getter @Setter
   private ASTNode selectAST;
 
   // Will be set after the Fact is picked and time ranges replaced
@@ -142,7 +144,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
   @Setter
   private ASTNode whereAST;
 
-  @Getter
+  @Getter @Setter
   private ASTNode orderByAST;
   // Setter is used in promoting the select when promotion is on.
   @Getter
@@ -667,6 +669,9 @@ public class CubeQueryContext implements TrackQueriedColumns {
   public Integer getLimitValue() {
     return qb.getParseInfo().getDestLimit(getClause());
   }
+  public void setLimitValue(Integer value) {
+    qb.getParseInfo().setDestLimit(getClause(), value);
+  }
 
   private String getStorageStringWithAlias(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, String alias) {
     if (cubeTbls.get(alias) instanceof CubeInterface) {
@@ -764,14 +769,14 @@ public class CubeQueryContext implements TrackQueriedColumns {
           }
         }
       }
-      conf.set(CubeQueryConfUtil.NON_EXISTING_PARTITIONS, partsStr);
+      conf.set(NON_EXISTING_PARTITIONS, partsStr);
     } else {
-      conf.unset(CubeQueryConfUtil.NON_EXISTING_PARTITIONS);
+      conf.unset(NON_EXISTING_PARTITIONS);
     }
   }
 
   public String getNonExistingParts() {
-    return conf.get(CubeQueryConfUtil.NON_EXISTING_PARTITIONS);
+    return conf.get(NON_EXISTING_PARTITIONS);
   }
 
   private Map<Dimension, CandidateDim> pickCandidateDimsToQuery(Set<Dimension> dimensions) throws LensException {
@@ -942,6 +947,9 @@ public class CubeQueryContext implements TrackQueriedColumns {
       return new DimOnlyHQLContext(dimsToQuery, query);
     } else if (facts.size() == 1 && facts.iterator().next().getStorageTables().size() > 1) {
       //create single fact with multiple storage context
+      if (!conf.getBoolean(ENABLE_STORAGES_UNION, DEFAULT_ENABLE_STORAGES_UNION)) {
+        throw new LensException(LensCubeErrorCode.STORAGE_UNION_DISABLED.getLensErrorInfo());
+      }
       return new SingleFactMultiStorageHQLContext(facts.iterator().next(), dimsToQuery, query);
     } else if (facts.size() == 1 && facts.iterator().next().getStorageTables().size() == 1) {
       // create single fact context
@@ -1129,8 +1137,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
   }
 
   public boolean shouldReplaceTimeDimWithPart() {
-    return getConf().getBoolean(CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL,
-      CubeQueryConfUtil.DEFAULT_REPLACE_TIMEDIM_WITH_PART_COL);
+    return getConf().getBoolean(REPLACE_TIMEDIM_WITH_PART_COL, DEFAULT_REPLACE_TIMEDIM_WITH_PART_COL);
   }
 
   public String getPartitionColumnOfTimeDim(String timeDimName) {

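The new @Setter annotations and setLimitValue exist so that the union rewrite can lift clauses out of the shared context: the outer query keeps HAVING, ORDER BY and LIMIT while the per-storage inner queries drop them. A condensed fragment of that hand-off (the real code also rewrites the expressions to outer aliases first; outer stands for the SimpleHQLContext subclass built for the union):

    outer.setLimit(query.getLimitValue());
    query.setLimitValue(null);          // inner queries lose the LIMIT
    if (query.getOrderByAST() != null) {
      outer.setOrderby(HQLParser.getString(query.getOrderByAST()));
      query.setOrderByAST(null);        // ... and the ORDER BY
    }

This is exactly the pattern SingleFactMultiStorageHQLContext uses in its process* methods below.
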
http://git-wip-us.apache.org/repos/asf/lens/blob/c445730c/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
index 16e1aa3..9a9d134 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
@@ -30,6 +30,7 @@ import java.util.regex.Pattern;
 
 import org.apache.lens.server.api.error.LensException;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -49,8 +50,14 @@ public final class HQLParser {
   private HQLParser() {
 
   }
+
   public static final Pattern P_WSPACE = Pattern.compile("\\s+");
 
+  public static boolean isTableColumnAST(ASTNode astNode) {
+    return !(astNode == null || astNode.getChildren() == null || astNode.getChildCount() != 2) && astNode.getChild(0)
+      .getType() == HiveParser.TOK_TABLE_OR_COL && astNode.getChild(1).getType() == HiveParser.Identifier;
+  }
+
   public interface ASTNodeVisitor {
     void visit(TreeNode node) throws LensException;
   }
@@ -786,8 +793,11 @@ public final class HQLParser {
     }
 
     // Compare text. For literals, comparison is case sensitive
-    if ((n1.getToken().getType() == StringLiteral && !n1.getText().equals(n2.getText()))
-      || !n1.getText().equalsIgnoreCase(n2.getText())) {
+    if ((n1.getToken().getType() == StringLiteral && !StringUtils.equals(n1.getText(), n2.getText()))) {
+      return false;
+    }
+
+    if (!StringUtils.equalsIgnoreCase(n1.getText(), n2.getText())) {
       return false;
     }
 

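Two things change here: a small helper and a null-safety fix. isTableColumnAST recognizes the two-child DOT shape of a table.column reference (a TOK_TABLE_OR_COL child followed by an Identifier child), which the union rewrite below uses to decide what to push into the inner select. The comparison fix splits one compound condition into two and switches to StringUtils, whose equals/equalsIgnoreCase are null-safe, whereas the old direct calls could throw a NullPointerException if a token's text was null. A tiny sketch of the behaviour difference (hypothetical helper):

    import org.apache.commons.lang.StringUtils;

    class TextCompareSketch {
      static boolean sameText(String t1, String t2, boolean caseSensitive) {
        // StringUtils treats (null, null) as equal and (null, "x") as unequal,
        // where t1.equals(t2) would throw when t1 is null.
        return caseSensitive ? StringUtils.equals(t1, t2) : StringUtils.equalsIgnoreCase(t1, t2);
      }
    }
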
http://git-wip-us.apache.org/repos/asf/lens/blob/c445730c/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
index 067a37a..62ceb12 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
@@ -26,6 +26,7 @@ import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 
+import lombok.Data;
 import lombok.extern.slf4j.Slf4j;
 
 /**
@@ -34,6 +35,7 @@ import lombok.extern.slf4j.Slf4j;
  * Making this as an abstract class because it provides constructors without all expressions being set.
  */
 @Slf4j
+@Data
 public abstract class SimpleHQLContext implements HQLContextInterface {
 
   private String select;
@@ -131,57 +133,4 @@ public abstract class SimpleHQLContext implements HQLContextInterface {
     }
     return queryFormat.toString();
   }
-
-  public String getFrom() {
-    return from;
-  }
-
-  public String getWhere() {
-    return where;
-  }
-
-  public String getSelect() {
-    return select;
-  }
-
-  public String getGroupby() {
-    return groupby;
-  }
-
-  public String getHaving() {
-    return having;
-  }
-
-  public String getOrderby() {
-    return orderby;
-  }
-
-  public Integer getLimit() {
-    return limit;
-  }
-
-  protected void setFrom(String from) {
-    this.from = from;
-  }
-
-  protected void setWhere(String where) {
-    this.where = where;
-  }
-
-  protected void setSelect(String select) {
-    this.select = select;
-  }
-
-  protected void setGroupby(String groupby) {
-    this.groupby = groupby;
-  }
-
-  protected void setHaving(String having) {
-    this.having = having;
-  }
-
-  protected void setOrderby(String orderby) {
-    this.orderby = orderby;
-  }
-
 }

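The handwritten accessors are replaced by Lombok's @Data, which generates a public getter and setter for every field plus equals, hashCode and toString. Roughly what the annotation expands to per field:

    // generated by @Data (sketch for the "select" field):
    public String getSelect() { return select; }
    public void setSelect(String select) { this.select = select; }
    // plus equals(Object), hashCode() and toString() over all fields

One small difference to be aware of: the removed setters were protected, while Lombok generates public ones, so the setters' visibility widens with this change.
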
http://git-wip-us.apache.org/repos/asf/lens/blob/c445730c/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
index 15a98dd..418ef5a 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
@@ -19,38 +19,203 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.parse.HQLParser.getString;
+
+import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
+
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.lens.cube.metadata.Dimension;
 import org.apache.lens.server.api.error.LensException;
 
-import lombok.Getter;
+import org.apache.hadoop.hive.ql.lib.Node;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+
+import org.antlr.runtime.CommonToken;
+import org.antlr.runtime.tree.Tree;
+
+import lombok.Data;
 
 
 public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
 
-  @Getter
-  private CubeQueryContext query = null;
-  private CandidateFact fact = null;
+  int aliasCounter = 0;
+
+  @Data
+  public static class HashableASTNode {
+    private ASTNode ast;
+    private int hashCode = -1;
+    private boolean hashCodeComputed = false;
+
+    public HashableASTNode(ASTNode ast) {
+      this.ast = ast;
+    }
+
+    public void setAST(ASTNode ast) {
+      this.ast = ast;
+      hashCodeComputed = false;
+    }
+
+    public ASTNode getAST() {
+      return ast;
+    }
+
+    @Override
+    public int hashCode() {
+      if (!hashCodeComputed) {
+        hashCode = HQLParser.getString(ast).hashCode();
+        hashCodeComputed = true;
+      }
+      return hashCode;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+      return o instanceof HashableASTNode && this.hashCode() == o.hashCode() && HQLParser.getString(this.getAST())
+        .trim().equalsIgnoreCase(HQLParser.getString(((HashableASTNode) o).getAST()).trim());
+    }
+  }
+
+  private Map<HashableASTNode, ASTNode> innerToOuterASTs = new HashMap<>();
 
   SingleFactMultiStorageHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query)
     throws LensException {
-    this.query = query;
-    this.fact = fact;
-    setUnionContexts(fact, dimsToQuery, query);
+    super(query, fact);
+    processSelectAST();
+    processGroupByAST();
+    processHavingAST();
+    processOrderByAST();
+    processLimit();
+    setHqlContexts(getUnionContexts(fact, dimsToQuery, query));
+  }
+
+  private void processSelectAST() {
+    query.getSelectFinalAliases().clear();
+    ASTNode originalSelectAST = HQLParser.copyAST(query.getSelectAST());
+    query.setSelectAST(new ASTNode(originalSelectAST.getToken()));
+    ASTNode outerSelectAST = processExpression(originalSelectAST);
+    setSelect(HQLParser.getString(outerSelectAST));
+  }
+
+  private void processGroupByAST() {
+    if (query.getGroupByAST() != null) {
+      setGroupby(getString(processExpression(query.getGroupByAST())));
+    }
   }
 
-  private void setUnionContexts(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query)
+  private void processHavingAST() throws LensException {
+    if (query.getHavingAST() != null) {
+      setHaving(HQLParser.getString(processExpression(query.getHavingAST())));
+      query.setHavingAST(null);
+    }
+  }
+
+  private void processOrderByAST() {
+    if (query.getOrderByAST() != null) {
+      setOrderby(HQLParser.getString(processExpression(query.getOrderByAST())));
+      query.setOrderByAST(null);
+    }
+  }
+
+  private void processLimit() {
+    setLimit(query.getLimitValue());
+    query.setLimitValue(null);
+  }
+  /*
+  Perform a DFS on the provided AST and create an AST of similar structure, with changes specific to the
+  inner query / outer query split. The resulting AST is meant to be used in the outer query.
+
+  Base cases:
+   1. ast is null => null
+   2. ast is table.column => add it to the inner select expressions, generate an alias, return cube.alias.
+            Memoize the mapping table.column => cube.alias
+   3. ast is aggregate_function(table.column) => add aggregate_function(table.column) to the inner select
+            expressions, generate an alias, return aggregate_function(cube.alias). Memoize the mapping
+            aggregate_function(table.column) => aggregate_function(cube.alias)
+            The assumption is that aggregate_function is transitive, i.e. f(a,b,c,d) = f(f(a,b), f(c,d)).
+            SUM, MAX, MIN etc. are transitive, while AVG, COUNT etc. are not. For non-transitive aggregate
+            functions, the re-written query will be incorrect.
+   4. If the given ast has already been memoized as described above, return the mapped node.
+
+   Recursive case:
+     Copy the root node, process the children recursively, add them as children of the copy, and return the copy.
+   */
+  private ASTNode processExpression(ASTNode astNode) {
+    if (astNode == null) {
+      return null;
+    }
+    if (innerToOuterASTs.containsKey(new HashableASTNode(astNode))) {
+      return innerToOuterASTs.get(new HashableASTNode(astNode));
+    }
+    if (HQLParser.isAggregateAST(astNode)) {
+      ASTNode innerSelectASTWithoutAlias = HQLParser.copyAST(astNode);
+      ASTNode innerSelectExprAST = new ASTNode(new CommonToken(HiveParser.TOK_SELEXPR));
+      innerSelectExprAST.addChild(innerSelectASTWithoutAlias);
+      String alias = decideAlias(astNode);
+      ASTNode aliasNode = new ASTNode(new CommonToken(Identifier, alias));
+      innerSelectExprAST.addChild(aliasNode);
+      addToInnerSelectAST(innerSelectExprAST);
+      ASTNode dotAST = getDotAST(query.getCube().getName(), alias);
+      ASTNode outerAST = new ASTNode(new CommonToken(TOK_FUNCTION));
+      outerAST.addChild(new ASTNode(new CommonToken(Identifier, astNode.getChild(0).getText())));
+      outerAST.addChild(dotAST);
+      innerToOuterASTs.put(new HashableASTNode(innerSelectASTWithoutAlias), outerAST);
+      return outerAST;
+    } else if (HQLParser.isTableColumnAST(astNode)) {
+      ASTNode innerSelectASTWithoutAlias = HQLParser.copyAST(astNode);
+      ASTNode innerSelectExprAST = new ASTNode(new CommonToken(HiveParser.TOK_SELEXPR));
+      innerSelectExprAST.addChild(innerSelectASTWithoutAlias);
+      String alias = decideAlias(astNode);
+      ASTNode aliasNode = new ASTNode(new CommonToken(Identifier, alias));
+      innerSelectExprAST.addChild(aliasNode);
+      addToInnerSelectAST(innerSelectExprAST);
+      ASTNode outerAST = getDotAST(query.getCube().getName(), alias);
+      innerToOuterASTs.put(new HashableASTNode(innerSelectASTWithoutAlias), outerAST);
+      return outerAST;
+    } else {
+      ASTNode outerExpression = new ASTNode(astNode);
+      if (astNode.getChildren() != null) {
+        for (Node child : astNode.getChildren()) {
+          outerExpression.addChild(processExpression((ASTNode) child));
+        }
+      }
+      return outerExpression;
+    }
+  }
+
+  private void addToInnerSelectAST(ASTNode selectExprAST) {
+    if (query.getSelectAST() == null) {
+      query.setSelectAST(new ASTNode(new CommonToken(TOK_SELECT)));
+    }
+    query.getSelectAST().addChild(selectExprAST);
+  }
+
+  private ASTNode getDotAST(String tableAlias, String fieldAlias) {
+    ASTNode child = new ASTNode(new CommonToken(DOT, "."));
+    child.addChild(new ASTNode(new CommonToken(TOK_TABLE_OR_COL, "TOK_TABLE_OR_COL")));
+    child.getChild(0).addChild(new ASTNode(new CommonToken(Identifier, tableAlias)));
+    child.addChild(new ASTNode(new CommonToken(Identifier, fieldAlias)));
+    return child;
+  }
+
+  private String decideAlias(Tree child) {
+    // Can add intelligence in aliases someday. Not required though :)
+    return "alias" + (aliasCounter++);
+  }
+
+  private static ArrayList<HQLContextInterface> getUnionContexts(CandidateFact fact, Map<Dimension, CandidateDim>
+    dimsToQuery, CubeQueryContext query)
     throws LensException {
-    hqlContexts = new ArrayList<HQLContextInterface>();
-    String alias = getQuery().getAliasForTableName(getQuery().getCube().getName());
+    ArrayList<HQLContextInterface> contexts = new ArrayList<>();
+    String alias = query.getAliasForTableName(query.getCube().getName());
     for (String storageTable : fact.getStorageTables()) {
       SingleFactHQLContext ctx = new SingleFactHQLContext(fact, storageTable + " " + alias, dimsToQuery, query,
-          fact.getWhereClause(storageTable.substring(storageTable.indexOf(".") + 1)));
-      hqlContexts.add(ctx);
+        fact.getWhereClause(storageTable.substring(storageTable.indexOf(".") + 1)));
+      contexts.add(ctx);
     }
-    super.setHqlContexts(hqlContexts);
+    return contexts;
   }
-
 }

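The core of the patch is this rewrite: every column or aggregate in the original query is pushed into the per-storage inner selects under a generated aliasN, and the outer query (select, group by, having, order by) is rewritten to reference only those aliases, re-applying the aggregate on top. A minimal, self-contained sketch of the memoized alias rewrite on plain strings (hypothetical class; the real code performs the same bookkeeping on ASTNodes, with HashableASTNode as the map key):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class UnionRewriteSketch {
      private final Map<String, String> innerToOuter = new HashMap<String, String>();
      private final List<String> innerSelect = new ArrayList<String>();
      private int aliasCounter = 0;

      // expr is e.g. "testcube.cityid" or "sum(testcube.msr2)"
      String rewrite(String expr, boolean aggregate) {
        String outer = innerToOuter.get(expr);
        if (outer != null) {
          return outer;                 // memoized: the same alias is reused
        }
        String alias = "alias" + (aliasCounter++);
        innerSelect.add(expr + " as `" + alias + "`");
        // aggregates are re-applied in the outer query: sum(testcube.msr2)
        // becomes sum(testcube.alias0); correct only for "transitive" functions
        outer = aggregate
          ? expr.substring(0, expr.indexOf('(') + 1) + "testcube." + alias + ")"
          : "testcube." + alias;
        innerToOuter.put(expr, outer);
        return outer;
      }
    }

Calling rewrite("sum(testcube.msr2)", true) from both the select and the having processing returns "sum(testcube.alias0)" both times while adding a single inner select entry, which is why the expected queries in testUnionQueries below reuse the same aliasN across clauses.
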
http://git-wip-us.apache.org/repos/asf/lens/blob/c445730c/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
index 9005826..c9ba561 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionHQLContext.java
@@ -20,70 +20,36 @@
 package org.apache.lens.cube.parse;
 
 import java.util.ArrayList;
-import java.util.LinkedHashSet;
 import java.util.List;
-import java.util.Set;
 
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.NotImplementedException;
-import org.apache.commons.lang.StringUtils;
 
 import lombok.AllArgsConstructor;
-import lombok.Getter;
-import lombok.NoArgsConstructor;
-import lombok.Setter;
+import lombok.RequiredArgsConstructor;
 
 @AllArgsConstructor
-@NoArgsConstructor
-public abstract class UnionHQLContext implements HQLContextInterface {
+@RequiredArgsConstructor
+public abstract class UnionHQLContext extends SimpleHQLContext {
+  protected final CubeQueryContext query;
+  private final CandidateFact fact;
 
-  @Getter
-  @Setter
-  List<HQLContextInterface> hqlContexts = new ArrayList<HQLContextInterface>();
+  List<HQLContextInterface> hqlContexts = new ArrayList<>();
 
-  @Override
-  public String toHQL() throws LensException {
-    Set<String> queryParts = new LinkedHashSet<String>();
+  public void setHqlContexts(List<HQLContextInterface> hqlContexts) throws LensException {
+    this.hqlContexts = hqlContexts;
+    StringBuilder queryParts = new StringBuilder("(");
+    String sep = "";
     for (HQLContextInterface ctx : hqlContexts) {
-      queryParts.add(ctx.toHQL());
+      queryParts.append(sep).append(ctx.toHQL());
+      sep = " UNION ALL ";
     }
-    return StringUtils.join(queryParts, " UNION ALL ");
-  }
-
-  @Override
-  public String getSelect()  {
-    throw new NotImplementedException("Not Implemented");
-  }
-
-  @Override
-  public String getFrom() {
-    throw new NotImplementedException("Not Implemented");
+    setFrom(queryParts.append(") ").append(query.getCube().getName()).toString());
   }
 
   @Override
   public String getWhere() {
     throw new NotImplementedException("Not Implemented");
   }
-
-  @Override
-  public String getGroupby() {
-    throw new NotImplementedException("Not Implemented");
-  }
-
-  @Override
-  public String getHaving() {
-    throw new NotImplementedException("Not Implemented");
-  }
-
-  @Override
-  public String getOrderby() {
-    throw new NotImplementedException("Not Implemented");
-  }
-
-  @Override
-  public Integer getLimit() {
-    throw new NotImplementedException("Not Implemented");
-  }
-
 }

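UnionHQLContext now extends SimpleHQLContext instead of implementing the interface from scratch: the per-storage queries become the FROM clause of the outer query, so SimpleHQLContext.toHQL can wrap them with the lifted select, group by, having, order by and limit. (The old toHQL joined a LinkedHashSet, which also deduplicated identical sub-queries; the StringBuilder version keeps every sub-query.) A standalone sketch of the FROM assembly, with placeholder storage queries:

    import java.util.Arrays;
    import java.util.List;

    class UnionFromSketch {
      static String buildFrom(List<String> storageQueries, String cubeName) {
        StringBuilder sb = new StringBuilder("(");
        String sep = "";
        for (String q : storageQueries) {
          sb.append(sep).append(q);
          sep = " UNION ALL ";
        }
        return sb.append(") ").append(cubeName).toString();
      }

      public static void main(String[] args) {
        // prints: (SELECT ... FROM c1_testfact ... UNION ALL SELECT ... FROM c2_testfact ...) testcube
        System.out.println(buildFrom(Arrays.asList(
            "SELECT ... FROM c1_testfact WHERE ...",
            "SELECT ... FROM c2_testfact WHERE ..."), "testcube"));
      }
    }
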
http://git-wip-us.apache.org/repos/asf/lens/blob/c445730c/lens-cube/src/main/resources/olap-query-conf.xml
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/resources/olap-query-conf.xml b/lens-cube/src/main/resources/olap-query-conf.xml
index 4c7b7fa..1436cd1 100644
--- a/lens-cube/src/main/resources/olap-query-conf.xml
+++ b/lens-cube/src/main/resources/olap-query-conf.xml
@@ -181,6 +181,21 @@
     </description>
   </property>
   <property>
+    <name>lens.cube.query.enable.storages.union</name>
+    <value>false</value>
+    <description>Sometimes one storage table does not contain all the required partitions, and the query needs
+      to be answered from multiple storage tables. Enabling this (setting the value to &lt;true&gt;) allows such
+      queries to be rewritten as a union over the storage tables. If it is &lt;false&gt;, such queries will fail
+      in the rewrite phase. The feature should only be enabled when all the aggregate
+      functions used in the query (explicitly or implicitly picked from default aggregates of used measures) are
+      transitive. Transitive aggregate functions are those that satisfy the following property:\
+      \
+      &lt;&lt;&lt;f(a, b, c, d) = f(f(a, b), f(c, d)) for all possible values of a, b, c, d.&gt;&gt;&gt;\
+      \
+      e.g. SUM, MAX, MIN etc. are transitive aggregate functions, while AVG, COUNT etc. are not.
+    </description>
+  </property>
+  <property>
     <name>lens.cube.query.enable.flattening.bridge.tables</name>
     <value>false</value>
     <description>Flag specifies if fields selected have to be flattened or not, if they are coming from tables with many

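A quick numeric check of the property stated in the new lens.cube.query.enable.storages.union description above, partitioning (a, b, c, d) = (1, 2, 3, 4) into (1, 2) and (3, 4):

    SUM:   sum(1,2,3,4) = 10,  sum(sum(1,2), sum(3,4)) = sum(3,7) = 10         -- holds
    MAX:   max(1,2,3,4) = 4,   max(max(1,2), max(3,4)) = max(2,4) = 4          -- holds
    COUNT: count(1,2,3,4) = 4, count(count(1,2), count(3,4)) = count(2,2) = 2  -- fails
    AVG:   for unequal partitions (1) and (2,3,4): avg(1,2,3,4) = 2.5, but
           avg(avg(1), avg(2,3,4)) = avg(1,3) = 2                              -- fails

This is why the union rewrite can safely re-aggregate SUM/MAX/MIN across storage tables, but would silently produce wrong numbers for AVG or COUNT.
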
http://git-wip-us.apache.org/repos/asf/lens/blob/c445730c/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 92f5067..aa15a2c 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -139,7 +139,7 @@ public class CubeTestSetup {
   private static String c99 = "C99";
   private static Map<String, String> factValidityProperties = Maps.newHashMap();
   @Getter
-  private static Map<String, String> storageToUpdatePeriodMap = new LinkedHashMap<String, String>();
+  private static Map<String, List<UpdatePeriod>> storageToUpdatePeriodMap = new LinkedHashMap<>();
 
   static {
     Calendar cal = Calendar.getInstance();
@@ -213,6 +213,32 @@ public class CubeTestSetup {
     return MONTH_PARSER.format(dt);
   }
 
+  interface StoragePartitionProvider {
+    Map<String, String> providePartitionsForStorage(String storage);
+  }
+
+  public static String getExpectedUnionQuery(String cubeName, List<String> storages, StoragePartitionProvider provider,
+    String outerSelectPart, String outerWhere, String outerPostWhere, String innerQuerySelectPart,
+    String innerWhere, String innerPostWhere) {
+    if (!innerQuerySelectPart.trim().toLowerCase().endsWith("from")) {
+      innerQuerySelectPart += " from ";
+    }
+    StringBuilder sb = new StringBuilder();
+    sb.append(outerSelectPart);
+    if (!outerSelectPart.trim().toLowerCase().endsWith("from")) {
+      sb.append(" from ");
+    }
+    sb.append(" (");
+    String sep = "";
+    for (String storage : storages) {
+      sb.append(sep).append(getExpectedQuery(cubeName, innerQuerySelectPart + " ",
+        innerWhere, innerPostWhere, provider.providePartitionsForStorage(storage)));
+      sep = " UNION ALL ";
+    }
+    return sb.append(") ").append(cubeName).append(" ").append(outerWhere == null ? "" : outerWhere)
+      .append(" ").append(outerPostWhere == null ? "" : outerPostWhere).toString();
+  }
+
   public static String getExpectedQuery(String cubeName, String selExpr, String whereExpr, String postWhereExpr,
     Map<String, String> storageTableToWhereClause) {
     return getExpectedQuery(cubeName, selExpr, whereExpr, postWhereExpr, storageTableToWhereClause, null);
@@ -241,7 +267,7 @@ public class CubeTestSetup {
       expected.append(entry.getValue());
       expected.append(")");
       if (postWhereExpr != null) {
-        expected.append(postWhereExpr);
+        expected.append(" ").append(postWhereExpr);
       }
     }
     return expected.toString();
@@ -349,10 +375,11 @@ public class CubeTestSetup {
       storageTableToWhereClause.put(getStorageTableString(storageTables), whereClause);
     } else {
       for (String tbl : storageTables) {
-        String updatePeriod = storageToUpdatePeriodMap.get(tbl);
-        String whereClause = getWhereForDailyAndHourly2daysWithTimeDimUnionQuery(cubeName, timedDimension, from, to)
-            .get(updatePeriod);
-        storageTableToWhereClause.put(getStorageTableString(tbl), whereClause);
+        for (UpdatePeriod updatePeriod : storageToUpdatePeriodMap.get(tbl)) {
+          String whereClause = getWhereForDailyAndHourly2daysWithTimeDimUnionQuery(cubeName, timedDimension, from, to)
+            .get(updatePeriod.getName());
+          storageTableToWhereClause.put(getStorageTableString(tbl), whereClause);
+        }
       }
     }
     return storageTableToWhereClause;
@@ -392,7 +419,7 @@ public class CubeTestSetup {
   }
 
   public static Map<String, String> getWhereForDailyAndHourly2daysWithTimeDimUnionQuery(String cubeName,
-      String timedDimension, Date from, Date to) {
+    String timedDimension, Date from, Date to) {
     Map<String, String> updatePeriodToWhereMap = new HashMap<String, String>();
     List<String> hourlyparts = new ArrayList<String>();
     List<String> dailyparts = new ArrayList<String>();
@@ -400,7 +427,7 @@ public class CubeTestSetup {
     if (!CubeTestSetup.isZerothHour()) {
       addParts(hourlyparts, HOURLY, from, DateUtil.getCeilDate(from, DAILY));
       addParts(hourlyparts, HOURLY, DateUtil.getFloorDate(to, DAILY),
-          DateUtil.getFloorDate(to, HOURLY));
+        DateUtil.getFloorDate(to, HOURLY));
       dayStart = DateUtil.getCeilDate(from, DAILY);
     } else {
       dayStart = from;
@@ -475,9 +502,9 @@ public class CubeTestSetup {
     Date monthStart = TWO_MONTHS_BACK;
     if (!CubeTestSetup.isZerothHour()) {
       addParts(hourlyparts, HOURLY, TWO_MONTHS_BACK,
-          DateUtil.getCeilDate(TWO_MONTHS_BACK, DAILY));
+        DateUtil.getCeilDate(TWO_MONTHS_BACK, DAILY));
       addParts(hourlyparts, HOURLY, DateUtil.getFloorDate(NOW, DAILY),
-          DateUtil.getFloorDate(NOW, HOURLY));
+        DateUtil.getFloorDate(NOW, HOURLY));
       dayStart = DateUtil.getCeilDate(TWO_MONTHS_BACK, DAILY);
       monthStart = DateUtil.getCeilDate(TWO_MONTHS_BACK, MONTHLY);
     }
@@ -488,7 +515,7 @@ public class CubeTestSetup {
       monthStart = DateUtil.getCeilDate(TWO_MONTHS_BACK, MONTHLY);
     }
     addParts(dailyparts, DAILY, DateUtil.getFloorDate(NOW, MONTHLY),
-        DateUtil.getFloorDate(NOW, DAILY));
+      DateUtil.getFloorDate(NOW, DAILY));
     addParts(monthlyparts, MONTHLY, monthStart, DateUtil.getFloorDate(NOW, MONTHLY));
 
     updatePeriodToPart.put("HOURLY", hourlyparts);
@@ -496,12 +523,14 @@ public class CubeTestSetup {
     updatePeriodToPart.put("MONTHLY", monthlyparts);
 
     List<String> unionParts = new ArrayList<String>();
-    for (Map.Entry<String, String> entry : storageToUpdatePeriodMap.entrySet()) {
-      String uperiod = entry.getKey();
-      String table = entry.getValue();
-      if (table.equals(storageTable) && updatePeriodToPart.containsKey(uperiod)) {
-        unionParts.addAll(updatePeriodToPart.get(uperiod));
-        Collections.sort(unionParts);
+    for (Map.Entry<String, List<UpdatePeriod>> entry : storageToUpdatePeriodMap.entrySet()) {
+      String table = entry.getKey();
+      for (UpdatePeriod updatePeriod : entry.getValue()) {
+        String uperiod = updatePeriod.getName();
+        if (table.equals(storageTable) && updatePeriodToPart.containsKey(uperiod)) {
+          unionParts.addAll(updatePeriodToPart.get(uperiod));
+          Collections.sort(unionParts);
+        }
       }
     }
 
@@ -591,6 +620,10 @@ public class CubeTestSetup {
     cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr1", "int", "first measure")));
     cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr2", "float", "second measure"), "Measure2", null, "SUM",
       "RS"));
+    cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr21", "float", "second measure"), "Measure22", null, "SUM",
+      "RS"));
+    cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr22", "float", "second measure"), "Measure22", null, "SUM",
+      "RS"));
     cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr3", "double", "third measure"), "Measure3", null, "MAX",
       null));
     cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr4", "bigint", "fourth measure"), "Measure4", null, "COUNT",
@@ -600,7 +633,7 @@ public class CubeTestSetup {
     cubeMeasures.add(new ColumnMeasure(new FieldSchema("newmeasure", "bigint", "measure available  from now"),
       "New measure", null, null, null, NOW, null, 100.0));
     cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr15", "int", "first measure"), "Measure15", null, "SUM",
-        "RS"));
+      "RS"));
 
     cubeDimensions = new HashSet<CubeDimAttribute>();
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("d_time", "timestamp", "d time")));
@@ -689,6 +722,10 @@ public class CubeTestSetup {
       "(1000 + sum(msr1) + sum(msr2))/100"));
     exprs.add(new ExprColumn(new FieldSchema("msr5", "double", "materialized in some facts"), "Fifth Msr",
       "msr2 + msr3"));
+    exprs.add(new ExprColumn(new FieldSchema("msr8", "double", "measure expression"), "Sixth Msr",
+      "msr2 + msr3"));
+    exprs.add(new ExprColumn(new FieldSchema("msr7", "double", "measure expression"), "Seventh Msr",
+      "case when sum(msr2) = 0 then 0 else sum(case when cityid='x' then msr21 else msr22 end)/sum(msr2) end"));
     exprs.add(new ExprColumn(new FieldSchema("equalsums", "double", "sums are equals"), "equalsums",
       new ExprSpec("msr3 + msr4", null, null), new ExprSpec("(msr3 + msr2)/100", null, null)));
     exprs.add(new ExprColumn(new FieldSchema("roundedmsr1", "double", "rounded measure1"), "Rounded msr1",
@@ -708,7 +745,7 @@ public class CubeTestSetup {
     exprs.add(new ExprColumn(new FieldSchema("msr6", "bigint", "sixth measure"), "Measure6",
       "sum(msr2) + max(msr3)/ count(msr4)"));
     exprs.add(new ExprColumn(new FieldSchema("booleancut", "boolean", "a boolean expression"), "Boolean cut",
-      "dim1 != 'x' AND dim2 != 10 "));
+      "(dim1 != 'x' AND dim2 != 10)"));
     exprs.add(new ExprColumn(new FieldSchema("substrexpr", "string", "a sub-string expression"), "Substr expr",
       new ExprSpec("substr(dim1, 3))", null, null), new ExprSpec("substr(ascii(testdim2.name), 3)", null, null)));
     exprs.add(new ExprColumn(new FieldSchema("substrexprdim2", "string", "a sub-string expression"), "Substr expr",
@@ -1146,7 +1183,7 @@ public class CubeTestSetup {
 
   }
 
-  private void createCubeContinuousFact(CubeMetastoreClient client) throws Exception{
+  private void createCubeContinuousFact(CubeMetastoreClient client) throws Exception {
     // create continuous raw fact only with extra measures
     String factName = "testFact_CONTINUOUS";
     List<FieldSchema> factColumns = new ArrayList<FieldSchema>();
@@ -1253,16 +1290,16 @@ public class CubeTestSetup {
     CubeFactTable fact = client.getFactTable(factName);
     Table table = client.getTable(MetastoreUtil.getStorageTableName(fact.getName(), Storage.getPrefix(c1)));
     assertEquals(table.getParameters().get(MetastoreUtil.getPartitionTimelineCachePresenceKey()), "true");
-    for (UpdatePeriod period: Lists.newArrayList(MINUTELY, MINUTELY, DAILY, MONTHLY, YEARLY, QUARTERLY)) {
-      for (String partCol: Lists.newArrayList("dt")) {
+    for (UpdatePeriod period : Lists.newArrayList(MINUTELY, MINUTELY, DAILY, MONTHLY, YEARLY, QUARTERLY)) {
+      for (String partCol : Lists.newArrayList("dt")) {
         assertTimeline(client, fact.getName(), c1, period, partCol, EndsAndHolesPartitionTimeline.class);
       }
     }
 
     table = client.getTable(MetastoreUtil.getStorageTableName(fact.getName(), Storage.getPrefix(c4)));
     assertEquals(table.getParameters().get(MetastoreUtil.getPartitionTimelineCachePresenceKey()), "true");
-    for (UpdatePeriod period: Lists.newArrayList(MINUTELY, MINUTELY, DAILY, MONTHLY, YEARLY, QUARTERLY)) {
-      for (String partCol: Lists.newArrayList("ttd", "ttd2")) {
+    for (UpdatePeriod period : Lists.newArrayList(MINUTELY, MINUTELY, DAILY, MONTHLY, YEARLY, QUARTERLY)) {
+      for (String partCol : Lists.newArrayList("ttd", "ttd2")) {
         assertTimeline(client, fact.getName(), c4, period, partCol, EndsAndHolesPartitionTimeline.class);
       }
     }
@@ -2266,7 +2303,7 @@ public class CubeTestSetup {
         });
       }
     });
-    Dimension userDim = new Dimension(dimName, dimAttrs, null, joinChains, dimProps,  0L);
+    Dimension userDim = new Dimension(dimName, dimAttrs, null, joinChains, dimProps, 0L);
     client.createDimension(userDim);
 
     String dimTblName = "usertable";
@@ -2377,6 +2414,7 @@ public class CubeTestSetup {
 
     client.createCubeDimensionTable(dimName, dimTblName, dimColumns, 0L, dumpPeriods, dimProps, storageTables);
   }
+
   public void createSources(HiveConf conf, String dbName) throws Exception {
     try {
       Database database = new Database();

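The new getExpectedUnionQuery helper assembles the expected outer-plus-inner union query for the tests. A sketch of how it is called (condensed from testUnionQueries in TestCubeRewriter below; the provider supplies each storage's partition WHERE clauses):

    StoragePartitionProvider provider = new StoragePartitionProvider() {
      @Override
      public Map<String, String> providePartitionsForStorage(String storage) {
        return getWhereForMonthlyDailyAndHourly2monthsUnionQuery(storage);
      }
    };
    String expected = getExpectedUnionQuery("testcube",
      Lists.newArrayList("c1_testfact", "c2_testfact"), provider,
      "SELECT testcube.alias0, sum(testcube.alias1)",                        // outer select
      null, "GROUP BY testcube.alias0",                                      // outer where / post-where
      "SELECT testcube.cityid as `alias0`, sum(testcube.msr2) as `alias1`",
      null, "GROUP BY testcube.cityid");                                     // inner where / post-where
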
http://git-wip-us.apache.org/repos/asf/lens/blob/c445730c/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
index 8da5263..753ca33 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
@@ -115,7 +115,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
         getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
     String expectedq7 =
       getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr2) from ", null,
-        "group by testcube.cityid having" + " sum(testCube.msr2) > 100) OR (sum(testCube.msr2) < 100 AND"
+        "group by testcube.cityid having" + " sum(testCube.msr2) > 100 OR (sum(testCube.msr2) < 100 AND"
           + " max(testcube.msr3) > 1000)", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
     String expectedq8 =
       getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr2) * max(testCube.msr3) from ", null,
@@ -139,7 +139,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     for (int i = 0; i < tests.length; i++) {
       String hql = rewrite(tests[i], conf);
       System.out.println("hql[" + i + "]:" + hql);
-      compareQueries(expected[i], hql);
+      compareQueries(hql, expected[i]);
     }
     aggregateFactSelectionTests(conf);
     rawFactSelectionTests(getConfWithStorages("C1,C2"));
@@ -156,7 +156,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     String expectedQL1 =
       getExpectedQuery(cubeName, "SELECT distinct testcube.cityid, testcube.zipcode, testcube.stateid" + " from ", null,
         null, getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
-    compareQueries(expectedQL1, hQL1);
+    compareQueries(hQL1, expectedQL1);
 
     //Don't add distinct
     String query2 = "SELECT count (distinct testcube.cityid) from testcube where " + TWO_DAYS_RANGE;
@@ -164,7 +164,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     String expectedQL2 =
       getExpectedQuery(cubeName, "SELECT count (distinct testcube.cityid)" + " from ", null, null,
         getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
-    compareQueries(expectedQL2, hQL2);
+    compareQueries(hQL2, expectedQL2);
 
     //Don't add distinct
     String query3 = "SELECT  testcube.cityid, count(distinct testcube.stateid) from testcube where " + TWO_DAYS_RANGE;
@@ -172,7 +172,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     String expectedQL3 =
       getExpectedQuery(cubeName, "SELECT testcube.cityid, count(distinct testcube.stateid)" + " from ", null,
         "group by testcube.cityid", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
-    compareQueries(expectedQL3, hQL3);
+    compareQueries(hQL3, expectedQL3);
 
     //Don't add distinct
     String query4 = "SELECT  count(testcube.stateid) from testcube where " + TWO_DAYS_RANGE;
@@ -180,7 +180,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     String expectedQL4 =
       getExpectedQuery(cubeName, "SELECT count(testcube.stateid)" + " from ", null,
         null, getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
-    compareQueries(expectedQL4, hQL4);
+    compareQueries(hQL4, expectedQL4);
 
     //Don't add distinct, by setting the flag false
     conf.setBoolean(CubeQueryConfUtil.ENABLE_ATTRFIELDS_ADD_DISTINCT, false);
@@ -189,7 +189,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     String expectedQL5 =
       getExpectedQuery(cubeName, "SELECT testcube.stateid" + " from ", null,
         null, getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
-    compareQueries(expectedQL5, hQL5);
+    compareQueries(hQL5, expectedQL5);
 
 
   }
@@ -210,7 +210,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     String expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid," + " testCube.msr2 from ", null, null,
         getWhereForHourly2days("c1_testfact2_raw"));
-    compareQueries(expectedQL, hQL);
+    compareQueries(hQL, expectedQL);
     conf2.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
     aggregateFactSelectionTests(conf2);
     conf2.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1,C2");
@@ -222,16 +222,16 @@ public class TestAggregateResolver extends TestQueryRewrite {
     CubeQueryContext cubeql = rewriteCtx(query, conf);
     String hQL = cubeql.toHQL();
     String expectedQL =
-      getExpectedQuery(cubeName, "SELECT count(distinct testcube.cityid)," + " from ", null, null,
+      getExpectedQuery(cubeName, "SELECT count(distinct testcube.cityid) from ", null, null,
         getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
-    compareQueries(expectedQL, hQL);
+    compareQueries(hQL, expectedQL);
 
     query = "SELECT distinct cityid from testcube where " + TWO_DAYS_RANGE;
     hQL = rewrite(query, conf);
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT distinct testcube.cityid," + " from ", null, null,
+      getExpectedQuery(cubeName, "SELECT distinct testcube.cityid from ", null, null,
         getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
-    compareQueries(expectedQL, hQL);
+    compareQueries(hQL, expectedQL);
 
     // with aggregate resolver on/off, msr with its default aggregate around it
     // should pick up aggregated fact
@@ -241,7 +241,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr2) from ", null,
         "group by testcube.cityid", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
-    compareQueries(expectedQL, hQL);
+    compareQueries(hQL, expectedQL);
 
     query = "SELECT cityid, sum(testCube.msr2) m2 FROM testCube WHERE " + TWO_DAYS_RANGE + " order by m2";
     cubeql = rewriteCtx(query, conf);
@@ -249,7 +249,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr2) as `m2` from ", null,
         "group by testcube.cityid order by m2 asc", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
-    compareQueries(expectedQL, hQL);
+    compareQueries(hQL, expectedQL);
 
     query = "SELECT cityid, sum(testCube.msr2) FROM testCube WHERE " + TWO_DAYS_RANGE + " having max(msr3) > 100";
     cubeql = rewriteCtx(query, conf);
@@ -258,7 +258,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
       getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr2) from ", null,
         "group by testcube.cityid having max(testcube.msr3) > 100",
         getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
-    compareQueries(expectedQL, hQL);
+    compareQueries(hQL, expectedQL);
   }
 
   private void rawFactSelectionTests(Configuration conf) throws ParseException, LensException {
@@ -270,9 +270,9 @@ public class TestAggregateResolver extends TestQueryRewrite {
     CandidateFact candidateFact = cubeql.getCandidateFacts().iterator().next();
     Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
     String expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid," + " avg(testCube.msr2)) from ", null,
+      getExpectedQuery(cubeName, "SELECT testcube.cityid," + " avg(testCube.msr2) from ", null,
         "group by testcube.cityid", getWhereForHourly2days("c1_testfact2_raw"));
-    compareQueries(expectedQL, hQL);
+    compareQueries(hQL, expectedQL);
 
     // query with measure in a where clause
     query = "SELECT cityid, sum(testCube.msr2) FROM testCube WHERE testCube.msr1 < 100 and " + TWO_DAYS_RANGE;
@@ -282,9 +282,9 @@ public class TestAggregateResolver extends TestQueryRewrite {
     Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr2)) from ", "testcube.msr1 < 100",
+      getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr2) from ", "testcube.msr1 < 100",
         "group by testcube.cityid", getWhereForHourly2days("c1_testfact2_raw"));
-    compareQueries(expectedQL, hQL);
+    compareQueries(hQL, expectedQL);
 
     query = "SELECT cityid, testCube.msr2 FROM testCube WHERE testCube.msr2 < 100 and " + TWO_DAYS_RANGE;
     cubeql = rewriteCtx(query, conf);
@@ -295,7 +295,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid," + " testCube.msr2 from ", "testcube.msr2 < 100", null,
         getWhereForHourly2days("c1_testfact2_raw"));
-    compareQueries(expectedQL, hQL);
+    compareQueries(hQL, expectedQL);
 
     query = "SELECT cityid, sum(testCube.msr2) FROM testCube WHERE " + TWO_DAYS_RANGE + " group by testCube.msr1";
     cubeql = rewriteCtx(query, conf);
@@ -304,9 +304,9 @@ public class TestAggregateResolver extends TestQueryRewrite {
     Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr2)) from ", null,
+      getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr2) from ", null,
         " group by testCube.msr1, testcube.cityid", getWhereForHourly2days("c1_testfact2_raw"));
-    compareQueries(expectedQL, hQL);
+    compareQueries(hQL, expectedQL);
 
     query = "SELECT cityid, sum(testCube.msr2) FROM testCube WHERE " + TWO_DAYS_RANGE + " group by testCube.msr3";
     cubeql = rewriteCtx(query, conf);
@@ -315,9 +315,9 @@ public class TestAggregateResolver extends TestQueryRewrite {
     Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr2)) from ", null,
+      getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr2) from ", null,
         " group by testCube.msr3, testcube.cityid", getWhereForHourly2days("c1_testfact2_raw"));
-    compareQueries(expectedQL, hQL);
+    compareQueries(hQL, expectedQL);
 
     query = "SELECT cityid, sum(testCube.msr2) FROM testCube WHERE " + TWO_DAYS_RANGE + " order by testCube.msr1";
     cubeql = rewriteCtx(query, conf);
@@ -326,9 +326,9 @@ public class TestAggregateResolver extends TestQueryRewrite {
     Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr2)) from ", null,
+      getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr2) from ", null,
         " group by testcube.cityid order by testcube.msr1 asc", getWhereForHourly2days("c1_testfact2_raw"));
-    compareQueries(expectedQL, hQL);
+    compareQueries(hQL, expectedQL);
 
     query = "SELECT cityid, sum(testCube.msr2) FROM testCube WHERE " + TWO_DAYS_RANGE + " order by testCube.msr3";
     cubeql = rewriteCtx(query, conf);
@@ -337,9 +337,9 @@ public class TestAggregateResolver extends TestQueryRewrite {
     Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr2)) from ", null,
+      getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr2) from ", null,
         " group by testcube.cityid order by testcube.msr3 asc", getWhereForHourly2days("c1_testfact2_raw"));
-    compareQueries(expectedQL, hQL);
+    compareQueries(hQL, expectedQL);
 
     query = "SELECT distinct cityid, round(testCube.msr2) from testCube where " + TWO_DAYS_RANGE;
     cubeql = rewriteCtx(query, conf);
@@ -350,7 +350,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     expectedQL =
       getExpectedQuery(cubeName, "SELECT distinct testcube.cityid," + " round(testCube.msr2) from ", null, null,
         getWhereForHourly2days("c1_testfact2_raw"));
-    compareQueries(expectedQL, hQL);
+    compareQueries(hQL, expectedQL);
 
     query = "SELECT cityid, count(distinct(testCube.msr2)) from testCube where " + TWO_DAYS_RANGE;
     cubeql = rewriteCtx(query, conf);
@@ -361,7 +361,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid, count(distinct testCube.msr2) from ", null,
         "group by testcube.cityid", getWhereForHourly2days("c1_testfact2_raw"));
-    compareQueries(expectedQL, hQL);
+    compareQueries(hQL, expectedQL);
 
     // query with no default aggregate measure
     query = "SELECT cityid, round(testCube.msr1) from testCube where " + TWO_DAYS_RANGE;
@@ -373,7 +373,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid," + " round(testCube.msr1) from ", null, null,
         getWhereForHourly2days("c1_testfact2_raw"));
-    compareQueries(expectedQL, hQL);
+    compareQueries(hQL, expectedQL);
 
     query = "SELECT distinct cityid, round(testCube.msr1) from testCube where " + TWO_DAYS_RANGE;
     cubeql = rewriteCtx(query, conf);
@@ -384,7 +384,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     expectedQL =
       getExpectedQuery(cubeName, "SELECT distinct testcube.cityid," + " round(testCube.msr1) from ", null, null,
         getWhereForHourly2days("c1_testfact2_raw"));
-    compareQueries(expectedQL, hQL);
+    compareQueries(hQL, expectedQL);
 
     query = "SELECT cityid, count(distinct(testCube.msr1)) from testCube where " + TWO_DAYS_RANGE;
     cubeql = rewriteCtx(query, conf);
@@ -395,7 +395,7 @@ public class TestAggregateResolver extends TestQueryRewrite {
     expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid, count(distinct testCube.msr1) from ", null,
         "group by testcube.cityid", getWhereForHourly2days("c1_testfact2_raw"));
-    compareQueries(expectedQL, hQL);
+    compareQueries(hQL, expectedQL);
 
     query = "SELECT cityid, sum(testCube.msr1) from testCube where " + TWO_DAYS_RANGE;
     cubeql = rewriteCtx(query, conf);
@@ -404,15 +404,15 @@ public class TestAggregateResolver extends TestQueryRewrite {
     Assert.assertEquals("testFact2_raw".toLowerCase(), candidateFact.fact.getName().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
-      getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr1)) from ", null,
+      getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr1) from ", null,
         "group by testcube.cityid", getWhereForHourly2days("c1_testfact2_raw"));
-
+    compareQueries(hQL, expectedQL);
     query = "SELECT cityid, sum(testCube.msr2) FROM testCube WHERE " + TWO_DAYS_RANGE + " having max(msr1) > 100";
     cubeql = rewriteCtx(query, conf);
     hQL = cubeql.toHQL();
     expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid," + " sum(testCube.msr2) from ", null,
         "group by testcube.cityid having max(testcube.msr1) > 100", getWhereForHourly2days("c1_testfact2_raw"));
-    compareQueries(expectedQL, hQL);
+    compareQueries(hQL, expectedQL);
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/c445730c/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 4acd063..494f81b 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -19,10 +19,12 @@
 
 package org.apache.lens.cube.parse;
 
-import static org.apache.lens.cube.metadata.UpdatePeriod.DAILY;
-import static org.apache.lens.cube.metadata.UpdatePeriod.HOURLY;
+import static org.apache.lens.cube.metadata.UpdatePeriod.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.*;
+import static org.apache.lens.cube.parse.CubeQueryConfUtil.getValidStorageTablesKey;
+import static org.apache.lens.cube.parse.CubeQueryConfUtil.getValidUpdatePeriodsKey;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
+import static org.apache.lens.cube.parse.CubeTestSetup.getWhereForMonthlyDailyAndHourly2monthsUnionQuery;
 
 import static org.testng.Assert.*;
 
@@ -37,7 +39,6 @@ import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.time.DateUtils;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -51,8 +52,8 @@ import org.testng.annotations.Test;
 
 import com.google.common.base.Splitter;
 import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
-
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
@@ -361,7 +362,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
     conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
     conf.set(CubeQueryConfUtil.getValidFactTablesKey(cubeName), "testFact2");
-    conf.set(CubeQueryConfUtil.getValidStorageTablesKey("testFact2"), "C1_testFact2");
+    conf.set(getValidStorageTablesKey("testFact2"), "C1_testFact2");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     expected =
       getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
@@ -369,16 +370,16 @@ public class TestCubeRewriter extends TestQueryRewrite {
     compareQueries(hqlQuery, expected);
 
     conf.set(CubeQueryConfUtil.getValidFactTablesKey(cubeName), "testFact");
-    conf.set(CubeQueryConfUtil.getValidStorageTablesKey("testfact"), "C1_testFact");
-    conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C1"), "HOURLY");
+    conf.set(getValidStorageTablesKey("testfact"), "C1_testFact");
+    conf.set(getValidUpdatePeriodsKey("testfact", "C1"), "HOURLY");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     expected =
       getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, getWhereForHourly2days("c1_testfact"));
     compareQueries(hqlQuery, expected);
 
     conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
-    conf.set(CubeQueryConfUtil.getValidStorageTablesKey("testfact"), "C2_testFact");
-    conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C2"), "HOURLY");
+    conf.set(getValidStorageTablesKey("testfact"), "C2_testFact");
+    conf.set(getValidUpdatePeriodsKey("testfact", "C2"), "HOURLY");
     hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     expected =
       getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null, getWhereForHourly2days("c2_testfact"));
@@ -395,33 +396,110 @@ public class TestCubeRewriter extends TestQueryRewrite {
   }
 
   @Test
-  public void testCubeWhereQueryDuplicatePartitionElimination() throws Exception {
+  public void testUnionQueries() throws Exception {
     Configuration conf = getConf();
-    conf.set(CubeQueryConfUtil.getValidStorageTablesKey("testfact"), "C1_testFact,C2_testFact");
-    conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C1"), "DAILY,HOURLY");
-    conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact2", "C1"), "YEARLY");
-    conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C2"), "MONTHLY,DAILY");
-
+    conf.set(getValidStorageTablesKey("testfact"), "C1_testFact,C2_testFact");
+    conf.set(getValidUpdatePeriodsKey("testfact", "C1"), "DAILY,HOURLY");
+    conf.set(getValidUpdatePeriodsKey("testfact2", "C1"), "YEARLY");
+    conf.set(getValidUpdatePeriodsKey("testfact", "C2"), "MONTHLY,DAILY");
+    ArrayList<String> storages = Lists.newArrayList("c1_testfact", "c2_testfact");
     try {
-      CubeTestSetup.getStorageToUpdatePeriodMap().put("HOURLY", "c1_testfact");
-      CubeTestSetup.getStorageToUpdatePeriodMap().put("DAILY", "c1_testfact");
-      CubeTestSetup.getStorageToUpdatePeriodMap().put("MONTHLY", "c2_testfact");
+      CubeTestSetup.getStorageToUpdatePeriodMap().put("c1_testfact", Lists.newArrayList(HOURLY, DAILY));
+      CubeTestSetup.getStorageToUpdatePeriodMap().put("c2_testfact", Lists.newArrayList(MONTHLY));
 
       // Union query
-      String hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
-      System.out.println("HQL: " + hqlQuery);
-
-      String expected1 = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-        getWhereForMonthlyDailyAndHourly2monthsUnionQuery("c1_testfact"));
-      String expected2 = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-        getWhereForMonthlyDailyAndHourly2monthsUnionQuery("c2_testfact"));
-
-      System.out.println("Expected1 : " + expected1);
-      System.out.println("Expected2 : " + expected2);
-
-      TestCubeRewriter.compareContains(expected1, hqlQuery);
-      TestCubeRewriter.compareContains(expected2, hqlQuery);
-      TestCubeRewriter.compareContains("UNION ALL", hqlQuery);
+      String hqlQuery;
+      String expected;
+      StoragePartitionProvider provider = new StoragePartitionProvider() {
+        @Override
+        public Map<String, String> providePartitionsForStorage(String storage) {
+          return getWhereForMonthlyDailyAndHourly2monthsUnionQuery(storage);
+        }
+      };
+      try {
+        rewrite("select cityid as `City ID`, msr8, msr7 as `Third measure` "
+          + "from testCube where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
+        fail("Union feature is disabled, should have failed");
+      } catch (LensException e) {
+        assertEquals(e.getErrorCode(), LensCubeErrorCode.STORAGE_UNION_DISABLED.getLensErrorInfo().getErrorCode());
+      }
+      conf.setBoolean(CubeQueryConfUtil.ENABLE_STORAGES_UNION, true);
+
+      hqlQuery = rewrite("select cityid as `City ID`, msr8, msr7 as `Third measure` "
+        + "from testCube where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
+
+      expected = getExpectedUnionQuery(cubeName, storages, provider,
+        "SELECT testcube.alias0 as `City ID`, sum(testcube.alias1) + max(testcube.alias2), "
+          + "case when sum(testcube.alias1) = 0 then 0 else sum(testcube.alias3)/sum(testcube.alias1) end "
+          + "as `Third Measure`",
+        null, "group by testcube.alias0",
+        "select testcube.cityid as `alias0`, sum(testcube.msr2) as `alias1`, "
+          + "max(testcube.msr3) as `alias2`, "
+          + "sum(case when testcube.cityid = 'x' then testcube.msr21 else testcube.msr22 end) as `alias3`",
+        null, "group by testcube.cityid");
+
+      compareQueries(hqlQuery, expected);
+
+      hqlQuery = rewrite("select cityid as `City ID`, msr3 as `Third measure` from testCube where "
+        + TWO_MONTHS_RANGE_UPTO_HOURS + " having msr7 > 10", conf);
+
+      expected = getExpectedUnionQuery(cubeName, storages, provider,
+        "SELECT testcube.alias0 as `City ID`, max(testcube.alias1) as `Third measure`",
+        null, "group by testcube.alias0 having "
+          + "(case when sum(testcube.alias2)=0 then 0 else sum(testcube.alias3)/sum(testcube.alias2) end > 10 )",
+        "SELECT testcube.cityid as `alias0`, max(testcube.msr3) as `alias1`, "
+          + "sum(testcube.msr2) as `alias2`, "
+          + "sum(case when testcube.cityid='x' then testcube.msr21 else testcube.msr22 end) as `alias3`",
+        null, "group by testcube.cityid");
+      compareQueries(hqlQuery, expected);
+
+      hqlQuery = rewrite("select cityid as `City ID`, msr3 as `Third measure` from testCube where "
+        + TWO_MONTHS_RANGE_UPTO_HOURS + " having msr8 > 10", conf);
+
+      expected = getExpectedUnionQuery(cubeName, storages, provider,
+        "SELECT testcube.alias0 as `City ID`, max(testcube.alias1) as `Third measure`",
+        null, "GROUP BY testcube.alias0 "
+          + "HAVING (sum(testcube.alias2) + max(testcube.alias1)) > 10 ",
+        "SELECT testcube.cityid as `alias0`, max(testcube.msr3) as `alias1`, "
+          + "sum(testcube.msr2)as `alias2`", null, "group by testcube.cityid");
+      compareQueries(hqlQuery, expected);
+
+      hqlQuery = rewrite("select msr3 as `Measure 3` from testCube where "
+        + TWO_MONTHS_RANGE_UPTO_HOURS + " having msr2 > 10 and msr2 < 100", conf);
+
+      expected = getExpectedUnionQuery(cubeName, storages, provider,
+        "SELECT max(testcube.alias0) as `Measure 3` ",
+        null, " HAVING sum(testcube.alias1) > 10 and sum(testcube.alias1) < 100",
+        "SELECT max(testcube.msr3) as `alias0`, sum(testcube.msr2) as `alias1`", null, null);
+      compareQueries(hqlQuery, expected);
+
+      hqlQuery = rewrite("select zipcode, cityid as `City ID`, msr3 as `Measure 3`, msr4, "
+        + "SUM(msr2) as `Measure 2` from testCube where "
+        + TWO_MONTHS_RANGE_UPTO_HOURS + " having msr4 > 10 order by cityid desc limit 5", conf);
+
+      expected = getExpectedUnionQuery(cubeName, storages, provider,
+        "SELECT testcube.alias0, testcube.alias1 as `City ID`, max(testcube.alias2) as `Measure 3`, "
+          + "count(testcube.alias3), sum(testcube.alias4) as `Measure 2`",
+        null, "group by testcube.alias0, testcube.alias1 "
+          + " having count(testcube.alias3) > 10 order by testcube.alias1 desc limit 5",
+        "select testcube.zipcode as `alias0`, testcube.cityid as `alias1`, "
+          + "max(testcube.msr3) as `alias2`,count(testcube.msr4) as `alias3`, sum(testcube.msr2) as `alias4`",
+        null, "group by testcube.zipcode, testcube.cityid ");
+      compareQueries(hqlQuery, expected);
+
+      conf.setBoolean(CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT, false);
+      conf.setBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, false);
+      hqlQuery = rewrite("select cityid as `City ID`, msr3 as `Measure 3`, "
+        + "SUM(msr2) as `Measure 2` from testCube" + " where "
+        + TWO_MONTHS_RANGE_UPTO_HOURS + " group by zipcode having msr4 > 10 order by cityid desc limit 5", conf);
+
+      expected = getExpectedUnionQuery(cubeName, storages, provider,
+        "SELECT testcube.alias0 as `City ID`,max(testcube.alias1) as `Measure 3`,sum(testcube.alias2) as `Measure 2` ",
+        null, "group by testcube.alias3 having count(testcube.alias4) > 10 order by testcube.alias0 desc limit 5",
+        "SELECT testcube.cityid as `alias0`, max(testcube.msr3) as `alias1`, "
+          + "sum(testcube.msr2) as `alias2`, testcube.zipcode as `alias3`, count(testcube .msr4) as `alias4` FROM ",
+        null, "GROUP BY testcube.zipcode");
+      compareQueries(hqlQuery, expected);
     } finally {
       CubeTestSetup.getStorageToUpdatePeriodMap().clear();
     }
@@ -431,30 +509,30 @@ public class TestCubeRewriter extends TestQueryRewrite {
   @Test
   public void testCubeWhereQueryWithMultipleTables() throws Exception {
     Configuration conf = getConf();
-    conf.set(CubeQueryConfUtil.getValidStorageTablesKey("testfact"), "C1_testFact,C2_testFact");
-    conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C1"), "DAILY");
-    conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact2", "C1"), "YEARLY");
-    conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C2"), "HOURLY");
-
-    CubeTestSetup.getStorageToUpdatePeriodMap().put("c1_testfact", "DAILY");
-    CubeTestSetup.getStorageToUpdatePeriodMap().put("c2_testfact", "HOURLY");
-
+    conf.setBoolean(CubeQueryConfUtil.ENABLE_STORAGES_UNION, true);
+    conf.set(getValidStorageTablesKey("testfact"), "C1_testFact,C2_testFact");
+    conf.set(getValidUpdatePeriodsKey("testfact", "C1"), "DAILY");
+    conf.set(getValidUpdatePeriodsKey("testfact2", "C1"), "YEARLY");
+    conf.set(getValidUpdatePeriodsKey("testfact", "C2"), "HOURLY");
+
+    CubeTestSetup.getStorageToUpdatePeriodMap().put("c1_testfact", Lists.newArrayList(DAILY));
+    CubeTestSetup.getStorageToUpdatePeriodMap().put("c2_testfact", Lists.newArrayList(HOURLY));
+    StoragePartitionProvider provider = new StoragePartitionProvider() {
+      @Override
+      public Map<String, String> providePartitionsForStorage(String storage) {
+        return getWhereForDailyAndHourly2days(cubeName, storage);
+      }
+    };
     try {
       // Union query
       String hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
       System.out.println("HQL:" + hqlQuery);
 
-      String expected1 = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-        getWhereForDailyAndHourly2days(cubeName, "c1_testfact"));
-      String expected2 = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-        getWhereForDailyAndHourly2days(cubeName, "c2_testfact"));
-
-      System.out.println("Expected1 : " + expected1);
-      System.out.println("Expected2 : " + expected2);
-
-      TestCubeRewriter.compareContains(expected1, hqlQuery);
-      TestCubeRewriter.compareContains(expected2, hqlQuery);
-      TestCubeRewriter.compareContains("UNION ALL", hqlQuery);
+      String expected = getExpectedUnionQuery(cubeName, Lists.newArrayList("c1_testfact", "c2_testfact"), provider,
+        "select sum(testcube.alias0) ", null, null,
+        "select sum(testcube.msr2) as `alias0` from ", null, null
+      );
+      compareQueries(hqlQuery, expected);
     } finally {
       CubeTestSetup.getStorageToUpdatePeriodMap().clear();
     }
@@ -463,38 +541,34 @@ public class TestCubeRewriter extends TestQueryRewrite {
   @Test
   public void testCubeWhereQueryWithMultipleTablesForMonth() throws Exception {
     Configuration conf = getConf();
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C0,C1,C2,C3,C4,C5");
-    conf.set(CubeQueryConfUtil.getValidStorageTablesKey("testfact"), "");
-    conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C1"), "HOURLY");
-    conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact2", "C1"), "YEARLY");
-    conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact2_raw", "C3"), "YEARLY");
-    conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C2"), "DAILY");
-    conf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C3"), "MONTHLY");
-
-    CubeTestSetup.getStorageToUpdatePeriodMap().put("HOURLY", "c1_testfact");
-    CubeTestSetup.getStorageToUpdatePeriodMap().put("DAILY", "c2_testfact");
-    CubeTestSetup.getStorageToUpdatePeriodMap().put("MONTHLY", "c3_testfact");
-
+    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1,C2,C3");
+    conf.setBoolean(CubeQueryConfUtil.ENABLE_STORAGES_UNION, true);
+    conf.set(getValidStorageTablesKey("testfact"), "");
+    conf.set(getValidUpdatePeriodsKey("testfact", "C1"), "HOURLY");
+    conf.set(getValidUpdatePeriodsKey("testfact2", "C1"), "YEARLY");
+    conf.set(getValidUpdatePeriodsKey("testfact2_raw", "C3"), "YEARLY");
+    conf.set(getValidUpdatePeriodsKey("testfact", "C2"), "DAILY");
+    conf.set(getValidUpdatePeriodsKey("testfact", "C3"), "MONTHLY");
+
+    CubeTestSetup.getStorageToUpdatePeriodMap().put("c1_testfact", Lists.newArrayList(HOURLY));
+    CubeTestSetup.getStorageToUpdatePeriodMap().put("c2_testfact", Lists.newArrayList(DAILY));
+    CubeTestSetup.getStorageToUpdatePeriodMap().put("c3_testfact", Lists.newArrayList(MONTHLY));
+    StoragePartitionProvider provider = new StoragePartitionProvider() {
+      @Override
+      public Map<String, String> providePartitionsForStorage(String storage) {
+        return getWhereForMonthlyDailyAndHourly2monthsUnionQuery(storage);
+      }
+    };
     try {
       // Union query
       String hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
       System.out.println("HQL:" + hqlQuery);
-
-      String expected1 = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-        getWhereForMonthlyDailyAndHourly2monthsUnionQuery("c1_testfact"));
-      String expected2 = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-        getWhereForMonthlyDailyAndHourly2monthsUnionQuery("c2_testfact"));
-      String expected3 = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ", null, null,
-        getWhereForMonthlyDailyAndHourly2monthsUnionQuery("c3_testfact"));
-
-      System.out.println("Expected1 : " + expected1);
-      System.out.println("Expected2 : " + expected2);
-      System.out.println("Expected3 : " + expected3);
-
-      TestCubeRewriter.compareContains(expected1, hqlQuery);
-      TestCubeRewriter.compareContains(expected2, hqlQuery);
-      TestCubeRewriter.compareContains(expected3, hqlQuery);
-      TestCubeRewriter.compareContains("UNION ALL", hqlQuery);
+      ArrayList<String> storages = Lists.newArrayList("c1_testfact", "c3_testfact", "c2_testfact");
+      String expected = getExpectedUnionQuery(cubeName, storages, provider,
+        "select sum(testcube.alias0)", null, null,
+        "select sum(testcube.msr2) as `alias0` from ", null, null
+      );
+      compareQueries(hqlQuery, expected);
     } finally {
       CubeTestSetup.getStorageToUpdatePeriodMap().clear();
     }
@@ -679,19 +753,19 @@ public class TestCubeRewriter extends TestQueryRewrite {
     hqlQuery =
       rewrite("select SUM(msr2) from testCube" + " join citydim on testCube.cityid = citydim.id" + " where "
         + TWO_DAYS_RANGE + " group by name", conf);
-    compareQueries(expected, hqlQuery);
+    compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("select cityid, SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     expected =
       getExpectedQuery(cubeName, "select testcube.cityid," + " sum(testcube.msr2) FROM ", null,
         " group by testcube.cityid ", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
-    compareQueries(expected, hqlQuery);
+    compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("select round(cityid), SUM(msr2) from" + " testCube where " + TWO_DAYS_RANGE, conf);
     expected =
       getExpectedQuery(cubeName, "select round(testcube.cityid)," + " sum(testcube.msr2) FROM ", null,
         " group by round(testcube.cityid) ", getWhereForDailyAndHourly2days(cubeName, "C2_testfact"));
-    compareQueries(expected, hqlQuery);
+    compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite("select SUM(msr2) from testCube" + "  where " + TWO_DAYS_RANGE + "group by round(zipcode)", conf);
@@ -1125,7 +1199,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     hqlQuery = rewrite("select name n, count(1) from citydim" + " group by name order by n ", conf);
     expected =
       getExpectedQuery("citydim", "select citydim.name as `n`," + " count(1) from ",
-        "groupby citydim.name order by n asc", "c2_citytable", false);
+        " group by citydim.name order by n asc", "c2_citytable", false);
     compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("select name as `n`, count(1) from citydim" + " order by n ", conf);
@@ -1133,7 +1207,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     hqlQuery = rewrite("select count(1) from citydim" + " group by name order by name ", conf);
     expected =
       getExpectedQuery("citydim", "select citydim.name," + " count(1) from ",
-        "groupby citydim.name order by citydim.name asc ", "c2_citytable", false);
+        " group by citydim.name order by citydim.name asc ", "c2_citytable", false);
     compareQueries(hqlQuery, expected);
   }
 
@@ -1188,11 +1262,11 @@ public class TestCubeRewriter extends TestQueryRewrite {
       getExpectedQuery("t", "SELECT t.cityid, sum(t.msr2) FROM ", null, " group by t.cityid",
         getWhereForDailyAndHourly2days("t", "C2_testfact")),
       getExpectedQuery(cubeName, "SELECT testCube.cityid, sum(testCube.msr2)" + " FROM ",
-        " testcube.cityid > 100 ", " group by testcube.cityid having" + " sum(testCube.msr2 < 1000)",
+        " testcube.cityid > 100 ", " group by testcube.cityid having" + " sum(testCube.msr2) < 1000",
         getWhereForDailyAndHourly2days(cubeName, "C2_testfact")),
       getExpectedQuery(cubeName, "SELECT testCube.cityid, sum(testCube.msr2)" + " FROM ",
         " testcube.cityid > 100 ", " group by testcube.cityid having"
-          + " sum(testCube.msr2 < 1000) orderby testCube.cityid asc",
+          + " sum(testCube.msr2) < 1000 order by testCube.cityid asc",
         getWhereForDailyAndHourly2days(cubeName, "C2_testfact")),
     };
     Configuration conf = getConf();
@@ -1435,10 +1509,10 @@ public class TestCubeRewriter extends TestQueryRewrite {
     String hqlQuery = rewrite(cubeQl, conf);
     String db = getDbName();
     String expectedJoin =
-      " LEFT OUTER JOIN " + db + ".c1_citytable c1 ON (( testcube . cityid ) = ( c1 . id )) AND (c1.dt = 'latest') "
+      " LEFT OUTER JOIN " + db + "c1_citytable c1 ON (( testcube . cityid ) = ( c1 . id )) AND (c1.dt = 'latest') "
         + " LEFT OUTER JOIN " + db
-        + ".c1_statetable s1 ON (( c1 . stateid ) = ( s1 . id )) AND (s1.dt = 'latest') " + " LEFT OUTER JOIN "
-        + db + ".c1_citytable c2 ON (( s1 . countryid ) = ( c2 . id )) AND (c2.dt = 'latest')";
+        + "c1_statetable s1 ON (( c1 . stateid ) = ( s1 . id )) AND (s1.dt = 'latest') " + " LEFT OUTER JOIN "
+        + db + "c1_citytable c2 ON (( s1 . countryid ) = ( c2 . id )) AND (c2.dt = 'latest')";
 
     String expected =
       getExpectedQuery(cubeName, "select sum(testcube.msr2)" + " FROM ", expectedJoin, null, null, null,

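The union tests above all follow one shape: the rewriter splits a time range across storage tables, projects the raw columns as alias0, alias1, ... in one inner query per storage, combines the inner queries with UNION ALL, and re-aggregates the aliases in an outer query. A minimal sketch of how the tests express that expectation, reusing the helper names visible in the diff (argument meanings are inferred from the calls above, not from the helpers' sources):

    // Per-storage WHERE clauses, exactly as the tests above supply them.
    StoragePartitionProvider provider = new StoragePartitionProvider() {
      @Override
      public Map<String, String> providePartitionsForStorage(String storage) {
        return getWhereForDailyAndHourly2days(cubeName, storage);
      }
    };
    // Outer query re-aggregates the aliased columns; the inner query is repeated
    // once per storage and joined with UNION ALL by the helper.
    String expected = getExpectedUnionQuery(cubeName,
        Lists.newArrayList("c1_testfact", "c2_testfact"), provider,
        "select sum(testcube.alias0) ", null, null,                 // outer select/where/group by
        "select sum(testcube.msr2) as `alias0` from ", null, null); // inner select/where/group by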
http://git-wip-us.apache.org/repos/asf/lens/blob/c445730c/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
index 64b1ac6..d16ea4c 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
@@ -63,7 +63,7 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
       getExpectedQuery(cubeName, "select testcube.dim2big1," + " max(testcube.msr3), sum(testcube.msr2) FROM ", null,
         " group by testcube.dim2big1", getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary4"),
         null);
-    TestCubeRewriter.compareQueries(expecteddim2big1, hqlQuery);
+    TestCubeRewriter.compareQueries(hqlQuery, expecteddim2big1);
     // with another table
     hqlQuery = rewrite("select dim2big1, citydim.name, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange,
       conf);
@@ -73,14 +73,14 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
       " group by testcube.dim2big1, citydim.name", null,
       getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary4"),
       null);
-    TestCubeRewriter.compareQueries(expecteddim2big1WithAnotherTable, hqlQuery);
+    TestCubeRewriter.compareQueries(hqlQuery, expecteddim2big1WithAnotherTable);
 
     hqlQuery = rewrite("select dim2big2, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange, conf);
     String expecteddim2big2 =
       getExpectedQuery(cubeName, "select testcube.dim2big2, max(testcube.msr3), sum(testcube.msr2) FROM ", null,
         " group by testcube.dim2big2", getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary4"),
         null);
-    TestCubeRewriter.compareQueries(expecteddim2big2, hqlQuery);
+    TestCubeRewriter.compareQueries(hqlQuery, expecteddim2big2);
 
     Configuration conf2 = new Configuration(conf);
     conf2.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
@@ -94,12 +94,12 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
         " group by testdim3.name, (testcube.dim2big1)", null,
         getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary4"),
         null);
-    TestCubeRewriter.compareQueries(expected, hqlQuery);
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("select dim2big1, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange, conf2);
-    TestCubeRewriter.compareQueries(expecteddim2big1, hqlQuery);
+    TestCubeRewriter.compareQueries(hqlQuery, expecteddim2big1);
     hqlQuery = rewrite("select dim2big2, max(msr3)," + " msr2 from testCube" + " where " + twoDaysITRange, conf2);
-    TestCubeRewriter.compareQueries(expecteddim2big2, hqlQuery);
+    TestCubeRewriter.compareQueries(hqlQuery, expecteddim2big2);
   }
 
   @Test
@@ -114,7 +114,7 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
           + getDbName() + "c1_testdim2tbl2 testdim2 ON testcube.dim2 = "
           + " testdim2.id and (testdim2.dt = 'latest') ", null, "group by (testdim2.bigid1)", null,
         getWhereForDailyAndHourly2days(cubeName, "c1_summary2"));
-    TestCubeRewriter.compareQueries(expected, hqlQuery);
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite("select testdim2.name, dim2big1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE, tconf);
@@ -124,7 +124,7 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
           + getDbName() + "c1_testdim2tbl2 testdim2 ON testcube.dim2 = "
           + " testdim2.id and (testdim2.dt = 'latest') ", null, "group by testdim2.name, testdim2.bigid1", null,
         getWhereForDailyAndHourly2days(cubeName, "c1_summary2"));
-    TestCubeRewriter.compareQueries(expected, hqlQuery);
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite("select testdim2.name, dim2big1, max(msr3)," + " msr2 from testCube left outer join testdim2"
@@ -135,7 +135,7 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
           + getDbName() + "c1_testdim2tbl2 testdim2 ON testcube.dim2 = "
           + " testdim2.id and (testdim2.dt = 'latest') ", null, "group by testdim2.name, testdim2.bigid1", null,
         getWhereForDailyAndHourly2days(cubeName, "c1_summary2"));
-    TestCubeRewriter.compareQueries(expected, hqlQuery);
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     hqlQuery =
       rewrite("select testdim3.name, dim2big1, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE, tconf);
@@ -147,7 +147,7 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
           + "c1_testdim3tbl testdim3 on " + "testdim2.testdim3id = testdim3.id AND (testdim3.dt = 'latest')",
         null, " group by testdim3.name, (testdim2.bigid1)", null,
         getWhereForDailyAndHourly2days(cubeName, "c1_summary2"));
-    TestCubeRewriter.compareQueries(expected, hqlQuery);
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
     LensException e = getLensExceptionInRewrite(
       "select dim2big2, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE, tconf);
     PruneCauses.BriefAndDetailedError error = extractPruneCause(e);
@@ -200,7 +200,7 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
         null, " group by substr(testcube.dim2big1, 5)",
         getWhereForDailyAndHourly2daysWithTimeDim(cubeName, "it", "C2_summary4"),
         null);
-    TestCubeRewriter.compareQueries(expecteddim2big1, hqlQuery);
+    TestCubeRewriter.compareQueries(hqlQuery, expecteddim2big1);
   }
 
   @Test
@@ -214,7 +214,7 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
         " JOIN " + getDbName() + "c1_testdim2tbl2 testdim2 ON testcube.dim2 = "
           + " testdim2.id and (testdim2.dt = 'latest') ", null, "group by substr(testdim2.bigid1, 5)", null,
         getWhereForDailyAndHourly2days(cubeName, "c1_summary2"));
-    TestCubeRewriter.compareQueries(expected, hqlQuery);
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
   }
 
   @Test
@@ -225,13 +225,13 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
         + " citydim.stateid = statedim.id and (statedim.dt = 'latest')";
     String expected = getExpectedQuery("citydim", "SELECT citydim.name, statedim.name FROM ", joinExpr, null, null,
         "c1_citytable", true);
-    TestCubeRewriter.compareQueries(expected, hqlQuery);
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     hqlQuery = rewrite("select citydim.statename, citydim.name  from" + " citydim", conf);
 
     expected = getExpectedQuery("citydim", "SELECT statedim.name, citydim.name FROM ", joinExpr, null, null,
         "c1_citytable", true);
-    TestCubeRewriter.compareQueries(expected, hqlQuery);
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
 
     // Query would fail because citydim.nocandidatecol does not exist in any
     // candidate
@@ -321,7 +321,7 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
     String expected =
       getExpectedQuery("citydim", "SELECT citydim.name, concat(citydim.name, \":\", statedim.name) FROM ",
         joinExpr, null, null, "c1_citytable", true);
-    TestCubeRewriter.compareQueries(expected, hqlQuery);
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/lens/blob/c445730c/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
index 9dcced0..1e21fb0 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
@@ -417,9 +417,9 @@ public class TestExpressionResolver extends TestQueryRewrite {
 
     String joinExpr =
       ""
-        + " join " + getDbName() + ".c1_statetable statedim on ct.stateid = statedim.id and (statedim.dt = 'latest')"
-        + " join " + getDbName() + ".c1_countrytable countrydim on statedim.countryid = countrydim.id"
-        + " join " + getDbName() + ".c1_ziptable zipdim on ct.zipcode = zipdim.code and (zipdim.dt = 'latest')"
+        + " join " + getDbName() + "c1_statetable statedim on ct.stateid = statedim.id and (statedim.dt = 'latest')"
+        + " join " + getDbName() + "c1_countrytable countrydim on statedim.countryid = countrydim.id"
+        + " join " + getDbName() + "c1_ziptable zipdim on ct.zipcode = zipdim.code and (zipdim.dt = 'latest')"
         + "";
 
     String expected =
@@ -443,9 +443,9 @@ public class TestExpressionResolver extends TestQueryRewrite {
 
     String joinExpr =
       ""
-        + " join " + getDbName() + ".c1_statetable statedim on ct.stateid = statedim.id and (statedim.dt = 'latest')"
-        + " join " + getDbName() + ".c1_countrytable countrydim on statedim.countryid = countrydim.id"
-        + " join " + getDbName() + ".c1_ziptable zipdim on ct.zipcode = zipdim.code and (zipdim.dt = 'latest')"
+        + " join " + getDbName() + "c1_statetable statedim on ct.stateid = statedim.id and (statedim.dt = 'latest')"
+        + " join " + getDbName() + "c1_countrytable countrydim on statedim.countryid = countrydim.id"
+        + " join " + getDbName() + "c1_ziptable zipdim on ct.zipcode = zipdim.code and (zipdim.dt = 'latest')"
         + "";
 
     String expected =


[46/50] [abbrv] lens git commit: LENS-750 : Add Hive error codes for Semantic and Authorization exceptions

Posted by sh...@apache.org.
LENS-750 : Add Hive error codes for Semantic and Authorization exceptions


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/bf1053b4
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/bf1053b4
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/bf1053b4

Branch: refs/heads/LENS-581
Commit: bf1053b4a1081bd3f07d6b26337e68586404e530
Parents: c179081
Author: Deepak Barr <de...@apache.org>
Authored: Fri Dec 18 12:58:13 2015 +0530
Committer: Deepak Kumar Barr <de...@apache.org>
Committed: Fri Dec 18 12:58:13 2015 +0530

----------------------------------------------------------------------
 lens-api/src/main/resources/lens-errors.conf    | 16 +++++++-
 .../org/apache/lens/driver/hive/HiveDriver.java | 17 +++++++--
 .../lens/driver/hive/LensHiveErrorCode.java     | 36 ++++++++++++++++++
 .../server/query/QueryAPIErrorResponseTest.java |  5 ++-
 .../lens/server/query/TestQueryService.java     | 40 ++++++++++++++++++--
 5 files changed, 103 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/bf1053b4/lens-api/src/main/resources/lens-errors.conf
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/lens-errors.conf b/lens-api/src/main/resources/lens-errors.conf
index c880543..06960a0 100644
--- a/lens-api/src/main/resources/lens-errors.conf
+++ b/lens-api/src/main/resources/lens-errors.conf
@@ -317,6 +317,20 @@ lensCubeErrorsForMetastore = [
 
 ]
 
+lensHiveErrors = [
+  {
+    errorCode = 4001
+    httpStatusCode = ${BAD_REQUEST}
+    errorMsg = "Semantic Error : %s"
+  }
+
+  {
+    errorCode = 4002
+    httpStatusCode = ${INTERNAL_SERVER_ERROR}
+    errorMsg = "Hive Error : %s"
+  }
+]
+
 lensCubeErrors = ${lensCubeErrorsForQuery}${lensCubeErrorsForMetastore}
 
 # Overriding errors in lens-errors.conf via lens-errors-override.conf:
@@ -350,4 +364,4 @@ lensCubeErrors = ${lensCubeErrorsForQuery}${lensCubeErrorsForMetastore}
 # Lens server and Lens client are only aware of errors array. They are not aware of any other array defined in
 # error configuration files. Hence an errors array is prepared which is a concatenation of all other error arrays.
 
-errors = ${lensCommonErrors}${lensServerErrors}${lensCubeErrors}
+errors = ${lensCommonErrors}${lensServerErrors}${lensCubeErrors}${lensHiveErrors}

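Both new lensHiveErrors entries carry a single %s placeholder that is filled with the underlying exception message. A hedged illustration of how the 4001 template expands; the String.format call is only a stand-in for the server's internal error rendering (an assumption), and the sample message is taken from the test change further below:

    public class ErrorMsgDemo {
      public static void main(String[] args) {
        // Stand-in for the server-side rendering of error code 4001 (assumed).
        String rendered = String.format("Semantic Error : %s",
            "Error while compiling statement: FAILED: SemanticException [Error 10001]: "
                + "Line 1:31 Table not found 'non_existing_table'");
        System.out.println(rendered);
      }
    }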
http://git-wip-us.apache.org/repos/asf/lens/blob/bf1053b4/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
index 7391f47..c7ef8f1 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
@@ -18,6 +18,7 @@
  */
 package org.apache.lens.driver.hive;
 
+import static org.apache.lens.driver.hive.LensHiveErrorCode.*;
 import static org.apache.lens.server.api.util.LensUtil.getImplementations;
 
 import java.io.ByteArrayInputStream;
@@ -508,6 +509,7 @@ public class HiveDriver extends AbstractLensDriver {
   // assuming this is only called for executing explain/insert/set/delete/etc... queries which don't ask to fetch data.
   public LensResultSet execute(QueryContext ctx) throws LensException {
     OperationHandle op = null;
+    LensResultSet result = null;
     try {
       addPersistentPath(ctx);
       Configuration qdconf = ctx.getDriverConf(this);
@@ -525,24 +527,24 @@ public class HiveDriver extends AbstractLensDriver {
       if (status.getState() == OperationState.ERROR) {
         throw new LensException("Unknown error while running query " + ctx.getUserQuery());
       }
-      LensResultSet result = createResultSet(ctx, true);
+      result = createResultSet(ctx, true);
       // close the query immediately if the result is not inmemory result set
       if (result == null || !(result instanceof HiveInMemoryResultSet)) {
         closeQuery(ctx.getQueryHandle());
       }
       // remove query handle from hiveHandles even in case of inmemory result set
       hiveHandles.remove(ctx.getQueryHandle());
-      return result;
     } catch (IOException e) {
       throw new LensException("Error adding persistent path", e);
     } catch (HiveSQLException hiveErr) {
       handleHiveServerError(ctx, hiveErr);
-      throw new LensException("Error executing query", hiveErr);
+      handleHiveSQLException(hiveErr);
     } finally {
       if (null != op) {
         opHandleToSession.remove(op);
       }
     }
+    return result;
   }
 
   /*
@@ -569,10 +571,17 @@ public class HiveDriver extends AbstractLensDriver {
       throw new LensException("Error adding persistent path", e);
     } catch (HiveSQLException e) {
       handleHiveServerError(ctx, e);
-      throw new LensException("Error executing async query", e);
+      handleHiveSQLException(e);
     }
   }
 
+  private LensException handleHiveSQLException(HiveSQLException ex) throws LensException {
+    if (ex.getMessage().contains("SemanticException")) {
+      throw new LensException(SEMANTIC_ERROR.getLensErrorInfo(), ex, ex.getMessage());
+    }
+    throw new LensException(HIVE_ERROR.getLensErrorInfo(), ex, ex.getMessage());
+  }
+
   /*
    * (non-Javadoc)
    *

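One subtlety in the HiveDriver change above: handleHiveSQLException always throws, so the return statement moved out of the try block is reached only on success. A simplified sketch of the resulting execute() control flow, where runAndBuildResultSet and cleanupOperationHandle are hypothetical stand-ins for the real method body and the opHandleToSession cleanup:

    LensResultSet result = null;
    try {
      result = runAndBuildResultSet(ctx);   // hypothetical stand-in for the real work
    } catch (HiveSQLException hiveErr) {
      handleHiveServerError(ctx, hiveErr);
      handleHiveSQLException(hiveErr);      // always throws a coded LensException
    } finally {
      cleanupOperationHandle();             // hypothetical stand-in for opHandleToSession.remove(op)
    }
    return result;                          // reached only when no exception was thrown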
http://git-wip-us.apache.org/repos/asf/lens/blob/bf1053b4/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/LensHiveErrorCode.java
----------------------------------------------------------------------
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/LensHiveErrorCode.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/LensHiveErrorCode.java
new file mode 100644
index 0000000..3bac9e7
--- /dev/null
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/LensHiveErrorCode.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.driver.hive;
+
+import org.apache.lens.server.api.LensErrorInfo;
+
+public enum LensHiveErrorCode {
+
+  SEMANTIC_ERROR(4001, 10000), HIVE_ERROR(4002, 10000);
+
+  public LensErrorInfo getLensErrorInfo() {
+    return this.errorInfo;
+  }
+
+  LensHiveErrorCode(final int code, final int weight) {
+    this.errorInfo = new LensErrorInfo(code, weight, name());
+  }
+
+  private final LensErrorInfo errorInfo;
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/bf1053b4/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java b/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java
index 18a8c8d..69c3f46 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/QueryAPIErrorResponseTest.java
@@ -160,7 +160,8 @@ public class QueryAPIErrorResponseTest extends LensJerseyTest {
     final String testQuery = "select * from non_existing_table";
     Response response = estimate(target(), Optional.of(sessionId), Optional.of(testQuery));
 
-    final String expectedErrMsg = "Internal Server Error.";
+    final String expectedErrMsg = "Semantic Error : Error while compiling statement: "
+      + "FAILED: SemanticException [Error 10001]: Line 1:31 Table not found 'non_existing_table'";
 
     LensErrorTO childError1 = LensErrorTO.composedOf(INTERNAL_SERVER_ERROR.getValue(),
       expectedErrMsg, MOCK_STACK_TRACE);
@@ -170,7 +171,7 @@ public class QueryAPIErrorResponseTest extends LensJerseyTest {
     LensErrorTO expectedLensErrorTO = LensErrorTO.composedOf(INTERNAL_SERVER_ERROR.getValue(),
         expectedErrMsg, MOCK_STACK_TRACE, Arrays.asList(childError1, childError2));
 
-    ErrorResponseExpectedData expectedData = new ErrorResponseExpectedData(Status.INTERNAL_SERVER_ERROR,
+    ErrorResponseExpectedData expectedData = new ErrorResponseExpectedData(Status.BAD_REQUEST,
       expectedLensErrorTO);
 
     expectedData.verify(response);

http://git-wip-us.apache.org/repos/asf/lens/blob/bf1053b4/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index efef358..82afcdc 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -49,6 +49,7 @@ import org.apache.lens.api.result.LensErrorTO;
 import org.apache.lens.api.result.QueryCostTO;
 import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.driver.hive.HiveDriver;
+import org.apache.lens.driver.hive.LensHiveErrorCode;
 import org.apache.lens.server.LensJerseyTest;
 import org.apache.lens.server.LensServerTestUtil;
 import org.apache.lens.server.LensServices;
@@ -252,7 +253,7 @@ public class TestQueryService extends LensJerseyTest {
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), conf,
       MediaType.APPLICATION_XML_TYPE));
     final Response response = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
-    assertEquals(response.getStatus(), INTERNAL_SERVER_ERROR.getStatusCode());
+    assertEquals(response.getStatus(), BAD_REQUEST.getStatusCode());
   }
 
   /**
@@ -416,7 +417,7 @@ public class TestQueryService extends LensJerseyTest {
 
     final Response responseExplain = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
 
-    assertEquals(responseExplain.getStatus(), INTERNAL_SERVER_ERROR.getStatusCode());
+    assertEquals(responseExplain.getStatus(), BAD_REQUEST.getStatusCode());
 
     // Test explain and prepare
     final WebTarget ptarget = target().path("queryapi/preparedqueries");
@@ -433,7 +434,38 @@ public class TestQueryService extends LensJerseyTest {
     final Response responseExplainAndPrepare = target.request().post(
       Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
 
-    assertEquals(responseExplainAndPrepare.getStatus(), INTERNAL_SERVER_ERROR.getStatusCode());
+    assertEquals(responseExplainAndPrepare.getStatus(), BAD_REQUEST.getStatusCode());
+  }
+
+  /**
+   * Test semantic error for hive query on non-existent table.
+   *
+   * @throws IOException          Signals that an I/O exception has occurred.
+   * @throws InterruptedException the interrupted exception
+   */
+  @Test
+  public void testHiveSemanticFailure() throws InterruptedException, IOException {
+    final WebTarget target = target().path("queryapi/queries");
+
+    final FormDataMultiPart mp = new FormDataMultiPart();
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
+      MediaType.APPLICATION_XML_TYPE));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(), " select ID from NOT_EXISTS"));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "execute"));
+    mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
+      MediaType.APPLICATION_XML_TYPE));
+
+    Response response = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
+    LensAPIResult result = response.readEntity(LensAPIResult.class);
+    List<LensErrorTO> childErrors = result.getLensErrorTO().getChildErrors();
+    boolean hiveSemanticErrorExists = false;
+    for (LensErrorTO error : childErrors) {
+      if (error.getCode() == LensHiveErrorCode.SEMANTIC_ERROR.getLensErrorInfo().getErrorCode()) {
+        hiveSemanticErrorExists = true;
+        break;
+      }
+    }
+    assertTrue(hiveSemanticErrorExists);
   }
 
   // post to preparedqueries
@@ -1154,7 +1186,7 @@ public class TestQueryService extends LensJerseyTest {
       MediaType.APPLICATION_XML_TYPE));
 
     Response response = target.request().post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE));
-    assertEquals(response.getStatus(), INTERNAL_SERVER_ERROR.getStatusCode());
+    assertEquals(response.getStatus(), BAD_REQUEST.getStatusCode());
   }
 
   /**


[10/50] [abbrv] lens git commit: LENS-851 : Fix test failures on java8 for union queries

Posted by sh...@apache.org.
LENS-851 : Fix test failures on java8 for union queries


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/d820c32a
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/d820c32a
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/d820c32a

Branch: refs/heads/LENS-581
Commit: d820c32abd7e95bbd47c555772707df2fc2b786b
Parents: c445730
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Tue Nov 24 11:33:25 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Tue Nov 24 11:33:25 2015 +0530

----------------------------------------------------------------------
 .../src/main/java/org/apache/lens/cube/parse/CandidateFact.java    | 2 +-
 .../main/java/org/apache/lens/cube/parse/StorageTableResolver.java | 2 +-
 .../src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/d820c32a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
index 8a6aa00..7f81461 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
@@ -264,7 +264,7 @@ public class CandidateFact implements CandidateTable {
     String database = SessionState.get().getCurrentDatabase();
     // Add database name prefix for non default database
     if (StringUtils.isNotBlank(database) && !"default".equalsIgnoreCase(database)) {
-      Set<String> storageTbls = new HashSet<String>();
+      Set<String> storageTbls = new TreeSet<>();
       Iterator<String> names = storageTables.iterator();
       while (names.hasNext()) {
         storageTbls.add(database + "." + names.next());

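Why swapping HashSet for TreeSet fixes the Java 8 flakiness: HashSet iteration order is unspecified and changed between JDK 7 and JDK 8, so expected query strings assembled from the set compared differently across JDKs, while TreeSet always iterates in sorted order. The same reasoning explains the reordered storages list in the TestCubeRewriter hunk below. A self-contained demonstration (table names illustrative):

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;
    import java.util.TreeSet;

    public class OrderDemo {
      public static void main(String[] args) {
        // HashSet: iteration order depends on hashing details and the JDK version.
        Set<String> hash = new HashSet<>(
            Arrays.asList("db.c2_testfact", "db.c1_testfact", "db.c3_testfact"));
        // TreeSet: iteration order is always the natural (sorted) order.
        Set<String> tree = new TreeSet<>(hash);
        System.out.println(hash); // unspecified order; may differ between JDKs
        System.out.println(tree); // [db.c1_testfact, db.c2_testfact, db.c3_testfact]
      }
    }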
http://git-wip-us.apache.org/repos/asf/lens/blob/d820c32a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index 4db1626..cc8e68c 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -480,7 +480,7 @@ class StorageTableResolver implements ContextRewriter {
         i.remove();
         continue;
       }
-      Set<String> storageTables = new LinkedHashSet<String>();
+      Set<String> storageTables = new LinkedHashSet<>();
       storageTables.addAll(minimalStorageTables.keySet());
       cfact.setStorageTables(storageTables);
 

http://git-wip-us.apache.org/repos/asf/lens/blob/d820c32a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 494f81b..04b7ab1 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -563,7 +563,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
       // Union query
       String hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
       System.out.println("HQL:" + hqlQuery);
-      ArrayList<String> storages = Lists.newArrayList("c1_testfact", "c3_testfact", "c2_testfact");
+      ArrayList<String> storages = Lists.newArrayList("c1_testfact", "c2_testfact", "c3_testfact");
       String expected = getExpectedUnionQuery(cubeName, storages, provider,
         "select sum(testcube.alias0)", null, null,
         "select sum(testcube.msr2) as `alias0` from ", null, null


[32/50] [abbrv] lens git commit: LENS-882: Provide option from CLI to list queries by driver

Posted by sh...@apache.org.
LENS-882: Provide option from CLI to list queries by driver


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/0e4c18cb
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/0e4c18cb
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/0e4c18cb

Branch: refs/heads/LENS-581
Commit: 0e4c18cb917015b469ca43d30d7e2196d8f64789
Parents: 22e2022
Author: Deepak Barr <de...@gmail.com>
Authored: Thu Dec 10 12:16:11 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Thu Dec 10 12:16:11 2015 +0530

----------------------------------------------------------------------
 .../lens/cli/commands/LensQueryCommands.java    |  4 ++-
 .../apache/lens/cli/TestLensQueryCommands.java  | 26 +++++++++++++-------
 .../java/org/apache/lens/client/LensClient.java |  5 ++--
 .../org/apache/lens/client/LensStatement.java   |  6 +++--
 src/site/apt/user/cli.apt                       |  2 +-
 5 files changed, 28 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/0e4c18cb/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
index fe9b84d..e3c08ff 100644
--- a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
+++ b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
@@ -218,6 +218,7 @@ public class LensQueryCommands extends BaseLensCommand {
    * @param state     the state
    * @param queryName the query name
    * @param user      the user
+   * @param driver    the driver name
    * @param fromDate  the from date
    * @param toDate    the to date
    * @return the all queries
@@ -229,11 +230,12 @@ public class LensQueryCommands extends BaseLensCommand {
     @CliOption(key = {"state"}, mandatory = false, help = "<query-status>") String state,
     @CliOption(key = {"name"}, mandatory = false, help = "<query-name>") String queryName,
     @CliOption(key = {"user"}, mandatory = false, help = "<user-who-submitted-query>") String user,
+    @CliOption(key = {"driver"}, mandatory = false, help = "<driver-where-query-ran>") String driver,
     @CliOption(key = {"fromDate"}, mandatory = false, unspecifiedDefaultValue = "-1", help
       = "<submission-time-is-after>") long fromDate,
     @CliOption(key = {"toDate"}, mandatory = false, unspecifiedDefaultValue = "" + Long.MAX_VALUE, help
       = "<submission-time-is-before>") long toDate) {
-    List<QueryHandle> handles = getClient().getQueries(state, queryName, user, fromDate, toDate);
+    List<QueryHandle> handles = getClient().getQueries(state, queryName, user, driver, fromDate, toDate);
     if (handles != null && !handles.isEmpty()) {
       return Joiner.on("\n").skipNulls().join(handles).concat("\n").concat("Total number of queries: "
         + handles.size());

http://git-wip-us.apache.org/repos/asf/lens/blob/0e4c18cb/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java b/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
index 48b3ebb..6ebfff7 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/TestLensQueryCommands.java
@@ -230,7 +230,7 @@ public class TestLensQueryCommands extends LensCliApplicationTest {
     String qh = qCom.executeQuery(sql, true, "testQuery1");
     String user = qCom.getClient().getLensStatement(new QueryHandle(UUID.fromString(qh)))
         .getQuery().getSubmittedUser();
-    String result = qCom.getAllQueries("", "testQuery1", user, -1, Long.MAX_VALUE);
+    String result = qCom.getAllQueries("", "testQuery1", user, "", -1, Long.MAX_VALUE);
     // this is because previous query has run two query handle will be there
     assertTrue(result.contains(qh), result);
     assertTrue(result.contains("Total number of queries"));
@@ -249,7 +249,7 @@ public class TestLensQueryCommands extends LensCliApplicationTest {
     }
 
     // Check that query name searching is 'ilike'
-    String result2 = qCom.getAllQueries("", "query", "all", -1, Long.MAX_VALUE);
+    String result2 = qCom.getAllQueries("", "query", "all", "", -1, Long.MAX_VALUE);
     assertTrue(result2.contains(qh), result2);
 
     assertTrue(qCom.getStatus(qh).contains("Status : SUCCESSFUL"));
@@ -265,10 +265,10 @@ public class TestLensQueryCommands extends LensCliApplicationTest {
 
     // Kill query is not tested as there is no deterministic way of killing a query
 
-    result = qCom.getAllQueries("SUCCESSFUL", "", "all", -1, Long.MAX_VALUE);
+    result = qCom.getAllQueries("SUCCESSFUL", "", "all", "", -1, Long.MAX_VALUE);
     assertTrue(result.contains(qh), result);
 
-    result = qCom.getAllQueries("FAILED", "", "all", -1, Long.MAX_VALUE);
+    result = qCom.getAllQueries("FAILED", "", "all", "", -1, Long.MAX_VALUE);
     if (!result.contains("No queries")) {
       // Make sure valid query handles are returned
       String[] handles = StringUtils.split(result, "\n");
@@ -282,22 +282,30 @@ public class TestLensQueryCommands extends LensCliApplicationTest {
     String queryName = qCom.getClient().getLensStatement(new QueryHandle(UUID.fromString(qh))).getQuery()
             .getQueryName();
     assertTrue("testQuery1".equalsIgnoreCase(queryName), queryName);
-    result = qCom.getAllQueries("", "", "", submitTime, System.currentTimeMillis());
+    result = qCom.getAllQueries("", "", "", "", submitTime, System.currentTimeMillis());
     assertTrue(result.contains(qh), result);
 
-    result = qCom.getAllQueries("", "fooBar", "all", submitTime, System.currentTimeMillis());
+    result = qCom.getAllQueries("", "fooBar", "all", "", submitTime, System.currentTimeMillis());
     assertTrue(result.contains("No queries"), result);
 
-    result = qCom.getAllQueries("SUCCESSFUL", "", "all", submitTime, System.currentTimeMillis());
+    result = qCom.getAllQueries("SUCCESSFUL", "", "all", "", submitTime, System.currentTimeMillis());
     assertTrue(result.contains(qh));
 
-    result = qCom.getAllQueries("SUCCESSFUL", "", "all", submitTime - 5000, submitTime - 1);
+    result = qCom.getAllQueries("SUCCESSFUL", "", "all", "", submitTime - 5000, submitTime - 1);
     // should not give query since its not in the range
     assertFalse(result.contains(qh));
 
+    // Filters on driver
+    result = qCom.getAllQueries("SUCCESSFUL", "", "all", "hive/hive1", submitTime,
+      System.currentTimeMillis());
+    assertTrue(result.contains(qh));
+
+    result = qCom.getAllQueries("SUCCESSFUL", "", "all", "DummyDriver", submitTime, System.currentTimeMillis());
+    assertFalse(result.contains(qh));
+
     try {
       // Should fail with bad request since fromDate > toDate
-      result = qCom.getAllQueries("SUCCESSFUL", "", "all", submitTime + 5000, submitTime);
+      result = qCom.getAllQueries("SUCCESSFUL", "", "all", "", submitTime + 5000, submitTime);
       fail("Call should have failed with BadRequestException, instead got " + result);
     } catch (BadRequestException exc) {
       // pass

http://git-wip-us.apache.org/repos/asf/lens/blob/0e4c18cb/lens-client/src/main/java/org/apache/lens/client/LensClient.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/LensClient.java b/lens-client/src/main/java/org/apache/lens/client/LensClient.java
index 5fd04c4..8f197e4 100644
--- a/lens-client/src/main/java/org/apache/lens/client/LensClient.java
+++ b/lens-client/src/main/java/org/apache/lens/client/LensClient.java
@@ -226,8 +226,9 @@ public class LensClient {
     return getLensStatement(query).getResultSet();
   }
 
-  public List<QueryHandle> getQueries(String state, String queryName, String user, long fromDate, long toDate) {
-    return new LensStatement(connection).getAllQueries(state, queryName, user, fromDate, toDate);
+  public List<QueryHandle> getQueries(String state, String queryName, String user, String driver, long fromDate,
+    long toDate) {
+    return new LensStatement(connection).getAllQueries(state, queryName, user, driver, fromDate, toDate);
   }
 
   private void connectToLensServer() {

http://git-wip-us.apache.org/repos/asf/lens/blob/0e4c18cb/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/LensStatement.java b/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
index 40e6d76..71caa48 100644
--- a/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
+++ b/lens-client/src/main/java/org/apache/lens/client/LensStatement.java
@@ -382,15 +382,17 @@ public class LensStatement {
    * @param state     the state
    * @param queryName the query name
    * @param user      the user
+   * @param driver    the driver name
    * @param fromDate  the from date
    * @param toDate    the to date
    * @return the all queries
    */
-  public List<QueryHandle> getAllQueries(String state, String queryName, String user, long fromDate, long toDate) {
+  public List<QueryHandle> getAllQueries(String state, String queryName, String user, String driver, long fromDate,
+    long toDate) {
     WebTarget target = getQueryWebTarget(connection.buildClient());
     List<QueryHandle> handles = target.queryParam("sessionid", connection.getSessionHandle())
       .queryParam("state", state).queryParam("queryName", queryName).queryParam("user", user)
-      .queryParam("fromDate", fromDate).queryParam("toDate", toDate).request()
+      .queryParam("driver", driver).queryParam("fromDate", fromDate).queryParam("toDate", toDate).request()
       .get(new GenericType<List<QueryHandle>>() {
       });
     return handles;

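A hedged client-side sketch of the extended signature; the filter values mirror the CLI test earlier in this commit, and client is assumed to be an already-connected LensClient (construction elided):

    // Hypothetical usage of the new driver filter; values mirror the test above.
    List<QueryHandle> handles = client.getQueries(
        "SUCCESSFUL",       // state
        "",                 // query name (no filter)
        "all",              // submitting user
        "hive/hive1",       // driver the query ran on
        -1, Long.MAX_VALUE  // fromDate, toDate
    );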
http://git-wip-us.apache.org/repos/asf/lens/blob/0e4c18cb/src/site/apt/user/cli.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/user/cli.apt b/src/site/apt/user/cli.apt
index c266c6d..3db53c4 100644
--- a/src/site/apt/user/cli.apt
+++ b/src/site/apt/user/cli.apt
@@ -355,7 +355,7 @@ User CLI Commands
 *--+--+
 |query kill [--query_handle] \<query_handle\>|Kill query with handle <<<query_handle>>>|
 *--+--+
-|query list [--state \<query-status\>] [--name \<query-name\>] [--user \<user-who-submitted-query\>] [--fromDate \<submission-time-is-after\>] [--toDate \<submission-time-is-before\>]|Get all queries. Various filter options can be provided(optionally),  as can be seen from the command syntax|
+|query list [--state \<query-status\>] [--name \<query-name\>] [--user \<user-who-submitted-query\>] [--driver \<driver-where-query-was-executed\>] [--fromDate \<submission-time-is-after\>] [--toDate \<submission-time-is-before\>]|Get all queries. Various filter options can be provided(optionally),  as can be seen from the command syntax|
 *--+--+
 |query results [--query_handle] \<query_handle\> [--save_location \<save_location\>] [--async \<async\>]|get results of query with query handle <<<query_handle>>>. If async is false then wait till the query execution is completed, it's by default true. Can optionally save the results to a file by providing <<<save_location>>>.|
 *--+--+


[13/50] [abbrv] lens git commit: LENS-865: Add/delete partition throws NPE when a part col doesn't exist in the table

Posted by sh...@apache.org.
LENS-865: Add/delete partition throws NPE when a part col doesn't exist in the table


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/e5691d8d
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/e5691d8d
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/e5691d8d

Branch: refs/heads/LENS-581
Commit: e5691d8d655c94cdd64e0d83028ec59735d73edc
Parents: 09baa12
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Tue Nov 24 12:07:17 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Tue Nov 24 12:07:17 2015 +0530

----------------------------------------------------------------------
 .../java/org/apache/lens/api/APIResult.java     |  10 +-
 lens-api/src/main/resources/lens-errors.conf    |   6 +
 .../lens/cube/error/LensCubeErrorCode.java      |   5 +-
 .../lens/cube/metadata/CubeMetastoreClient.java |  42 +-
 .../cube/metadata/TestCubeMetastoreClient.java  | 404 ++++++++++---------
 .../lens/server/api/error/LensException.java    |   8 +-
 .../server/metastore/MetastoreResource.java     | 384 ++++++++++--------
 .../server/metastore/TestMetastoreService.java  |  40 +-
 8 files changed, 498 insertions(+), 401 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/e5691d8d/lens-api/src/main/java/org/apache/lens/api/APIResult.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/APIResult.java b/lens-api/src/main/java/org/apache/lens/api/APIResult.java
index 06c608a..0cdee0d 100644
--- a/lens-api/src/main/java/org/apache/lens/api/APIResult.java
+++ b/lens-api/src/main/java/org/apache/lens/api/APIResult.java
@@ -152,11 +152,13 @@ public class APIResult {
   }
 
   private static String extractCause(Throwable e) {
-    String cause = null;
-    while ((cause == null || cause.isEmpty()) && e != null) {
-      cause = e.getMessage();
+    StringBuilder cause = new StringBuilder();
+    String sep = "";
+    while (e != null) {
+      cause.append(sep).append(e.getMessage());
       e = e.getCause();
+      sep = ": ";
     }
-    return cause;
+    return cause.toString();
   }
 }

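The extractCause change above switches from returning the first non-empty message in the cause chain to joining every message with ": ". A runnable before/after illustration, with the loop copied from the new implementation (the exception messages are invented for the example):

    public class CauseChainDemo {
      public static void main(String[] args) {
        // Hypothetical cause chain; messages are illustrative only.
        Throwable e = new RuntimeException("Problem in submitting entity",
            new IllegalStateException("No timeline found for part col 'dt'"));
        StringBuilder cause = new StringBuilder();
        String sep = "";
        while (e != null) {
          cause.append(sep).append(e.getMessage());
          e = e.getCause();
          sep = ": ";
        }
        // Old behavior: "Problem in submitting entity"
        // New behavior: "Problem in submitting entity: No timeline found for part col 'dt'"
        System.out.println(cause);
      }
    }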
http://git-wip-us.apache.org/repos/asf/lens/blob/e5691d8d/lens-api/src/main/resources/lens-errors.conf
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/lens-errors.conf b/lens-api/src/main/resources/lens-errors.conf
index f50433a..ca8562f 100644
--- a/lens-api/src/main/resources/lens-errors.conf
+++ b/lens-api/src/main/resources/lens-errors.conf
@@ -296,6 +296,12 @@ lensCubeErrorsForMetastore = [
     httpStatusCode = ${BAD_REQUEST}
     errorMsg = "Problem in submitting entity: %s"
   }
+
+  {
+      errorCode = 3102
+      httpStatusCode = ${BAD_REQUEST}
+      errorMsg = "No timeline found for fact=%s, storage=%s, update period=%s, partition column=%s."
+  }
 ]
 
 lensCubeErrors = ${lensCubeErrorsForQuery}${lensCubeErrorsForMetastore}

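The new 3102 template takes four arguments, supplied by getAndFailFast further below as (fact, storage, updatePeriod, partCol). An illustrative expansion with invented values; the String.format call is a stand-in for the server's message rendering (an assumption):

    public class TimelineMsgDemo {
      public static void main(String[] args) {
        // Values are hypothetical; the template is the one added above.
        String msg = String.format(
            "No timeline found for fact=%s, storage=%s, update period=%s, partition column=%s.",
            "testfact", "C1", "DAILY", "dt");
        System.out.println(msg);
        // -> No timeline found for fact=testfact, storage=C1, update period=DAILY, partition column=dt.
      }
    }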
http://git-wip-us.apache.org/repos/asf/lens/blob/e5691d8d/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
index 24fb80b..6c5dc2f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
@@ -21,6 +21,7 @@ package org.apache.lens.cube.error;
 import org.apache.lens.server.api.LensErrorInfo;
 
 public enum LensCubeErrorCode {
+  // Error codes less than 3100 are errors encountered while submitting a query
   // Error codes same for drivers
   SYNTAX_ERROR(3001, 0),
   FIELDS_CANNOT_BE_QUERIED_TOGETHER(3002, 0),
@@ -54,7 +55,9 @@ public enum LensCubeErrorCode {
   NO_CANDIDATE_DIM_STORAGE_TABLES(3029, 1300),
   NO_STORAGE_TABLE_AVAIABLE(3030, 1400),
   STORAGE_UNION_DISABLED(3031, 100),
-  ERROR_IN_ENTITY_DEFINITION(3101, 100);
+  // Error codes greater than 3100 are errors encountered during metastore operations.
+  ERROR_IN_ENTITY_DEFINITION(3101, 100),
+  TIMELINE_ABSENT(3102, 100);
 
   public LensErrorInfo getLensErrorInfo() {
     return this.errorInfo;
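
A short sketch of how the new TIMELINE_ABSENT code is raised and checked. The LensException constructor and getErrorCode() usage mirror the CubeMetastoreClient and test changes later in this commit; the argument values here are invented:

try {
  throw new LensException(LensCubeErrorCode.TIMELINE_ABSENT.getLensErrorInfo(),
    "sales_fact", "c1", UpdatePeriod.HOURLY, "dt");  // args fill the %s slots of error 3102
} catch (LensException e) {
  assertEquals(e.getErrorCode(),
    LensCubeErrorCode.TIMELINE_ABSENT.getLensErrorInfo().getErrorCode());
}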

http://git-wip-us.apache.org/repos/asf/lens/blob/e5691d8d/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index f945e0f..1f13617 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@ -25,6 +25,7 @@ import java.text.ParseException;
 import java.util.*;
 import java.util.concurrent.ConcurrentHashMap;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.Storage.LatestInfo;
 import org.apache.lens.cube.metadata.Storage.LatestPartColumnInfo;
 import org.apache.lens.cube.metadata.timeline.PartitionTimeline;
@@ -115,8 +116,7 @@ public class CubeMetastoreClient {
     for (CubeFactTable fact : getAllFacts(cube)) {
       for (String storage : fact.getStorages()) {
         for (UpdatePeriod updatePeriod : fact.getUpdatePeriods().get(storage)) {
-          PartitionTimeline timeline = partitionTimelineCache.get(fact.getName(), storage, updatePeriod,
-            partCol);
+          PartitionTimeline timeline = partitionTimelineCache.get(fact.getName(), storage, updatePeriod, partCol);
           if (timeline != null) {// this storage table is partitioned by partCol or not.
             Date latest = timeline.getLatestDate();
             if (latest != null && latest.after(max)) {
@@ -424,8 +424,8 @@ public class CubeMetastoreClient {
     /** check partition existence in the appropriate timeline if it exists */
     public boolean partitionTimeExists(String name, String storage, UpdatePeriod period, String partCol, Date partSpec)
       throws HiveException, LensException {
-      return get(name, storage, period, partCol) != null && get(name, storage, period, partCol).exists(TimePartition.of(
-        period, partSpec));
+      return get(name, storage, period, partCol) != null
+        && get(name, storage, period, partCol).exists(TimePartition.of(period, partSpec));
     }
 
     /**
@@ -437,12 +437,29 @@ public class CubeMetastoreClient {
       return get(fact, storage) != null && get(fact, storage).get(updatePeriod) != null && get(fact, storage).get(
         updatePeriod).get(partCol) != null ? get(fact, storage).get(updatePeriod).get(partCol) : null;
     }
+    /**
+     * Returns the timeline corresponding to the given fact-storage table, updatePeriod and partCol. Throws an
+     * exception if no such timeline exists, which most probably means the combination is incorrect.
+     */
+    public PartitionTimeline getAndFailFast(String fact, String storage, UpdatePeriod updatePeriod, String partCol)
+      throws HiveException, LensException {
+      PartitionTimeline timeline = get(fact, storage, updatePeriod, partCol);
+      if (timeline == null) {
+        throw new LensException(LensCubeErrorCode.TIMELINE_ABSENT.getLensErrorInfo(),
+          fact, storage, updatePeriod, partCol);
+      }
+      return timeline;
+    }
+
 
     /** update partition timeline cache for addition of time partition */
     public void updateForAddition(String cubeTableName, String storageName, UpdatePeriod updatePeriod,
       Map<String, TreeSet<Date>> timePartSpec) throws HiveException, LensException {
+      // fail fast. All part cols mentioned in all partitions should exist.
+      for (String partCol : timePartSpec.keySet()) {
+        getAndFailFast(cubeTableName, storageName, updatePeriod, partCol);
+      }
       for (Map.Entry<String, TreeSet<Date>> entry : timePartSpec.entrySet()) {
-        //Assume timelines has all the time part columns.
         for (Date dt : entry.getValue()) {
           get(cubeTableName, storageName, updatePeriod, entry.getKey()).add(TimePartition.of(updatePeriod, dt));
         }
@@ -452,6 +469,10 @@ public class CubeMetastoreClient {
     /** update partition timeline cache for deletion of time partition */
     public boolean updateForDeletion(String cubeTableName, String storageName, UpdatePeriod updatePeriod,
       Map<String, Date> timePartSpec) throws HiveException, LensException {
+      // fail fast. All part cols mentioned in all partitions should exist.
+      for (String partCol : timePartSpec.keySet()) {
+        getAndFailFast(cubeTableName, storageName, updatePeriod, partCol);
+      }
       boolean updated = false;
       for (Map.Entry<String, Date> entry : timePartSpec.entrySet()) {
         TimePartition part = TimePartition.of(updatePeriod, entry.getValue());
@@ -1209,13 +1230,14 @@ public class CubeMetastoreClient {
     throws HiveException, LensException {
     String storageTableName = MetastoreUtil.getStorageTableName(factOrDimTblName, Storage.getPrefix(storageName));
     if (isDimensionTable(factOrDimTblName)) {
-      return dimLatestPartitionExists(storageTableName, StorageConstants.getLatestPartFilter(latestPartCol));
+      return dimTableLatestPartitionExists(storageTableName);
     } else {
       return !partitionTimelineCache.noPartitionsExist(factOrDimTblName, storageName, latestPartCol);
     }
   }
 
-  private boolean dimLatestPartitionExists(String storageTableName, String latestPartCol) throws HiveException {
+  private boolean dimTableLatestPartitionExistsInMetastore(String storageTableName, String latestPartCol)
+    throws HiveException {
     return partitionExistsByFilter(storageTableName, StorageConstants.getLatestPartFilter(latestPartCol));
   }
 
@@ -1223,10 +1245,6 @@ public class CubeMetastoreClient {
     return latestLookupCache.contains(storageTableName.trim().toLowerCase());
   }
 
-  Partition getLatestPart(String storageTableName, String latestPartCol) throws HiveException {
-    return getLatestPart(storageTableName, latestPartCol, null);
-  }
-
   Partition getLatestPart(String storageTableName, String latestPartCol, Map<String, String> nonTimeParts)
     throws HiveException {
     List<Partition> latestParts =
@@ -1471,7 +1489,7 @@ public class CubeMetastoreClient {
                   if (dimTable.hasStorageSnapshots(storageName)) {
                     String storageTableName = getFactOrDimtableStorageTableName(dimTable.getName(),
                       storageName);
-                    if (dimLatestPartitionExists(storageTableName,
+                    if (dimTableLatestPartitionExistsInMetastore(storageTableName,
                       getDimension(dimTable.getDimName()).getTimedDimension())) {
                       latestLookupCache.add(storageTableName.trim().toLowerCase());
                     }
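
The loops added to updateForAddition and updateForDeletion validate every partition column through getAndFailFast before any timeline is mutated, so one unknown column can no longer leave the cache half-updated. A generic two-pass sketch of that pattern (not Lens code; names invented):

import java.util.Map;

class FailFastSketch {
  static <K, V> void putAllValidated(Map<K, V> cache, Map<K, V> updates) {
    for (K key : updates.keySet()) {            // pass 1: validate only, mutate nothing
      if (!cache.containsKey(key)) {
        throw new IllegalArgumentException("unknown key: " + key);
      }
    }
    cache.putAll(updates);                      // pass 2: every key is valid, safe to mutate
  }
}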

http://git-wip-us.apache.org/repos/asf/lens/blob/e5691d8d/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
index 6a2dc50..6b6f645 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
@@ -19,10 +19,16 @@
 
 package org.apache.lens.cube.metadata;
 
+import static org.apache.lens.cube.metadata.UpdatePeriod.DAILY;
+import static org.apache.lens.cube.metadata.UpdatePeriod.HOURLY;
+import static org.apache.lens.cube.metadata.UpdatePeriod.MONTHLY;
+
 import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.fail;
 
 import java.util.*;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.ExprColumn.ExprSpec;
 import org.apache.lens.cube.metadata.ReferencedDimAtrribute.ChainRefCol;
 import org.apache.lens.cube.metadata.timeline.EndsAndHolesPartitionTimeline;
@@ -780,8 +786,8 @@ public class TestCubeMetastoreClient {
     List<String> timePartCols = new ArrayList<String>();
     partCols.add(getDatePartition());
     timePartCols.add(getDatePartitionKey());
-    updates.add(UpdatePeriod.HOURLY);
-    updates.add(UpdatePeriod.DAILY);
+    updates.add(HOURLY);
+    updates.add(DAILY);
     StorageTableDesc s1 = new StorageTableDesc();
     s1.setInputFormat(TextInputFormat.class.getCanonicalName());
     s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
@@ -813,10 +819,20 @@ public class TestCubeMetastoreClient {
 
     Map<String, Date> timeParts = new HashMap<String, Date>();
     timeParts.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
+    timeParts.put("non_existing_part_col", now);
+    // test error on adding invalid partition
     // test partition
-    StoragePartitionDesc partSpec = new StoragePartitionDesc(cubeFact.getName(), timeParts, null, UpdatePeriod.HOURLY);
+    StoragePartitionDesc partSpec = new StoragePartitionDesc(cubeFact.getName(), timeParts, null, HOURLY);
+    try {
+      client.addPartition(partSpec, c1);
+      fail("Add should fail since partition column non_existing_part_col does not exist");
+    } catch (LensException e) {
+      assertEquals(e.getErrorCode(), LensCubeErrorCode.TIMELINE_ABSENT.getLensErrorInfo().getErrorCode());
+    }
+    timeParts.remove("non_existing_part_col");
+    partSpec = new StoragePartitionDesc(cubeFact.getName(), timeParts, null, HOURLY);
     client.addPartition(partSpec, c1);
-    Assert.assertTrue(client.factPartitionExists(cubeFact.getName(), c1, UpdatePeriod.HOURLY, timeParts,
+    Assert.assertTrue(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts,
       new HashMap<String, String>()));
     Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1,
       TestCubeMetastoreClient.getDatePartitionKey()));
@@ -833,36 +849,36 @@ public class TestCubeMetastoreClient {
     Map<String, Date> timeParts2 = new HashMap<String, Date>();
     timeParts2.put(TestCubeMetastoreClient.getDatePartitionKey(), nowPlus1);
     StoragePartitionDesc partSpec2 =
-      new StoragePartitionDesc(cubeFact.getName(), timeParts2, null, UpdatePeriod.HOURLY);
+      new StoragePartitionDesc(cubeFact.getName(), timeParts2, null, HOURLY);
     partSpec2.setInputFormat(SequenceFileInputFormat.class.getCanonicalName());
     partSpec2.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
     client.addPartition(partSpec2, c1);
     assertEquals(client.getAllParts(storageTableName).size(), 2);
-    Assert.assertTrue(client.factPartitionExists(cubeFact.getName(), c1, UpdatePeriod.HOURLY, timeParts,
+    Assert.assertTrue(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts,
       new HashMap<String, String>()));
-    Assert.assertTrue(client.factPartitionExists(cubeFact.getName(), c1, UpdatePeriod.HOURLY, timeParts2,
+    Assert.assertTrue(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts2,
       new HashMap<String, String>()));
     Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1,
       TestCubeMetastoreClient.getDatePartitionKey()));
     parts = client.getPartitionsByFilter(storageTableName, "dt='latest'");
     assertEquals(parts.size(), 0);
 
-    client.dropPartition(cubeFact.getName(), c1, timeParts2, null, UpdatePeriod.HOURLY);
+    client.dropPartition(cubeFact.getName(), c1, timeParts2, null, HOURLY);
     assertEquals(client.getAllParts(storageTableName).size(), 1);
-    Assert.assertTrue(client.factPartitionExists(cubeFact.getName(), c1, UpdatePeriod.HOURLY, timeParts,
+    Assert.assertTrue(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts,
       new HashMap<String, String>()));
-    Assert.assertFalse(client.factPartitionExists(cubeFact.getName(), c1, UpdatePeriod.HOURLY, timeParts2,
+    Assert.assertFalse(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts2,
       new HashMap<String, String>()));
     Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1,
       TestCubeMetastoreClient.getDatePartitionKey()));
     parts = client.getPartitionsByFilter(storageTableName, "dt='latest'");
     assertEquals(parts.size(), 0);
 
-    client.dropPartition(cubeFact.getName(), c1, timeParts, null, UpdatePeriod.HOURLY);
+    client.dropPartition(cubeFact.getName(), c1, timeParts, null, HOURLY);
     assertEquals(client.getAllParts(storageTableName).size(), 0);
-    Assert.assertFalse(client.factPartitionExists(cubeFact.getName(), c1, UpdatePeriod.HOURLY, timeParts,
+    Assert.assertFalse(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts,
       new HashMap<String, String>()));
-    Assert.assertFalse(client.factPartitionExists(cubeFact.getName(), c1, UpdatePeriod.HOURLY, timeParts2,
+    Assert.assertFalse(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts2,
       new HashMap<String, String>()));
     Assert.assertFalse(client.latestPartitionExists(cubeFact.getName(), c1,
       TestCubeMetastoreClient.getDatePartitionKey()));
@@ -881,15 +897,15 @@ public class TestCubeMetastoreClient {
 
     Map<String, Set<UpdatePeriod>> updatePeriods = new HashMap<String, Set<UpdatePeriod>>();
     Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
-    updates.add(UpdatePeriod.HOURLY);
-    updates.add(UpdatePeriod.DAILY);
+    updates.add(HOURLY);
+    updates.add(DAILY);
 
     ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
     List<String> timePartCols = new ArrayList<String>();
     partCols.add(getDatePartition());
     timePartCols.add(getDatePartitionKey());
-    updates.add(UpdatePeriod.HOURLY);
-    updates.add(UpdatePeriod.DAILY);
+    updates.add(HOURLY);
+    updates.add(DAILY);
     StorageTableDesc s1 = new StorageTableDesc();
     s1.setInputFormat(TextInputFormat.class.getCanonicalName());
     s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
@@ -913,12 +929,12 @@ public class TestCubeMetastoreClient {
     Map<String, String> newProp = new HashMap<String, String>();
     newProp.put("new.prop", "val");
     factTable.addProperties(newProp);
-    factTable.addUpdatePeriod(c1, UpdatePeriod.MONTHLY);
-    factTable.removeUpdatePeriod(c1, UpdatePeriod.HOURLY);
+    factTable.addUpdatePeriod(c1, MONTHLY);
+    factTable.removeUpdatePeriod(c1, HOURLY);
     Set<UpdatePeriod> alterupdates = new HashSet<UpdatePeriod>();
-    alterupdates.add(UpdatePeriod.HOURLY);
-    alterupdates.add(UpdatePeriod.DAILY);
-    alterupdates.add(UpdatePeriod.MONTHLY);
+    alterupdates.add(HOURLY);
+    alterupdates.add(DAILY);
+    alterupdates.add(MONTHLY);
     factTable.alterStorage(c2, alterupdates);
 
     client.alterCubeFactTable(factName, factTable, storageTables);
@@ -928,11 +944,11 @@ public class TestCubeMetastoreClient {
 
     Assert.assertTrue(altered.weight() == 100L);
     Assert.assertTrue(altered.getProperties().get("new.prop").equals("val"));
-    Assert.assertTrue(altered.getUpdatePeriods().get(c1).contains(UpdatePeriod.MONTHLY));
-    Assert.assertFalse(altered.getUpdatePeriods().get(c1).contains(UpdatePeriod.HOURLY));
-    Assert.assertTrue(altered.getUpdatePeriods().get(c2).contains(UpdatePeriod.MONTHLY));
-    Assert.assertTrue(altered.getUpdatePeriods().get(c2).contains(UpdatePeriod.DAILY));
-    Assert.assertTrue(altered.getUpdatePeriods().get(c2).contains(UpdatePeriod.HOURLY));
+    Assert.assertTrue(altered.getUpdatePeriods().get(c1).contains(MONTHLY));
+    Assert.assertFalse(altered.getUpdatePeriods().get(c1).contains(HOURLY));
+    Assert.assertTrue(altered.getUpdatePeriods().get(c2).contains(MONTHLY));
+    Assert.assertTrue(altered.getUpdatePeriods().get(c2).contains(DAILY));
+    Assert.assertTrue(altered.getUpdatePeriods().get(c2).contains(HOURLY));
     Assert.assertTrue(altered.getCubeName().equalsIgnoreCase(CUBE_NAME.toLowerCase()));
     boolean contains = false;
     for (FieldSchema column : altered.getColumns()) {
@@ -1026,8 +1042,8 @@ public class TestCubeMetastoreClient {
 
     Map<String, Set<UpdatePeriod>> updatePeriods = new HashMap<String, Set<UpdatePeriod>>();
     Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
-    updates.add(UpdatePeriod.HOURLY);
-    updates.add(UpdatePeriod.DAILY);
+    updates.add(HOURLY);
+    updates.add(DAILY);
     FieldSchema testDtPart = new FieldSchema("mydate", "string", "date part");
     ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
     List<String> timePartCols = new ArrayList<String>();
@@ -1071,9 +1087,9 @@ public class TestCubeMetastoreClient {
     Map<String, Date> timeParts = new HashMap<String, Date>();
     timeParts.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
     timeParts.put(testDtPart.getName(), testDt);
-    StoragePartitionDesc partSpec = new StoragePartitionDesc(cubeFact.getName(), timeParts, null, UpdatePeriod.HOURLY);
+    StoragePartitionDesc partSpec = new StoragePartitionDesc(cubeFact.getName(), timeParts, null, HOURLY);
     client.addPartition(partSpec, c1);
-    Assert.assertTrue(client.factPartitionExists(cubeFact.getName(), c1, UpdatePeriod.HOURLY, timeParts,
+    Assert.assertTrue(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts,
       new HashMap<String, String>()));
     Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, testDtPart.getName()));
     Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1,
@@ -1086,9 +1102,9 @@ public class TestCubeMetastoreClient {
     parts = client.getPartitionsByFilter(storageTableName, testDtPart.getName() + "='latest'");
     assertEquals(parts.size(), 0);
 
-    client.dropPartition(cubeFact.getName(), c1, timeParts, null, UpdatePeriod.HOURLY);
+    client.dropPartition(cubeFact.getName(), c1, timeParts, null, HOURLY);
     assertEquals(client.getAllParts(storageTableName).size(), 0);
-    Assert.assertFalse(client.factPartitionExists(cubeFact.getName(), c1, UpdatePeriod.HOURLY, timeParts,
+    Assert.assertFalse(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts,
       new HashMap<String, String>()));
     Assert.assertFalse(client.latestPartitionExists(cubeFact.getName(), c1,
       TestCubeMetastoreClient.getDatePartitionKey()));
@@ -1108,8 +1124,8 @@ public class TestCubeMetastoreClient {
 
     Map<String, Set<UpdatePeriod>> updatePeriods = new HashMap<String, Set<UpdatePeriod>>();
     Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
-    updates.add(UpdatePeriod.HOURLY);
-    updates.add(UpdatePeriod.DAILY);
+    updates.add(HOURLY);
+    updates.add(DAILY);
     FieldSchema itPart = new FieldSchema("it", "string", "date part");
     FieldSchema etPart = new FieldSchema("et", "string", "date part");
     ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
@@ -1158,35 +1174,35 @@ public class TestCubeMetastoreClient {
 
     Table c1Table = client.getHiveTable(c1TableName);
     Table c2Table = client.getHiveTable(c2TableName);
-    c2Table.getParameters().put(MetastoreUtil.getPartitionTimelineStorageClassKey(UpdatePeriod.HOURLY,
+    c2Table.getParameters().put(MetastoreUtil.getPartitionTimelineStorageClassKey(HOURLY,
       getDatePartitionKey()), StoreAllPartitionTimeline.class.getCanonicalName());
-    c2Table.getParameters().put(MetastoreUtil.getPartitionTimelineStorageClassKey(UpdatePeriod.HOURLY,
+    c2Table.getParameters().put(MetastoreUtil.getPartitionTimelineStorageClassKey(HOURLY,
       itPart.getName()), StoreAllPartitionTimeline.class.getCanonicalName());
-    c2Table.getParameters().put(MetastoreUtil.getPartitionTimelineStorageClassKey(UpdatePeriod.HOURLY,
+    c2Table.getParameters().put(MetastoreUtil.getPartitionTimelineStorageClassKey(HOURLY,
       etPart.getName()), StoreAllPartitionTimeline.class.getCanonicalName());
     client.pushHiveTable(c2Table);
 
     // same before insertion.
-    assertSameTimelines(factName, storages, UpdatePeriod.HOURLY, partColNames);
+    assertSameTimelines(factName, storages, HOURLY, partColNames);
     EndsAndHolesPartitionTimeline timelineDt = ((EndsAndHolesPartitionTimeline) client.partitionTimelineCache.get(
-      factName, c1, UpdatePeriod.HOURLY, getDatePartitionKey()));
+      factName, c1, HOURLY, getDatePartitionKey()));
     EndsAndHolesPartitionTimeline timelineIt = ((EndsAndHolesPartitionTimeline) client.partitionTimelineCache.get(
-      factName, c1, UpdatePeriod.HOURLY, itPart.getName()));
+      factName, c1, HOURLY, itPart.getName()));
     EndsAndHolesPartitionTimeline timelineEt = ((EndsAndHolesPartitionTimeline) client.partitionTimelineCache.get(
-      factName, c1, UpdatePeriod.HOURLY, etPart.getName()));
+      factName, c1, HOURLY, etPart.getName()));
     StoreAllPartitionTimeline timelineDtC2 = ((StoreAllPartitionTimeline) client.partitionTimelineCache.get(
-      factName, c2, UpdatePeriod.HOURLY, getDatePartitionKey()));
+      factName, c2, HOURLY, getDatePartitionKey()));
     StoreAllPartitionTimeline timelineItC2 = ((StoreAllPartitionTimeline) client.partitionTimelineCache.get(
-      factName, c2, UpdatePeriod.HOURLY, itPart.getName()));
+      factName, c2, HOURLY, itPart.getName()));
     StoreAllPartitionTimeline timelineEtC2 = ((StoreAllPartitionTimeline) client.partitionTimelineCache.get(
-      factName, c2, UpdatePeriod.HOURLY, etPart.getName()));
+      factName, c2, HOURLY, etPart.getName()));
 
     Map<String, Date> timeParts1 = new HashMap<String, Date>();
     timeParts1.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
     timeParts1.put(itPart.getName(), now);
     timeParts1.put(etPart.getName(), now);
     StoragePartitionDesc partSpec1 = new StoragePartitionDesc(cubeFact.getName(), timeParts1, null,
-      UpdatePeriod.HOURLY);
+      HOURLY);
 
     Map<String, Date> timeParts2 = new HashMap<String, Date>();
     timeParts2.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
@@ -1194,19 +1210,19 @@ public class TestCubeMetastoreClient {
     Map<String, String> nonTimeSpec = new HashMap<String, String>();
     nonTimeSpec.put(itPart.getName(), "default");
     final StoragePartitionDesc partSpec2 = new StoragePartitionDesc(cubeFact.getName(), timeParts2, nonTimeSpec,
-      UpdatePeriod.HOURLY);
+      HOURLY);
 
     Map<String, Date> timeParts3 = new HashMap<String, Date>();
     timeParts3.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
     timeParts3.put(etPart.getName(), now);
     final StoragePartitionDesc partSpec3 = new StoragePartitionDesc(cubeFact.getName(), timeParts3, nonTimeSpec,
-      UpdatePeriod.HOURLY);
+      HOURLY);
 
     client.addPartitions(Arrays.asList(partSpec1, partSpec2, partSpec3), c1);
     client.addPartitions(Arrays.asList(partSpec1, partSpec2, partSpec3), c2);
-    PartitionTimeline timeline1Temp = client.partitionTimelineCache.get(factName, c1, UpdatePeriod.HOURLY,
+    PartitionTimeline timeline1Temp = client.partitionTimelineCache.get(factName, c1, HOURLY,
       getDatePartitionKey());
-    PartitionTimeline timeline2Temp = client.partitionTimelineCache.get(factName, c2, UpdatePeriod.HOURLY,
+    PartitionTimeline timeline2Temp = client.partitionTimelineCache.get(factName, c2, HOURLY,
       getDatePartitionKey());
 
     assertEquals(timeline1Temp.getClass(), EndsAndHolesPartitionTimeline.class);
@@ -1215,11 +1231,11 @@ public class TestCubeMetastoreClient {
     assertEquals(client.getAllParts(c1TableName).size(), 3);
     assertEquals(client.getAllParts(c2TableName).size(), 3);
 
-    assertSameTimelines(factName, storages, UpdatePeriod.HOURLY, partColNames);
+    assertSameTimelines(factName, storages, HOURLY, partColNames);
 
-    assertTimeline(timelineDt, timelineDtC2, UpdatePeriod.HOURLY, now, now);
-    assertTimeline(timelineEt, timelineEtC2, UpdatePeriod.HOURLY, now, nowPlus1);
-    assertTimeline(timelineIt, timelineItC2, UpdatePeriod.HOURLY, now, now);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, now, now);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, now, nowPlus1);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, now, now);
 
     Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1,
       TestCubeMetastoreClient.getDatePartitionKey()));
@@ -1234,7 +1250,7 @@ public class TestCubeMetastoreClient {
     timeParts4.put(itPart.getName(), nowPlus1);
     timeParts4.put(etPart.getName(), nowMinus1);
     final StoragePartitionDesc partSpec4 = new StoragePartitionDesc(cubeFact.getName(), timeParts4, null,
-      UpdatePeriod.HOURLY);
+      HOURLY);
 
 
     Map<String, Date> timeParts5 = new HashMap<String, Date>();
@@ -1242,7 +1258,7 @@ public class TestCubeMetastoreClient {
     timeParts5.put(itPart.getName(), nowMinus1);
     timeParts5.put(etPart.getName(), nowMinus2);
     final StoragePartitionDesc partSpec5 = new StoragePartitionDesc(cubeFact.getName(), timeParts5, null,
-      UpdatePeriod.HOURLY);
+      HOURLY);
 
     client.addPartitions(Arrays.asList(partSpec4, partSpec5), c1);
     client.addPartitions(Arrays.asList(partSpec4, partSpec5), c2);
@@ -1250,11 +1266,11 @@ public class TestCubeMetastoreClient {
     assertEquals(client.getAllParts(c1TableName).size(), 5);
     assertEquals(client.getAllParts(c2TableName).size(), 5);
 
-    assertSameTimelines(factName, storages, UpdatePeriod.HOURLY, partColNames);
+    assertSameTimelines(factName, storages, HOURLY, partColNames);
 
-    assertTimeline(timelineDt, timelineDtC2, UpdatePeriod.HOURLY, now, nowPlus1);
-    assertTimeline(timelineEt, timelineEtC2, UpdatePeriod.HOURLY, nowMinus2, nowPlus1);
-    assertTimeline(timelineIt, timelineItC2, UpdatePeriod.HOURLY, nowMinus1, nowPlus1);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, now, nowPlus1);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, nowMinus2, nowPlus1);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, nowMinus1, nowPlus1);
 
     Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1,
       TestCubeMetastoreClient.getDatePartitionKey()));
@@ -1266,7 +1282,7 @@ public class TestCubeMetastoreClient {
     timeParts6.put(itPart.getName(), nowMinus1);
     timeParts6.put(etPart.getName(), nowMinus2);
     final StoragePartitionDesc partSpec6 = new StoragePartitionDesc(cubeFact.getName(), timeParts6, null,
-      UpdatePeriod.HOURLY);
+      HOURLY);
 
     client.addPartition(partSpec6, c1);
     client.addPartition(partSpec6, c2);
@@ -1274,18 +1290,18 @@ public class TestCubeMetastoreClient {
     assertEquals(client.getAllParts(c1TableName).size(), 6);
     assertEquals(client.getAllParts(c2TableName).size(), 6);
 
-    assertSameTimelines(factName, storages, UpdatePeriod.HOURLY, partColNames);
+    assertSameTimelines(factName, storages, HOURLY, partColNames);
 
-    assertTimeline(timelineDt, timelineDtC2, UpdatePeriod.HOURLY, nowMinus2, nowPlus1, nowMinus1);
-    assertTimeline(timelineEt, timelineEtC2, UpdatePeriod.HOURLY, nowMinus2, nowPlus1);
-    assertTimeline(timelineIt, timelineItC2, UpdatePeriod.HOURLY, nowMinus1, nowPlus1);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, nowMinus2, nowPlus1, nowMinus1);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, nowMinus2, nowPlus1);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, nowMinus1, nowPlus1);
 
     Map<String, Date> timeParts7 = new HashMap<String, Date>();
     timeParts7.put(TestCubeMetastoreClient.getDatePartitionKey(), nowMinus5);
     timeParts7.put(itPart.getName(), nowMinus5);
     timeParts7.put(etPart.getName(), nowMinus5);
     final StoragePartitionDesc partSpec7 = new StoragePartitionDesc(cubeFact.getName(), timeParts7, null,
-      UpdatePeriod.HOURLY);
+      HOURLY);
 
     client.addPartition(partSpec7, c1);
     client.addPartition(partSpec7, c2);
@@ -1312,14 +1328,14 @@ public class TestCubeMetastoreClient {
     client.updatePartitions(factName, c2, c2Parts);
     assertSamePartitions(client.getAllParts(c2TableName), c2Parts);
 
-    assertSameTimelines(factName, storages, UpdatePeriod.HOURLY, partColNames);
+    assertSameTimelines(factName, storages, HOURLY, partColNames);
 
-    assertTimeline(timelineDt, timelineDtC2, UpdatePeriod.HOURLY, nowMinus5, nowPlus1, nowMinus4, nowMinus3, nowMinus1);
-    assertTimeline(timelineEt, timelineEtC2, UpdatePeriod.HOURLY, nowMinus5, nowPlus1, nowMinus4, nowMinus3);
-    assertTimeline(timelineIt, timelineItC2, UpdatePeriod.HOURLY, nowMinus5, nowPlus1, nowMinus4, nowMinus3, nowMinus2);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, nowMinus5, nowPlus1, nowMinus4, nowMinus3, nowMinus1);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, nowMinus5, nowPlus1, nowMinus4, nowMinus3);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, nowMinus5, nowPlus1, nowMinus4, nowMinus3, nowMinus2);
 
     assertNoPartitionNamedLatest(c1TableName, partColNames);
-    assertSameTimelines(factName, storages, UpdatePeriod.HOURLY, partColNames);
+    assertSameTimelines(factName, storages, HOURLY, partColNames);
     assertEquals(Hive.get(client.getConf()).getTable(c1TableName).getParameters().get(
       MetastoreUtil.getPartitionTimelineCachePresenceKey()), "true");
     assertEquals(Hive.get(client.getConf()).getTable(c2TableName).getParameters().get(
@@ -1327,15 +1343,15 @@ public class TestCubeMetastoreClient {
 
     // alter tables and see timeline still exists
     client.alterCubeFactTable(factName, cubeFact, storageTables);
-    assertSameTimelines(factName, storages, UpdatePeriod.HOURLY, partColNames);
+    assertSameTimelines(factName, storages, HOURLY, partColNames);
     assertEquals(Hive.get(client.getConf()).getTable(c1TableName).getParameters().get(
       MetastoreUtil.getPartitionTimelineCachePresenceKey()), "true");
     assertEquals(Hive.get(client.getConf()).getTable(c2TableName).getParameters().get(
       MetastoreUtil.getPartitionTimelineCachePresenceKey()), "true");
 
 
-    client.dropPartition(cubeFact.getName(), c1, timeParts5, null, UpdatePeriod.HOURLY);
-    client.dropPartition(cubeFact.getName(), c2, timeParts5, null, UpdatePeriod.HOURLY);
+    client.dropPartition(cubeFact.getName(), c1, timeParts5, null, HOURLY);
+    client.dropPartition(cubeFact.getName(), c2, timeParts5, null, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 6);
     Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1,
       TestCubeMetastoreClient.getDatePartitionKey()));
@@ -1343,15 +1359,15 @@ public class TestCubeMetastoreClient {
     Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, etPart.getName()));
 
     assertNoPartitionNamedLatest(c1TableName, partColNames);
-    assertSameTimelines(factName, storages, UpdatePeriod.HOURLY, partColNames);
+    assertSameTimelines(factName, storages, HOURLY, partColNames);
 
-    assertTimeline(timelineDt, timelineDtC2, UpdatePeriod.HOURLY, nowMinus5, now, nowMinus4, nowMinus3, nowMinus1);
-    assertTimeline(timelineEt, timelineEtC2, UpdatePeriod.HOURLY, nowMinus5, nowPlus1, nowMinus4, nowMinus3);
-    assertTimeline(timelineIt, timelineItC2, UpdatePeriod.HOURLY, nowMinus5, nowPlus1, nowMinus4, nowMinus3, nowMinus2);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, nowMinus5, now, nowMinus4, nowMinus3, nowMinus1);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, nowMinus5, nowPlus1, nowMinus4, nowMinus3);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, nowMinus5, nowPlus1, nowMinus4, nowMinus3, nowMinus2);
 
 
-    client.dropPartition(cubeFact.getName(), c1, timeParts7, null, UpdatePeriod.HOURLY);
-    client.dropPartition(cubeFact.getName(), c2, timeParts7, null, UpdatePeriod.HOURLY);
+    client.dropPartition(cubeFact.getName(), c1, timeParts7, null, HOURLY);
+    client.dropPartition(cubeFact.getName(), c2, timeParts7, null, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 5);
     Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1,
       TestCubeMetastoreClient.getDatePartitionKey()));
@@ -1359,14 +1375,14 @@ public class TestCubeMetastoreClient {
     Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, etPart.getName()));
 
     assertNoPartitionNamedLatest(c1TableName, partColNames);
-    assertSameTimelines(factName, storages, UpdatePeriod.HOURLY, partColNames);
-    assertTimeline(timelineDt, timelineDtC2, UpdatePeriod.HOURLY, nowMinus2, now, nowMinus1);
-    assertTimeline(timelineEt, timelineEtC2, UpdatePeriod.HOURLY, nowMinus2, nowPlus1);
-    assertTimeline(timelineIt, timelineItC2, UpdatePeriod.HOURLY, nowMinus1, nowPlus1);
+    assertSameTimelines(factName, storages, HOURLY, partColNames);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, nowMinus2, now, nowMinus1);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, nowMinus2, nowPlus1);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, nowMinus1, nowPlus1);
 
 
-    client.dropPartition(cubeFact.getName(), c1, timeParts2, nonTimeSpec, UpdatePeriod.HOURLY);
-    client.dropPartition(cubeFact.getName(), c2, timeParts2, nonTimeSpec, UpdatePeriod.HOURLY);
+    client.dropPartition(cubeFact.getName(), c1, timeParts2, nonTimeSpec, HOURLY);
+    client.dropPartition(cubeFact.getName(), c2, timeParts2, nonTimeSpec, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 4);
     Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1,
       TestCubeMetastoreClient.getDatePartitionKey()));
@@ -1374,36 +1390,36 @@ public class TestCubeMetastoreClient {
     Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1, etPart.getName()));
 
     assertNoPartitionNamedLatest(c1TableName, partColNames);
-    assertSameTimelines(factName, storages, UpdatePeriod.HOURLY, partColNames);
-    assertTimeline(timelineDt, timelineDtC2, UpdatePeriod.HOURLY, nowMinus2, now, nowMinus1);
-    assertTimeline(timelineEt, timelineEtC2, UpdatePeriod.HOURLY, nowMinus2, now);
-    assertTimeline(timelineIt, timelineItC2, UpdatePeriod.HOURLY, nowMinus1, nowPlus1);
+    assertSameTimelines(factName, storages, HOURLY, partColNames);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, nowMinus2, now, nowMinus1);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, nowMinus2, now);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, nowMinus1, nowPlus1);
 
-    client.dropPartition(cubeFact.getName(), c1, timeParts4, null, UpdatePeriod.HOURLY);
-    client.dropPartition(cubeFact.getName(), c2, timeParts4, null, UpdatePeriod.HOURLY);
+    client.dropPartition(cubeFact.getName(), c1, timeParts4, null, HOURLY);
+    client.dropPartition(cubeFact.getName(), c2, timeParts4, null, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 3);
 
     assertNoPartitionNamedLatest(c1TableName, partColNames);
-    assertSameTimelines(factName, storages, UpdatePeriod.HOURLY, partColNames);
-    assertTimeline(timelineDt, timelineDtC2, UpdatePeriod.HOURLY, nowMinus2, now, nowMinus1);
-    assertTimeline(timelineEt, timelineEtC2, UpdatePeriod.HOURLY, nowMinus2, now, nowMinus1);
-    assertTimeline(timelineIt, timelineItC2, UpdatePeriod.HOURLY, nowMinus1, now);
-    client.dropPartition(cubeFact.getName(), c1, timeParts3, nonTimeSpec, UpdatePeriod.HOURLY);
-    client.dropPartition(cubeFact.getName(), c2, timeParts3, nonTimeSpec, UpdatePeriod.HOURLY);
-    assertSameTimelines(factName, storages, UpdatePeriod.HOURLY, partColNames);
-    assertTimeline(timelineDt, timelineDtC2, UpdatePeriod.HOURLY, nowMinus2, now, nowMinus1);
-    assertTimeline(timelineEt, timelineEtC2, UpdatePeriod.HOURLY, nowMinus2, now, nowMinus1);
-    assertTimeline(timelineIt, timelineItC2, UpdatePeriod.HOURLY, nowMinus1, now);
-
-    client.dropPartition(cubeFact.getName(), c1, timeParts6, null, UpdatePeriod.HOURLY);
-    client.dropPartition(cubeFact.getName(), c2, timeParts6, null, UpdatePeriod.HOURLY);
-    assertSameTimelines(factName, storages, UpdatePeriod.HOURLY, partColNames);
-    assertTimeline(timelineDt, timelineDtC2, UpdatePeriod.HOURLY, now, now);
-    assertTimeline(timelineEt, timelineEtC2, UpdatePeriod.HOURLY, now, now);
-    assertTimeline(timelineIt, timelineItC2, UpdatePeriod.HOURLY, now, now);
-    client.dropPartition(cubeFact.getName(), c1, timeParts1, null, UpdatePeriod.HOURLY);
-    client.dropPartition(cubeFact.getName(), c2, timeParts1, null, UpdatePeriod.HOURLY);
-    assertSameTimelines(factName, storages, UpdatePeriod.HOURLY, partColNames);
+    assertSameTimelines(factName, storages, HOURLY, partColNames);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, nowMinus2, now, nowMinus1);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, nowMinus2, now, nowMinus1);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, nowMinus1, now);
+    client.dropPartition(cubeFact.getName(), c1, timeParts3, nonTimeSpec, HOURLY);
+    client.dropPartition(cubeFact.getName(), c2, timeParts3, nonTimeSpec, HOURLY);
+    assertSameTimelines(factName, storages, HOURLY, partColNames);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, nowMinus2, now, nowMinus1);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, nowMinus2, now, nowMinus1);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, nowMinus1, now);
+
+    client.dropPartition(cubeFact.getName(), c1, timeParts6, null, HOURLY);
+    client.dropPartition(cubeFact.getName(), c2, timeParts6, null, HOURLY);
+    assertSameTimelines(factName, storages, HOURLY, partColNames);
+    assertTimeline(timelineDt, timelineDtC2, HOURLY, now, now);
+    assertTimeline(timelineEt, timelineEtC2, HOURLY, now, now);
+    assertTimeline(timelineIt, timelineItC2, HOURLY, now, now);
+    client.dropPartition(cubeFact.getName(), c1, timeParts1, null, HOURLY);
+    client.dropPartition(cubeFact.getName(), c2, timeParts1, null, HOURLY);
+    assertSameTimelines(factName, storages, HOURLY, partColNames);
     Assert.assertTrue(timelineDt.isEmpty());
     Assert.assertTrue(timelineEt.isEmpty());
     Assert.assertTrue(timelineIt.isEmpty());
@@ -1529,8 +1545,8 @@ public class TestCubeMetastoreClient {
 
     Map<String, Set<UpdatePeriod>> updatePeriods = new HashMap<String, Set<UpdatePeriod>>();
     Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
-    updates.add(UpdatePeriod.HOURLY);
-    updates.add(UpdatePeriod.DAILY);
+    updates.add(HOURLY);
+    updates.add(DAILY);
     ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
     List<String> timePartCols = new ArrayList<String>();
     partCols.add(getDatePartition());
@@ -1566,9 +1582,9 @@ public class TestCubeMetastoreClient {
     // test partition
     Map<String, Date> timeParts = new HashMap<String, Date>();
     timeParts.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
-    StoragePartitionDesc partSpec = new StoragePartitionDesc(cubeFact.getName(), timeParts, null, UpdatePeriod.HOURLY);
+    StoragePartitionDesc partSpec = new StoragePartitionDesc(cubeFact.getName(), timeParts, null, HOURLY);
     client.addPartition(partSpec, c1);
-    Assert.assertTrue(client.factPartitionExists(cubeFact.getName(), c1, UpdatePeriod.HOURLY, timeParts,
+    Assert.assertTrue(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts,
       new HashMap<String, String>()));
     Assert.assertTrue(client.latestPartitionExists(cubeFact.getName(), c1,
       TestCubeMetastoreClient.getDatePartitionKey()));
@@ -1577,8 +1593,8 @@ public class TestCubeMetastoreClient {
     assertEquals(parts.size(), 0);
     assertEquals(client.getAllParts(storageTableName).size(), 1);
 
-    client.dropPartition(cubeFact.getName(), c1, timeParts, null, UpdatePeriod.HOURLY);
-    Assert.assertFalse(client.factPartitionExists(cubeFact.getName(), c1, UpdatePeriod.HOURLY, timeParts,
+    client.dropPartition(cubeFact.getName(), c1, timeParts, null, HOURLY);
+    Assert.assertFalse(client.factPartitionExists(cubeFact.getName(), c1, HOURLY, timeParts,
       new HashMap<String, String>()));
     Assert.assertFalse(client.latestPartitionExists(cubeFact.getName(), c1,
       TestCubeMetastoreClient.getDatePartitionKey()));
@@ -1603,8 +1619,8 @@ public class TestCubeMetastoreClient {
 
     Map<String, Set<UpdatePeriod>> updatePeriods = new HashMap<String, Set<UpdatePeriod>>();
     Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
-    updates.add(UpdatePeriod.HOURLY);
-    updates.add(UpdatePeriod.DAILY);
+    updates.add(HOURLY);
+    updates.add(DAILY);
     ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
     List<String> timePartCols = new ArrayList<String>();
     partCols.add(getDatePartition());
@@ -1647,9 +1663,9 @@ public class TestCubeMetastoreClient {
     timeParts.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
     // test partition
     StoragePartitionDesc sPartSpec =
-      new StoragePartitionDesc(cubeFactWithParts.getName(), timeParts, partSpec, UpdatePeriod.HOURLY);
+      new StoragePartitionDesc(cubeFactWithParts.getName(), timeParts, partSpec, HOURLY);
     client.addPartition(sPartSpec, c1);
-    Assert.assertTrue(client.factPartitionExists(cubeFactWithParts.getName(), c1, UpdatePeriod.HOURLY, timeParts,
+    Assert.assertTrue(client.factPartitionExists(cubeFactWithParts.getName(), c1, HOURLY, timeParts,
       partSpec));
     Assert.assertTrue(client.latestPartitionExists(cubeFactWithParts.getName(), c1,
       TestCubeMetastoreClient.getDatePartitionKey()));
@@ -1659,8 +1675,8 @@ public class TestCubeMetastoreClient {
     List<Partition> parts = client.getPartitionsByFilter(storageTableName, "dt='latest'");
     assertEquals(parts.size(), 0);
 
-    client.dropPartition(cubeFactWithParts.getName(), c1, timeParts, partSpec, UpdatePeriod.HOURLY);
-    Assert.assertFalse(client.factPartitionExists(cubeFactWithParts.getName(), c1, UpdatePeriod.HOURLY, timeParts,
+    client.dropPartition(cubeFactWithParts.getName(), c1, timeParts, partSpec, HOURLY);
+    Assert.assertFalse(client.factPartitionExists(cubeFactWithParts.getName(), c1, HOURLY, timeParts,
       partSpec));
     Assert.assertFalse(client.latestPartitionExists(cubeFactWithParts.getName(), c1,
       TestCubeMetastoreClient.getDatePartitionKey()));
@@ -1685,8 +1701,8 @@ public class TestCubeMetastoreClient {
 
     Map<String, Set<UpdatePeriod>> updatePeriods = new HashMap<String, Set<UpdatePeriod>>();
     Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
-    updates.add(UpdatePeriod.HOURLY);
-    updates.add(UpdatePeriod.DAILY);
+    updates.add(HOURLY);
+    updates.add(DAILY);
     FieldSchema testDtPart = new FieldSchema("mydate", "string", "date part");
     ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
     List<String> timePartCols = new ArrayList<String>();
@@ -1734,9 +1750,9 @@ public class TestCubeMetastoreClient {
     timeParts.put(testDtPart.getName(), testDt);
     // test partition
     StoragePartitionDesc sPartSpec =
-      new StoragePartitionDesc(cubeFactWithParts.getName(), timeParts, partSpec, UpdatePeriod.HOURLY);
+      new StoragePartitionDesc(cubeFactWithParts.getName(), timeParts, partSpec, HOURLY);
     client.addPartition(sPartSpec, c1);
-    Assert.assertTrue(client.factPartitionExists(cubeFactWithParts.getName(), c1, UpdatePeriod.HOURLY, timeParts,
+    Assert.assertTrue(client.factPartitionExists(cubeFactWithParts.getName(), c1, HOURLY, timeParts,
       partSpec));
     Assert.assertTrue(client.latestPartitionExists(cubeFactWithParts.getName(), c1,
       TestCubeMetastoreClient.getDatePartitionKey()));
@@ -1747,8 +1763,8 @@ public class TestCubeMetastoreClient {
 
     assertNoPartitionNamedLatest(storageTableName, "dt", testDtPart.getName());
 
-    client.dropPartition(cubeFactWithParts.getName(), c1, timeParts, partSpec, UpdatePeriod.HOURLY);
-    Assert.assertFalse(client.factPartitionExists(cubeFactWithParts.getName(), c1, UpdatePeriod.HOURLY, timeParts,
+    client.dropPartition(cubeFactWithParts.getName(), c1, timeParts, partSpec, HOURLY);
+    Assert.assertFalse(client.factPartitionExists(cubeFactWithParts.getName(), c1, HOURLY, timeParts,
       partSpec));
     Assert.assertFalse(client.latestPartitionExists(cubeFactWithParts.getName(), c1, testDtPart.getName()));
     Assert.assertFalse(client.latestPartitionExists(cubeFactWithParts.getName(), c1,
@@ -1774,8 +1790,8 @@ public class TestCubeMetastoreClient {
 
     Map<String, Set<UpdatePeriod>> updatePeriods = new HashMap<String, Set<UpdatePeriod>>();
     Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
-    updates.add(UpdatePeriod.HOURLY);
-    updates.add(UpdatePeriod.DAILY);
+    updates.add(HOURLY);
+    updates.add(DAILY);
     ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
     List<String> timePartCols = new ArrayList<String>();
     partCols.add(getDatePartition());
@@ -1823,9 +1839,9 @@ public class TestCubeMetastoreClient {
     timeParts.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
     // test partition
     StoragePartitionDesc sPartSpec =
-      new StoragePartitionDesc(cubeFactWithTwoStorages.getName(), timeParts, partSpec, UpdatePeriod.HOURLY);
+      new StoragePartitionDesc(cubeFactWithTwoStorages.getName(), timeParts, partSpec, HOURLY);
     client.addPartition(sPartSpec, c1);
-    Assert.assertTrue(client.factPartitionExists(cubeFactWithTwoStorages.getName(), c1, UpdatePeriod.HOURLY, timeParts,
+    Assert.assertTrue(client.factPartitionExists(cubeFactWithTwoStorages.getName(), c1, HOURLY, timeParts,
       partSpec));
     Assert.assertTrue(client.latestPartitionExists(cubeFactWithTwoStorages.getName(), c1,
       TestCubeMetastoreClient.getDatePartitionKey()));
@@ -1835,9 +1851,9 @@ public class TestCubeMetastoreClient {
     assertNoPartitionNamedLatest(storageTableName, "dt");
 
     StoragePartitionDesc sPartSpec2 =
-      new StoragePartitionDesc(cubeFactWithTwoStorages.getName(), timeParts, null, UpdatePeriod.HOURLY);
+      new StoragePartitionDesc(cubeFactWithTwoStorages.getName(), timeParts, null, HOURLY);
     client.addPartition(sPartSpec2, c2);
-    Assert.assertTrue(client.factPartitionExists(cubeFactWithTwoStorages.getName(), c2, UpdatePeriod.HOURLY, timeParts,
+    Assert.assertTrue(client.factPartitionExists(cubeFactWithTwoStorages.getName(), c2, HOURLY, timeParts,
       new HashMap<String, String>()));
     Assert.assertTrue(client.latestPartitionExists(cubeFactWithTwoStorages.getName(), c2,
       TestCubeMetastoreClient.getDatePartitionKey()));
@@ -1846,15 +1862,15 @@ public class TestCubeMetastoreClient {
 
     assertNoPartitionNamedLatest(storageTableName2, "dt");
 
-    client.dropPartition(cubeFactWithTwoStorages.getName(), c1, timeParts, partSpec, UpdatePeriod.HOURLY);
-    Assert.assertFalse(client.factPartitionExists(cubeFactWithTwoStorages.getName(), c1, UpdatePeriod.HOURLY,
+    client.dropPartition(cubeFactWithTwoStorages.getName(), c1, timeParts, partSpec, HOURLY);
+    Assert.assertFalse(client.factPartitionExists(cubeFactWithTwoStorages.getName(), c1, HOURLY,
       timeParts, partSpec));
     Assert.assertFalse(client.latestPartitionExists(cubeFactWithTwoStorages.getName(), c1,
       TestCubeMetastoreClient.getDatePartitionKey()));
     assertEquals(client.getAllParts(storageTableName).size(), 0);
 
-    client.dropPartition(cubeFactWithTwoStorages.getName(), c2, timeParts, null, UpdatePeriod.HOURLY);
-    Assert.assertFalse(client.factPartitionExists(cubeFactWithTwoStorages.getName(), c2, UpdatePeriod.HOURLY,
+    client.dropPartition(cubeFactWithTwoStorages.getName(), c2, timeParts, null, HOURLY);
+    Assert.assertFalse(client.factPartitionExists(cubeFactWithTwoStorages.getName(), c2, HOURLY,
       timeParts, new HashMap<String, String>()));
     Assert.assertFalse(client.latestPartitionExists(cubeFactWithTwoStorages.getName(), c2,
       TestCubeMetastoreClient.getDatePartitionKey()));
@@ -1881,7 +1897,7 @@ public class TestCubeMetastoreClient {
     s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
     s1.setPartCols(partCols);
     s1.setTimePartCols(timePartCols);
-    dumpPeriods.put(c1, UpdatePeriod.HOURLY);
+    dumpPeriods.put(c1, HOURLY);
 
     Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
     storageTables.put(c1, s1);
@@ -1913,7 +1929,7 @@ public class TestCubeMetastoreClient {
     // test partition
     Map<String, Date> timeParts = new HashMap<String, Date>();
     timeParts.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
-    StoragePartitionDesc sPartSpec = new StoragePartitionDesc(cubeDim.getName(), timeParts, null, UpdatePeriod.HOURLY);
+    StoragePartitionDesc sPartSpec = new StoragePartitionDesc(cubeDim.getName(), timeParts, null, HOURLY);
     client.addPartition(sPartSpec, c1);
     Assert.assertTrue(client.dimPartitionExists(cubeDim.getName(), c1, timeParts));
     Assert
@@ -1924,9 +1940,9 @@ public class TestCubeMetastoreClient {
     assertEquals(1, parts.size());
     assertEquals(TextInputFormat.class.getCanonicalName(), parts.get(0).getInputFormatClass().getCanonicalName());
     assertEquals(parts.get(0).getParameters().get(MetastoreUtil.getLatestPartTimestampKey("dt")),
-      UpdatePeriod.HOURLY.format().format(now));
+      HOURLY.format().format(now));
 
-    client.dropPartition(cubeDim.getName(), c1, timeParts, null, UpdatePeriod.HOURLY);
+    client.dropPartition(cubeDim.getName(), c1, timeParts, null, HOURLY);
     Assert.assertFalse(client.dimPartitionExists(cubeDim.getName(), c1, timeParts));
     Assert.assertFalse(client.latestPartitionExists(cubeDim.getName(), c1,
       TestCubeMetastoreClient.getDatePartitionKey()));
@@ -1954,7 +1970,7 @@ public class TestCubeMetastoreClient {
     s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
     s1.setPartCols(partCols);
     s1.setTimePartCols(timePartCols);
-    dumpPeriods.put(c1, UpdatePeriod.HOURLY);
+    dumpPeriods.put(c1, HOURLY);
 
     Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
     storageTables.put(c1, s1);
@@ -1993,7 +2009,7 @@ public class TestCubeMetastoreClient {
     Assert.assertFalse(client.dimTableLatestPartitionExists(storageTableName));
     Map<String, Date> timeParts = new HashMap<String, Date>();
     timeParts.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
-    StoragePartitionDesc sPartSpec = new StoragePartitionDesc(cubeDim.getName(), timeParts, null, UpdatePeriod.HOURLY);
+    StoragePartitionDesc sPartSpec = new StoragePartitionDesc(cubeDim.getName(), timeParts, null, HOURLY);
     client.addPartition(sPartSpec, c1);
     Assert.assertTrue(client.dimPartitionExists(cubeDim.getName(), c1, timeParts));
     Assert
@@ -2005,7 +2021,7 @@ public class TestCubeMetastoreClient {
     assertEquals(TextInputFormat.class.getCanonicalName(), parts.get(0).getInputFormatClass().getCanonicalName());
     Assert.assertFalse(parts.get(0).getCols().contains(newcol));
     assertEquals(parts.get(0).getParameters().get(MetastoreUtil.getLatestPartTimestampKey("dt")),
-      UpdatePeriod.HOURLY.format().format(now));
+      HOURLY.format().format(now));
 
     // Partition with different schema
     cubeDim.alterColumn(newcol);
@@ -2014,7 +2030,7 @@ public class TestCubeMetastoreClient {
     Map<String, Date> timeParts2 = new HashMap<String, Date>();
     timeParts2.put(TestCubeMetastoreClient.getDatePartitionKey(), nowPlus1);
     StoragePartitionDesc sPartSpec2 =
-      new StoragePartitionDesc(cubeDim.getName(), timeParts2, null, UpdatePeriod.HOURLY);
+      new StoragePartitionDesc(cubeDim.getName(), timeParts2, null, HOURLY);
     sPartSpec2.setInputFormat(SequenceFileInputFormat.class.getCanonicalName());
     sPartSpec2.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
     client.addPartition(sPartSpec2, c1);
@@ -2029,11 +2045,11 @@ public class TestCubeMetastoreClient {
       .getCanonicalName());
     Assert.assertTrue(parts.get(0).getCols().contains(newcol));
     assertEquals(parts.get(0).getParameters().get(MetastoreUtil.getLatestPartTimestampKey("dt")),
-      UpdatePeriod.HOURLY.format().format(nowPlus1));
+      HOURLY.format().format(nowPlus1));
 
-    client.dropPartition(cubeDim.getName(), c1, timeParts2, null, UpdatePeriod.HOURLY);
-    Assert.assertTrue(client.dimPartitionExists(cubeDim.getName(), c1, timeParts));
-    Assert.assertFalse(client.dimPartitionExists(cubeDim.getName(), c1, timeParts2));
+    client.dropPartition(cubeDim.getName(), c1, timeParts, null, HOURLY);
+    Assert.assertTrue(client.dimPartitionExists(cubeDim.getName(), c1, timeParts2));
+    Assert.assertFalse(client.dimPartitionExists(cubeDim.getName(), c1, timeParts));
     Assert
       .assertTrue(client.latestPartitionExists(cubeDim.getName(), c1, TestCubeMetastoreClient.getDatePartitionKey()));
     Assert.assertTrue(client.dimTableLatestPartitionExists(storageTableName));
@@ -2041,12 +2057,12 @@ public class TestCubeMetastoreClient {
     assertEquals(1, parts.size());
     assertEquals(TextInputFormat.class.getCanonicalName(), parts.get(0).getInputFormatClass().getCanonicalName());
     assertEquals(parts.get(0).getParameters().get(MetastoreUtil.getLatestPartTimestampKey("dt")),
-      UpdatePeriod.HOURLY.format().format(now));
+      HOURLY.format().format(nowPlus1));
     assertEquals(client.getAllParts(storageTableName).size(), 2);
 
-    client.dropPartition(cubeDim.getName(), c1, timeParts, null, UpdatePeriod.HOURLY);
-    Assert.assertFalse(client.dimPartitionExists(cubeDim.getName(), c1, timeParts));
+    client.dropPartition(cubeDim.getName(), c1, timeParts2, null, HOURLY);
     Assert.assertFalse(client.dimPartitionExists(cubeDim.getName(), c1, timeParts2));
+    Assert.assertFalse(client.dimPartitionExists(cubeDim.getName(), c1, timeParts));
     Assert.assertFalse(client.latestPartitionExists(cubeDim.getName(), c1,
       TestCubeMetastoreClient.getDatePartitionKey()));
     assertEquals(client.getAllParts(storageTableName).size(), 0);
@@ -2094,14 +2110,14 @@ public class TestCubeMetastoreClient {
 
     timeParts.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
     nonTimeParts.put("region", "asia");
-    StoragePartitionDesc sPartSpec = new StoragePartitionDesc(dimName, timeParts, nonTimeParts, UpdatePeriod.HOURLY);
+    StoragePartitionDesc sPartSpec = new StoragePartitionDesc(dimName, timeParts, nonTimeParts, HOURLY);
     client.addPartition(sPartSpec, c3);
     expectedLatestValues.put("asia", now);
     assertLatestForRegions(storageTableName, expectedLatestValues);
 
     timeParts.put(TestCubeMetastoreClient.getDatePartitionKey(), nowMinus1);
     nonTimeParts.put("region", "africa");
-    sPartSpec = new StoragePartitionDesc(dimName, timeParts, nonTimeParts, UpdatePeriod.HOURLY);
+    sPartSpec = new StoragePartitionDesc(dimName, timeParts, nonTimeParts, HOURLY);
     client.addPartition(sPartSpec, c3);
     expectedLatestValues.put("asia", now);
     expectedLatestValues.put("africa", nowMinus1);
@@ -2109,7 +2125,7 @@ public class TestCubeMetastoreClient {
 
     timeParts.put(TestCubeMetastoreClient.getDatePartitionKey(), nowPlus1);
     nonTimeParts.put("region", "africa");
-    sPartSpec = new StoragePartitionDesc(dimName, timeParts, nonTimeParts, UpdatePeriod.HOURLY);
+    sPartSpec = new StoragePartitionDesc(dimName, timeParts, nonTimeParts, HOURLY);
     client.addPartition(sPartSpec, c3);
     expectedLatestValues.put("asia", now);
     expectedLatestValues.put("africa", nowPlus1);
@@ -2117,19 +2133,19 @@ public class TestCubeMetastoreClient {
 
     timeParts.put(TestCubeMetastoreClient.getDatePartitionKey(), nowPlus3);
     nonTimeParts.put("region", "asia");
-    sPartSpec = new StoragePartitionDesc(dimName, timeParts, nonTimeParts, UpdatePeriod.HOURLY);
+    sPartSpec = new StoragePartitionDesc(dimName, timeParts, nonTimeParts, HOURLY);
     client.addPartition(sPartSpec, c3);
     expectedLatestValues.put("asia", nowPlus3);
     expectedLatestValues.put("africa", nowPlus1);
     assertLatestForRegions(storageTableName, expectedLatestValues);
 
-    client.dropPartition(dimName, c3, timeParts, nonTimeParts, UpdatePeriod.HOURLY);
+    client.dropPartition(dimName, c3, timeParts, nonTimeParts, HOURLY);
     expectedLatestValues.put("asia", now);
     expectedLatestValues.put("africa", nowPlus1);
     assertLatestForRegions(storageTableName, expectedLatestValues);
 
     timeParts.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
-    client.dropPartition(dimName, c3, timeParts, nonTimeParts, UpdatePeriod.HOURLY);
+    client.dropPartition(dimName, c3, timeParts, nonTimeParts, HOURLY);
     expectedLatestValues.remove("asia");
     assertLatestForRegions(storageTableName, expectedLatestValues);
 
@@ -2148,7 +2164,7 @@ public class TestCubeMetastoreClient {
     assertEquals(parts.size(), expectedLatestValues.size());
     for (Partition part : parts) {
       assertEquals(MetastoreUtil.getLatestTimeStampOfDimtable(part, getDatePartitionKey()),
-        TimePartition.of(UpdatePeriod.HOURLY, expectedLatestValues.get(part.getSpec().get("region"))).getDate());
+        TimePartition.of(HOURLY, expectedLatestValues.get(part.getSpec().get("region"))).getDate());
     }
   }
 
@@ -2181,7 +2197,7 @@ public class TestCubeMetastoreClient {
     s1.setTimePartCols(timePartCols);
     String[] partColNames = new String[]{getDatePartitionKey(), itPart.getName(), etPart.getName()};
     Map<String, UpdatePeriod> dumpPeriods = new HashMap<String, UpdatePeriod>();
-    dumpPeriods.put(c1, UpdatePeriod.HOURLY);
+    dumpPeriods.put(c1, HOURLY);
 
     Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
     storageTables.put(c1, s1);
@@ -2218,7 +2234,7 @@ public class TestCubeMetastoreClient {
     timeParts1.put(itPart.getName(), now);
     timeParts1.put(etPart.getName(), now);
     StoragePartitionDesc partSpec1 = new StoragePartitionDesc(cubeDim.getName(), timeParts1, null,
-      UpdatePeriod.HOURLY);
+      HOURLY);
 
     Map<String, Date> timeParts2 = new HashMap<String, Date>();
     timeParts2.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
@@ -2226,27 +2242,27 @@ public class TestCubeMetastoreClient {
     Map<String, String> nonTimeSpec = new HashMap<String, String>();
     nonTimeSpec.put(itPart.getName(), "default");
     final StoragePartitionDesc partSpec2 = new StoragePartitionDesc(cubeDim.getName(), timeParts2, nonTimeSpec,
-      UpdatePeriod.HOURLY);
+      HOURLY);
 
     Map<String, Date> timeParts3 = new HashMap<String, Date>();
     timeParts3.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
     timeParts3.put(etPart.getName(), now);
     final StoragePartitionDesc partSpec3 = new StoragePartitionDesc(cubeDim.getName(), timeParts3, nonTimeSpec,
-      UpdatePeriod.HOURLY);
+      HOURLY);
 
     client.addPartitions(Arrays.asList(partSpec1, partSpec2, partSpec3), c1);
     String c1TableName = MetastoreUtil.getFactOrDimtableStorageTableName(cubeDim.getName(), c1);
     assertEquals(client.getAllParts(c1TableName).size(), 8);
 
-    assertEquals(getLatestValues(c1TableName, UpdatePeriod.HOURLY, partColNames, null),
-      toPartitionArray(UpdatePeriod.HOURLY, now, now, nowPlus1));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
+      toPartitionArray(HOURLY, now, now, nowPlus1));
 
     Map<String, Date> timeParts4 = new HashMap<String, Date>();
     timeParts4.put(TestCubeMetastoreClient.getDatePartitionKey(), now);
     timeParts4.put(itPart.getName(), nowPlus1);
     timeParts4.put(etPart.getName(), nowMinus1);
     final StoragePartitionDesc partSpec4 = new StoragePartitionDesc(cubeDim.getName(), timeParts4, null,
-      UpdatePeriod.HOURLY);
+      HOURLY);
 
 
     Map<String, Date> timeParts5 = new HashMap<String, Date>();
@@ -2254,19 +2270,19 @@ public class TestCubeMetastoreClient {
     timeParts5.put(itPart.getName(), nowMinus1);
     timeParts5.put(etPart.getName(), nowMinus2);
     final StoragePartitionDesc partSpec5 = new StoragePartitionDesc(cubeDim.getName(), timeParts5, null,
-      UpdatePeriod.HOURLY);
+      HOURLY);
 
     client.addPartitions(Arrays.asList(partSpec4, partSpec5), c1);
 
     assertEquals(client.getAllParts(c1TableName).size(), 10);
-    assertEquals(getLatestValues(c1TableName, UpdatePeriod.HOURLY, partColNames, null),
-      toPartitionArray(UpdatePeriod.HOURLY, nowPlus1, nowPlus1, nowPlus1));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
+      toPartitionArray(HOURLY, nowPlus1, nowPlus1, nowPlus1));
     Map<String, Date> timeParts6 = new HashMap<String, Date>();
     timeParts6.put(TestCubeMetastoreClient.getDatePartitionKey(), nowMinus2);
     timeParts6.put(itPart.getName(), nowMinus1);
     timeParts6.put(etPart.getName(), nowMinus2);
     final StoragePartitionDesc partSpec6 = new StoragePartitionDesc(cubeDim.getName(), timeParts6, null,
-      UpdatePeriod.HOURLY);
+      HOURLY);
 
     client.addPartition(partSpec6, c1);
 
@@ -2278,38 +2294,38 @@ public class TestCubeMetastoreClient {
     timeParts7.put(itPart.getName(), nowMinus5);
     timeParts7.put(etPart.getName(), nowMinus5);
     final StoragePartitionDesc partSpec7 = new StoragePartitionDesc(cubeDim.getName(), timeParts7, null,
-      UpdatePeriod.HOURLY);
+      HOURLY);
 
     client.addPartition(partSpec7, c1);
     assertEquals(client.getAllParts(c1TableName).size(), 12);
-    assertEquals(getLatestValues(c1TableName, UpdatePeriod.HOURLY, partColNames, null),
-      toPartitionArray(UpdatePeriod.HOURLY, nowPlus1, nowPlus1, nowPlus1));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
+      toPartitionArray(HOURLY, nowPlus1, nowPlus1, nowPlus1));
 
-    client.dropPartition(cubeDim.getName(), c1, timeParts5, null, UpdatePeriod.HOURLY);
+    client.dropPartition(cubeDim.getName(), c1, timeParts5, null, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 11);
-    assertEquals(getLatestValues(c1TableName, UpdatePeriod.HOURLY, partColNames, null),
-      toPartitionArray(UpdatePeriod.HOURLY, now, nowPlus1, nowPlus1));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
+      toPartitionArray(HOURLY, now, nowPlus1, nowPlus1));
 
-    client.dropPartition(cubeDim.getName(), c1, timeParts7, null, UpdatePeriod.HOURLY);
+    client.dropPartition(cubeDim.getName(), c1, timeParts7, null, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 10);
-    assertEquals(getLatestValues(c1TableName, UpdatePeriod.HOURLY, partColNames, null),
-      toPartitionArray(UpdatePeriod.HOURLY, now, nowPlus1, nowPlus1));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
+      toPartitionArray(HOURLY, now, nowPlus1, nowPlus1));
 
-    client.dropPartition(cubeDim.getName(), c1, timeParts2, nonTimeSpec, UpdatePeriod.HOURLY);
+    client.dropPartition(cubeDim.getName(), c1, timeParts2, nonTimeSpec, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 9);
-    assertEquals(getLatestValues(c1TableName, UpdatePeriod.HOURLY, partColNames, null),
-      toPartitionArray(UpdatePeriod.HOURLY, now, nowPlus1, now));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
+      toPartitionArray(HOURLY, now, nowPlus1, now));
 
-    client.dropPartition(cubeDim.getName(), c1, timeParts4, null, UpdatePeriod.HOURLY);
+    client.dropPartition(cubeDim.getName(), c1, timeParts4, null, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 8);
-    assertEquals(getLatestValues(c1TableName, UpdatePeriod.HOURLY, partColNames, null),
-      toPartitionArray(UpdatePeriod.HOURLY, now, now, now));
+    assertEquals(getLatestValues(c1TableName, HOURLY, partColNames, null),
+      toPartitionArray(HOURLY, now, now, now));
 
-    client.dropPartition(cubeDim.getName(), c1, timeParts3, nonTimeSpec, UpdatePeriod.HOURLY);
+    client.dropPartition(cubeDim.getName(), c1, timeParts3, nonTimeSpec, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 5);
-    client.dropPartition(cubeDim.getName(), c1, timeParts6, null, UpdatePeriod.HOURLY);
+    client.dropPartition(cubeDim.getName(), c1, timeParts6, null, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 4);
-    client.dropPartition(cubeDim.getName(), c1, timeParts1, null, UpdatePeriod.HOURLY);
+    client.dropPartition(cubeDim.getName(), c1, timeParts1, null, HOURLY);
     assertEquals(client.getAllParts(c1TableName).size(), 0);
     assertNoPartitionNamedLatest(c1TableName, partColNames);
   }
@@ -2331,7 +2347,7 @@ public class TestCubeMetastoreClient {
     s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
     s1.setPartCols(partCols);
     s1.setTimePartCols(timePartCols);
-    dumpPeriods.put(c1, UpdatePeriod.HOURLY);
+    dumpPeriods.put(c1, HOURLY);
 
     Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
     storageTables.put(c1, s1);
@@ -2418,7 +2434,7 @@ public class TestCubeMetastoreClient {
     s2.setInputFormat(TextInputFormat.class.getCanonicalName());
     s2.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
     client.addStorage(dimTable, c2, null, s2);
-    client.addStorage(dimTable, c3, UpdatePeriod.DAILY, s1);
+    client.addStorage(dimTable, c3, DAILY, s1);
     Assert.assertTrue(client.tableExists(MetastoreUtil.getFactOrDimtableStorageTableName(dimTblName, c2)));
     Assert.assertTrue(client.tableExists(MetastoreUtil.getFactOrDimtableStorageTableName(dimTblName, c3)));
     CubeDimensionTable altered3 = client.getDimensionTable(dimTblName);
@@ -2501,7 +2517,7 @@ public class TestCubeMetastoreClient {
     s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
     s1.setPartCols(partCols);
     s1.setTimePartCols(timePartCols);
-    dumpPeriods.put(c1, UpdatePeriod.HOURLY);
+    dumpPeriods.put(c1, HOURLY);
 
     StorageTableDesc s2 = new StorageTableDesc();
     s2.setInputFormat(TextInputFormat.class.getCanonicalName());

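The bare HOURLY and DAILY references above compile because this commit also switches TestCubeMetastoreClient to static imports. A minimal sketch of the imports it assumes (UpdatePeriod sits in org.apache.lens.cube.metadata, like the other metadata classes referenced in these hunks):

    import static org.apache.lens.cube.metadata.UpdatePeriod.DAILY;
    import static org.apache.lens.cube.metadata.UpdatePeriod.HOURLY;
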
http://git-wip-us.apache.org/repos/asf/lens/blob/e5691d8d/lens-server-api/src/main/java/org/apache/lens/server/api/error/LensException.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/error/LensException.java b/lens-server-api/src/main/java/org/apache/lens/server/api/error/LensException.java
index ac1c558..a1ffeb6 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/error/LensException.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/error/LensException.java
@@ -55,6 +55,7 @@ public class LensException extends Exception implements Comparable<LensException
 
   @Getter
   private final LensErrorInfo errorInfo;
+  private String formattedErrorMsg;
 
   public int getErrorCode() {
     return errorInfo.getErrorCode();
@@ -202,12 +203,17 @@ public class LensException extends Exception implements Comparable<LensException
 
   private LensErrorTO buildLensErrorTO(final ErrorCollection errorCollection, final LensError lensError) {
 
-    final String formattedErrorMsg = getFormattedErrorMsg(lensError);
+    formattedErrorMsg = getFormattedErrorMsg(lensError);
     final String stackTrace = getStackTraceString();
     return buildLensErrorTO(errorCollection, formattedErrorMsg, stackTrace);
   }
 
   @Override
+  public String getMessage() {
+    return formattedErrorMsg != null ? formattedErrorMsg : super.getMessage();
+  }
+
+  @Override
   public boolean equals(final Object o) {
 
     if (this == o) {

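The hunk above caches the formatted message when the error payload is built and surfaces it through the standard Exception API. A minimal sketch of the intended effect; the error template here is hypothetical, not from the commit:

    // Suppose the error collection registers "Syntax error: %s" for this code.
    LensException e = new LensException(errorInfo, "missing FROM clause");
    // Until buildLensErrorTO(...) has run, getMessage() falls back to
    // super.getMessage(); afterwards the formatted text wins:
    //   e.getMessage()  ->  "Syntax error: missing FROM clause"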

[03/50] [abbrv] lens git commit: LENS-824 Fix all javadoc warnings/errors

Posted by sh...@apache.org.
LENS-824 Fix all javadoc warnings/errors


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/10dcebb9
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/10dcebb9
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/10dcebb9

Branch: refs/heads/LENS-581
Commit: 10dcebb9247f605769dfd90e25be82aff3f79dbc
Parents: be36b63
Author: Gabor Liptak <gl...@users.sourceforge.net>
Authored: Mon Nov 16 11:40:06 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Mon Nov 16 11:40:06 2015 +0530

----------------------------------------------------------------------
 .../lens/api/query/save/ParameterCollectionType.java    |  2 +-
 .../org/apache/lens/api/result/PrettyPrintable.java     |  2 +-
 .../java/org/apache/lens/api/util/PathValidator.java    | 12 ++++++------
 .../org/apache/lens/cli/commands/LensQueryCommands.java |  1 -
 .../lens/server/metastore/CubeMetastoreServiceImpl.java |  2 +-
 .../lens/server/query/QueryExecutionServiceImpl.java    |  2 +-
 .../apache/lens/server/query/save/SavedQueryDao.java    |  4 ++--
 .../org/apache/lens/server/quota/QuotaServiceImpl.java  |  2 +-
 .../server/scheduler/QuerySchedulerServiceImpl.java     |  2 +-
 .../apache/lens/server/session/HiveSessionService.java  |  2 +-
 pom.xml                                                 |  6 +++---
 11 files changed, 18 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/10dcebb9/lens-api/src/main/java/org/apache/lens/api/query/save/ParameterCollectionType.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/query/save/ParameterCollectionType.java b/lens-api/src/main/java/org/apache/lens/api/query/save/ParameterCollectionType.java
index 971e794..14637de 100644
--- a/lens-api/src/main/java/org/apache/lens/api/query/save/ParameterCollectionType.java
+++ b/lens-api/src/main/java/org/apache/lens/api/query/save/ParameterCollectionType.java
@@ -24,7 +24,7 @@ import javax.xml.bind.annotation.XmlRootElement;
  * The enum ParameterCollectionType
  * Collection type of a parameter has to be chosen based on its context.
  * - If it is occurring next to an IN/NOT IN clause, its multiple
- * - If it is found with EQ/NEQ..>,<,>=,<=,like etc, its single
+ * - If it is found with EQ/NEQ..&gt;,&lt;,&gt;=,&lt;=,like etc, its single
  */
 @XmlRootElement
 public enum ParameterCollectionType {

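Read concretely (illustrative only; this assumes the enum constants are named SINGLE and MULTIPLE, as the comment's wording suggests):

    // "WHERE id IN :ids"    -> :ids  binds a collection  -> MULTIPLE
    // "WHERE name = :name"  -> :name binds one value     -> SINGLE
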
http://git-wip-us.apache.org/repos/asf/lens/blob/10dcebb9/lens-api/src/main/java/org/apache/lens/api/result/PrettyPrintable.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/result/PrettyPrintable.java b/lens-api/src/main/java/org/apache/lens/api/result/PrettyPrintable.java
index e6152b8..5c45596 100644
--- a/lens-api/src/main/java/org/apache/lens/api/result/PrettyPrintable.java
+++ b/lens-api/src/main/java/org/apache/lens/api/result/PrettyPrintable.java
@@ -26,7 +26,7 @@ public interface PrettyPrintable {
   /**
    * Returns the contents of this object in the form of a pretty formatted string.
    *
-   * @return
+   * @return contents as formatted String
    */
   String toPrettyString();
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/10dcebb9/lens-api/src/main/java/org/apache/lens/api/util/PathValidator.java
----------------------------------------------------------------------
diff --git a/lens-api/src/main/java/org/apache/lens/api/util/PathValidator.java b/lens-api/src/main/java/org/apache/lens/api/util/PathValidator.java
index 92a936a..45397d7 100644
--- a/lens-api/src/main/java/org/apache/lens/api/util/PathValidator.java
+++ b/lens-api/src/main/java/org/apache/lens/api/util/PathValidator.java
@@ -52,10 +52,10 @@ public class PathValidator {
    * Also removes erroneously appended prefix for URI's.
    * Takes additional properties for special URI handling.
    *
-   * @param path
-   * @param shouldBeDirectory
-   * @param shouldExist
-   * @return
+   * @param path input path
+   * @param shouldBeDirectory should be a directory
+   * @param shouldExist should exist
+   * @return converted path
    */
   public String getValidPath(File path, boolean shouldBeDirectory,
                                        boolean shouldExist) {
@@ -89,8 +89,8 @@ public class PathValidator {
    *
    * Any new URI's have to be handled appropriately
    *
-   * @param path
-   * @return
+   * @param path input path
+   * @return cleaned up path
    */
   public String removePrefixBeforeURI(String path) {
     /**

http://git-wip-us.apache.org/repos/asf/lens/blob/10dcebb9/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
index 096fd7a..83b919b 100644
--- a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
+++ b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensQueryCommands.java
@@ -181,7 +181,6 @@ public class LensQueryCommands extends BaseLensCommand {
    * Explain query.
    *
    * @param sql      the sql
-   * @param location the location
    * @return the string
    * @throws LensAPIException
    * @throws UnsupportedEncodingException the unsupported encoding exception

http://git-wip-us.apache.org/repos/asf/lens/blob/10dcebb9/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
index 4c8b525..ea913ad 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
@@ -1448,7 +1448,7 @@ public class CubeMetastoreServiceImpl extends BaseLensService implements CubeMet
   }
 
   /**
-   * @inheritDoc
+   * {@inheritDoc}
    */
   @Override
   public HealthStatus getHealthStatus() {

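The same one-line fix repeats across the service implementations below: @inheritDoc is an inline javadoc tag, so it only takes effect inside braces; written bare, the javadoc tool treats it as an unknown block tag and emits a warning, which is exactly what LENS-824 clears. A self-contained sketch of the corrected form:

    interface Service {
      /** Reports the current health of the service. */
      void healthCheck();
    }

    class ServiceImpl implements Service {
      /** {@inheritDoc} */
      @Override
      public void healthCheck() { /* no-op; sketch only */ }
    }
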
http://git-wip-us.apache.org/repos/asf/lens/blob/10dcebb9/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
index 4d8ae51..fdc8bfd 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/QueryExecutionServiceImpl.java
@@ -2417,7 +2417,7 @@ public class QueryExecutionServiceImpl extends BaseLensService implements QueryE
   }
 
   /**
-   * @inheritDoc
+   * {@inheritDoc}
    */
   @Override
   public HealthStatus getHealthStatus() {

http://git-wip-us.apache.org/repos/asf/lens/blob/10dcebb9/lens-server/src/main/java/org/apache/lens/server/query/save/SavedQueryDao.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/query/save/SavedQueryDao.java b/lens-server/src/main/java/org/apache/lens/server/query/save/SavedQueryDao.java
index 9dfde0d..18d563d 100644
--- a/lens-server/src/main/java/org/apache/lens/server/query/save/SavedQueryDao.java
+++ b/lens-server/src/main/java/org/apache/lens/server/query/save/SavedQueryDao.java
@@ -76,7 +76,7 @@ public class SavedQueryDao {
   /**
    * Creates the saved query table
    *
-   * @throws LensException
+   * @throws LensException cannot create saved query table
    */
   public void createSavedQueryTableIfNotExists() throws LensException {
     try {
@@ -90,7 +90,7 @@ public class SavedQueryDao {
    * Saves the query passed
    *
    * @param savedQuery
-   * @return
+   * @return insert id
    * @throws LensException
    */
   public long saveQuery(SavedQuery savedQuery) throws LensException {

http://git-wip-us.apache.org/repos/asf/lens/blob/10dcebb9/lens-server/src/main/java/org/apache/lens/server/quota/QuotaServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/quota/QuotaServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/quota/QuotaServiceImpl.java
index cc627cf..4b67295 100644
--- a/lens-server/src/main/java/org/apache/lens/server/quota/QuotaServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/quota/QuotaServiceImpl.java
@@ -44,7 +44,7 @@ public class QuotaServiceImpl extends BaseLensService implements QuotaService {
   }
 
   /**
-   * @inheritDoc
+   * {@inheritDoc}
    */
   @Override
   public HealthStatus getHealthStatus() {

http://git-wip-us.apache.org/repos/asf/lens/blob/10dcebb9/lens-server/src/main/java/org/apache/lens/server/scheduler/QuerySchedulerServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/scheduler/QuerySchedulerServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/scheduler/QuerySchedulerServiceImpl.java
index 3ea7524..24949bd 100644
--- a/lens-server/src/main/java/org/apache/lens/server/scheduler/QuerySchedulerServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/scheduler/QuerySchedulerServiceImpl.java
@@ -44,7 +44,7 @@ public class QuerySchedulerServiceImpl extends BaseLensService implements QueryS
   }
 
   /**
-   * @inheritDoc
+   * {@inheritDoc}
    */
   @Override
   public HealthStatus getHealthStatus() {

http://git-wip-us.apache.org/repos/asf/lens/blob/10dcebb9/lens-server/src/main/java/org/apache/lens/server/session/HiveSessionService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/session/HiveSessionService.java b/lens-server/src/main/java/org/apache/lens/server/session/HiveSessionService.java
index 22497ca..2745b27 100644
--- a/lens-server/src/main/java/org/apache/lens/server/session/HiveSessionService.java
+++ b/lens-server/src/main/java/org/apache/lens/server/session/HiveSessionService.java
@@ -452,7 +452,7 @@ public class HiveSessionService extends BaseLensService implements SessionServic
   }
 
   /**
-   * @inheritDoc
+   * {@inheritDoc}
    */
   @Override
   public HealthStatus getHealthStatus() {

http://git-wip-us.apache.org/repos/asf/lens/blob/10dcebb9/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 49a9d50..9b417d9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -39,7 +39,7 @@
     <servlet.api.version>2.5</servlet.api.version>
     <jsp.api.version>2.0</jsp.api.version>
     <ws.rs.version>2.0</ws.rs.version>
-    <jaxb.api.version>2.1</jaxb.api.version>
+    <jaxb.api.version>2.2.11</jaxb.api.version>
     <javax.mail.version>1.4</javax.mail.version>
 
     <!-- hadoop stack -->
@@ -90,8 +90,8 @@
     <powermock.version>1.5.6</powermock.version>
 
     <!-- maven plugins -->
-    <jaxb2.plugin.version>0.12.3</jaxb2.plugin.version>
-    <jaxb2.basics.plugin.version>0.6.3</jaxb2.basics.plugin.version>
+    <jaxb2.plugin.version>0.13.0</jaxb2.plugin.version>
+    <jaxb2.basics.plugin.version>0.9.5</jaxb2.basics.plugin.version>
     <jetty.plugin.version>${jetty.version}</jetty.plugin.version>
     <exec.plugin.version>1.2.1</exec.plugin.version>
     <enunciate.plugin.version>1.28</enunciate.plugin.version>


[47/50] [abbrv] lens git commit: LENS-552: Union support across storage tables in multi fact query

Posted by sh...@apache.org.
LENS-552: Union support across storage tables in multi fact query


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/04f5a822
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/04f5a822
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/04f5a822

Branch: refs/heads/LENS-581
Commit: 04f5a8223f652baa5cfbebec7f8f9a2886df5076
Parents: bf1053b
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Thu Dec 24 12:39:33 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Thu Dec 24 12:39:33 2015 +0530

----------------------------------------------------------------------
 .../lens/cube/parse/AggregateResolver.java      |  12 +-
 .../apache/lens/cube/parse/AliasReplacer.java   |   5 +-
 .../apache/lens/cube/parse/AutoJoinContext.java |   2 +-
 .../apache/lens/cube/parse/CandidateFact.java   | 130 ++++---------------
 .../apache/lens/cube/parse/ColumnResolver.java  |   2 +-
 .../lens/cube/parse/CubeQueryContext.java       |  69 +++++-----
 .../lens/cube/parse/CubeQueryRewriter.java      |   2 +-
 .../apache/lens/cube/parse/DefaultQueryAST.java |  74 +++++++++++
 .../cube/parse/DenormalizationResolver.java     |  18 +--
 .../apache/lens/cube/parse/DimHQLContext.java   |   6 +-
 .../lens/cube/parse/DimOnlyHQLContext.java      |  15 +--
 .../lens/cube/parse/ExpressionResolver.java     |  24 ++--
 .../apache/lens/cube/parse/FactHQLContext.java  |  65 ----------
 .../apache/lens/cube/parse/GroupbyResolver.java |   2 -
 .../org/apache/lens/cube/parse/HQLParser.java   |   1 -
 .../apache/lens/cube/parse/JoinResolver.java    |  17 +--
 .../lens/cube/parse/MultiFactHQLContext.java    |  46 +++----
 .../org/apache/lens/cube/parse/QueryAST.java    |  86 ++++++++++++
 .../lens/cube/parse/SingleFactHQLContext.java   |  96 --------------
 .../parse/SingleFactMultiStorageHQLContext.java |  52 +++++---
 .../SingleFactSingleStorageHQLContext.java      |  85 ++++++++++++
 .../lens/cube/parse/StorageTableResolver.java   |  43 +++---
 .../apache/lens/cube/metadata/DateFactory.java  |  11 ++
 .../apache/lens/cube/parse/CubeTestSetup.java   |   2 +-
 .../lens/cube/parse/TestBaseCubeQueries.java    |  24 ++--
 .../lens/cube/parse/TestCubeRewriter.java       |  53 ++++++--
 26 files changed, 481 insertions(+), 461 deletions(-)
----------------------------------------------------------------------

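The thread running through this commit is a new QueryAST abstraction: CubeQueryContext and CandidateFact both implement it, so resolvers that used to branch between rewriting the query's ASTs and rewriting a fact's copied ASTs now operate on whichever QueryAST applies. An abridged sketch of the shape (ASTNode is org.apache.hadoop.hive.ql.parse.ASTNode; the full interface is in the QueryAST.java and DefaultQueryAST.java diffs below):

    interface QueryAST {
      ASTNode getSelectAST();  ASTNode getWhereAST();  ASTNode getGroupByAST();
      ASTNode getHavingAST();  ASTNode getJoinAST();   ASTNode getOrderByAST();
      Integer getLimitValue();
    }

    // DenormalizationResolver after this commit, in outline:
    //   QueryAST ast = replaceFact ? cfact : cubeql;
    //   resolveClause(cubeql, ast.getSelectAST());
    //   resolveClause(cubeql, ast.getWhereAST());
    //   ...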

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
index 39bd1cc..fd7036a 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java
@@ -47,8 +47,6 @@ import lombok.extern.slf4j.Slf4j;
  */
 @Slf4j
 class AggregateResolver implements ContextRewriter {
-  public AggregateResolver(Configuration conf) {
-  }
 
   @Override
   public void rewriteContext(CubeQueryContext cubeql) throws LensException {
@@ -166,7 +164,7 @@ class AggregateResolver implements ContextRewriter {
     String colname;
 
     if (node.getToken().getType() == HiveParser.TOK_TABLE_OR_COL) {
-      colname = ((ASTNode) node.getChild(0)).getText();
+      colname = node.getChild(0).getText();
     } else {
       // node in 'alias.column' format
       ASTNode tabident = HQLParser.findNodeByPath(node, TOK_TABLE_OR_COL, Identifier);
@@ -193,15 +191,9 @@ class AggregateResolver implements ContextRewriter {
           throw new LensException(LensCubeErrorCode.NO_DEFAULT_AGGREGATE.getLensErrorInfo(), colname);
         }
         ASTNode fnroot = new ASTNode(new CommonToken(HiveParser.TOK_FUNCTION));
-        fnroot.setParent(node.getParent());
-
         ASTNode fnIdentNode = new ASTNode(new CommonToken(HiveParser.Identifier, aggregateFn));
-        fnIdentNode.setParent(fnroot);
         fnroot.addChild(fnIdentNode);
-
-        node.setParent(fnroot);
         fnroot.addChild(node);
-
         return fnroot;
       }
     } else {
@@ -224,7 +216,7 @@ class AggregateResolver implements ContextRewriter {
 
       String colname;
       if (node.getToken().getType() == HiveParser.TOK_TABLE_OR_COL) {
-        colname = ((ASTNode) node.getChild(0)).getText();
+        colname = node.getChild(0).getText();
       } else {
         // node in 'alias.column' format
         ASTNode colIdent = (ASTNode) node.getChild(1);

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
index 0656049..e629731 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
@@ -81,7 +81,7 @@ class AliasReplacer implements ContextRewriter {
 
     replaceAliases(cubeql.getWhereAST(), 0, colToTableAlias);
 
-    replaceAliases(cubeql.getJoinTree(), 0, colToTableAlias);
+    replaceAliases(cubeql.getJoinAST(), 0, colToTableAlias);
 
     // Update the aggregate expression set
     AggregateResolver.updateAggregates(cubeql.getSelectAST(), cubeql);
@@ -183,7 +183,6 @@ class AliasReplacer implements ContextRewriter {
         ASTNode aliasNode = (ASTNode) node.getChild(0);
         ASTNode newAliasIdent = new ASTNode(new CommonToken(HiveParser.Identifier, newAlias));
         aliasNode.setChild(0, newAliasIdent);
-        newAliasIdent.setParent(aliasNode);
       } else {
         // Just a column ref, we need to make it alias.col
         // '.' will become the parent node
@@ -192,9 +191,7 @@ class AliasReplacer implements ContextRewriter {
         ASTNode tabRefNode = new ASTNode(new CommonToken(HiveParser.TOK_TABLE_OR_COL, "TOK_TABLE_OR_COL"));
 
         tabRefNode.addChild(aliasIdentNode);
-        aliasIdentNode.setParent(tabRefNode);
         dot.addChild(tabRefNode);
-        tabRefNode.setParent(dot);
 
         ASTNode colIdentNode = new ASTNode(new CommonToken(HiveParser.Identifier, colName));
 

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/AutoJoinContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/AutoJoinContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/AutoJoinContext.java
index 9472506..7f13c6c 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AutoJoinContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AutoJoinContext.java
@@ -101,7 +101,7 @@ public class AutoJoinContext {
   }
 
   private JoinClause getJoinClause(CandidateFact fact) {
-    if (fact == null) {
+    if (fact == null || !factClauses.containsKey(fact)) {
       return minCostClause;
     }
     return factClauses.get(fact);

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
index 2338ba7..c305244 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
@@ -37,14 +37,14 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 import org.antlr.runtime.CommonToken;
 
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import lombok.Getter;
+import lombok.Setter;
 
 /**
  * Holds context of a candidate fact table.
  */
-public class CandidateFact implements CandidateTable {
+public class CandidateFact implements CandidateTable, QueryAST {
   final CubeFactTable fact;
   @Getter
   private Set<String> storageTables;
@@ -52,27 +52,32 @@ public class CandidateFact implements CandidateTable {
   private int numQueriedParts = 0;
   @Getter
   private final Set<FactPartition> partsQueried = Sets.newHashSet();
-  @Getter
-  private final Map<TimeRange, String> rangeToWhereClause = Maps.newHashMap();
 
   private CubeInterface baseTable;
+  @Getter @Setter
   private ASTNode selectAST;
+  @Getter @Setter
   private ASTNode whereAST;
-  private ASTNode groupbyAST;
+  @Getter @Setter
+  private ASTNode groupByAST;
+  @Getter @Setter
   private ASTNode havingAST;
-  private ASTNode joinTree;
+  @Getter @Setter
+  private ASTNode joinAST;
+  @Getter @Setter
+  private ASTNode orderByAST;
+  @Getter @Setter
+  private Integer limitValue;
   private List<TimeRangeNode> timenodes = Lists.newArrayList();
   private final List<Integer> selectIndices = Lists.newArrayList();
   private final List<Integer> dimFieldIndices = Lists.newArrayList();
   private Collection<String> columns;
   @Getter
-  private final Map<String, String> storgeWhereClauseMap = new HashMap<String, String>();
+  private final Map<String, String> storgeWhereClauseMap = new HashMap<>();
   @Getter
-  private final Map<TimeRange, Map<String, LinkedHashSet<FactPartition>>> rangeToStoragePartMap =
-    new HashMap<TimeRange, Map<String, LinkedHashSet<FactPartition>>>();
+  private final Map<TimeRange, Map<String, LinkedHashSet<FactPartition>>> rangeToStoragePartMap = new HashMap<>();
   @Getter
-  private final Map<TimeRange, Map<String, String>> rangeToStorageWhereMap =
-    new HashMap<TimeRange, Map<String, String>>();
+  private final Map<TimeRange, Map<String, String>> rangeToStorageWhereMap = new HashMap<>();
 
   CandidateFact(CubeFactTable fact, CubeInterface cube) {
     this.fact = fact;
@@ -114,57 +119,25 @@ public class CandidateFact implements CandidateTable {
     numQueriedParts += incr;
   }
 
-  private void updateTimeRanges(ASTNode root, ASTNode parent, int childIndex) throws LensException {
-    if (root == null) {
-      return;
-    } else if (root.getToken().getType() == TOK_FUNCTION) {
-      ASTNode fname = HQLParser.findNodeByPath(root, Identifier);
-      if (fname != null && CubeQueryContext.TIME_RANGE_FUNC.equalsIgnoreCase(fname.getText())) {
-        timenodes.add(new TimeRangeNode(root, parent, childIndex));
-      }
-    } else {
-      for (int i = 0; i < root.getChildCount(); i++) {
-        ASTNode child = (ASTNode) root.getChild(i);
-        updateTimeRanges(child, root, i);
-      }
-    }
-  }
-
   // copy ASTs from CubeQueryContext
   public void copyASTs(CubeQueryContext cubeql) throws LensException {
-    this.selectAST = HQLParser.copyAST(cubeql.getSelectAST());
-    this.whereAST = HQLParser.copyAST(cubeql.getWhereAST());
-    if (cubeql.getJoinTree() != null) {
-      this.joinTree = HQLParser.copyAST(cubeql.getJoinTree());
+    setSelectAST(HQLParser.copyAST(cubeql.getSelectAST()));
+    setWhereAST(HQLParser.copyAST(cubeql.getWhereAST()));
+    if (cubeql.getJoinAST() != null) {
+      setJoinAST(HQLParser.copyAST(cubeql.getJoinAST()));
     }
     if (cubeql.getGroupByAST() != null) {
-      this.groupbyAST = HQLParser.copyAST(cubeql.getGroupByAST());
+      setGroupByAST(HQLParser.copyAST(cubeql.getGroupByAST()));
     }
     if (cubeql.getHavingAST() != null) {
-      this.havingAST = HQLParser.copyAST(cubeql.getHavingAST());
+      setHavingAST(HQLParser.copyAST(cubeql.getHavingAST()));
     }
-    // copy timeranges
-    updateTimeRanges(this.whereAST, null, 0);
   }
 
   public String getWhereClause(String storageTable) {
     return getStorgeWhereClauseMap().get(storageTable);
   }
 
-  public void updateTimeranges(CubeQueryContext cubeql) throws LensException {
-    // Update WhereAST with range clause
-    // resolve timerange positions and replace it by corresponding where clause
-    for (int i = 0; i < cubeql.getTimeRanges().size(); i++) {
-      TimeRange range = cubeql.getTimeRanges().get(i);
-      String rangeWhere = rangeToWhereClause.get(range);
-      if (!StringUtils.isBlank(rangeWhere)) {
-        ASTNode rangeAST = HQLParser.parseExpr(rangeWhere);
-        rangeAST.setParent(timenodes.get(i).parent);
-        timenodes.get(i).parent.setChild(timenodes.get(i).childIndex, rangeAST);
-      }
-    }
-  }
-
   /**
    * Update the ASTs to include only the fields queried from this fact, in all the expressions
    *
@@ -322,54 +295,15 @@ public class CandidateFact implements CandidateTable {
     return null;
   }
 
-  public String getGroupbyTree() {
-    if (groupbyAST != null) {
-      return HQLParser.getString(groupbyAST);
+  @Override
+  public String getOrderByTree() {
+    if (orderByAST != null) {
+      return HQLParser.getString(orderByAST);
     }
     return null;
   }
 
-  /**
-   * @return the selectAST
-   */
-  public ASTNode getSelectAST() {
-    return selectAST;
-  }
-
-  /**
-   * @param selectAST the selectAST to set
-   */
-  public void setSelectAST(ASTNode selectAST) {
-    this.selectAST = selectAST;
-  }
-
-  /**
-   * @return the whereAST
-   */
-  public ASTNode getWhereAST() {
-    return whereAST;
-  }
 
-  /**
-   * @param whereAST the whereAST to set
-   */
-  public void setWhereAST(ASTNode whereAST) {
-    this.whereAST = whereAST;
-  }
-
-  /**
-   * @return the havingAST
-   */
-  public ASTNode getHavingAST() {
-    return havingAST;
-  }
-
-  /**
-   * @param havingAST the havingAST to set
-   */
-  public void setHavingAST(ASTNode havingAST) {
-    this.havingAST = havingAST;
-  }
 
   /**
    * @return the selectIndices
@@ -385,13 +319,9 @@ public class CandidateFact implements CandidateTable {
     return dimFieldIndices;
   }
 
-  public ASTNode getGroupByAST() {
-    return groupbyAST;
-  }
-
   public String getGroupByTree() {
-    if (groupbyAST != null) {
-      return HQLParser.getString(groupbyAST);
+    if (groupByAST != null) {
+      return HQLParser.getString(groupByAST);
     }
     return null;
   }
@@ -413,8 +343,4 @@ public class CandidateFact implements CandidateTable {
     }
     return timePartDimensions;
   }
-
-  public ASTNode getJoinTree() {
-    return joinTree;
-  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
index b95595a..75aa3f4 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
@@ -61,7 +61,7 @@ class ColumnResolver implements ContextRewriter {
     }
     getColsForSelectTree(cubeql);
     getColsForWhereTree(cubeql);
-    getColsForTree(cubeql, cubeql.getJoinTree(), cubeql);
+    getColsForTree(cubeql, cubeql.getJoinAST(), cubeql);
     getColsForTree(cubeql, cubeql.getGroupByAST(), cubeql);
     getColsForTree(cubeql, cubeql.getHavingAST(), cubeql);
     getColsForTree(cubeql, cubeql.getOrderByAST(), cubeql);

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index 4034a54..1fd1d17 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -21,9 +21,7 @@ package org.apache.lens.cube.parse;
 
 import static org.apache.lens.cube.parse.CubeQueryConfUtil.*;
 
-import static org.apache.hadoop.hive.ql.parse.HiveParser.Identifier;
-import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_TABLE_OR_COL;
-import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_TMP_FILE;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 
 import static com.google.common.base.Preconditions.checkArgument;
 
@@ -50,16 +48,11 @@ import org.codehaus.jackson.map.ObjectMapper;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
-
-import lombok.AllArgsConstructor;
-import lombok.Data;
-import lombok.Getter;
-import lombok.Setter;
-import lombok.ToString;
+import lombok.*;
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
-public class CubeQueryContext implements TrackQueriedColumns {
+public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
   public static final String TIME_RANGE_FUNC = "time_range_in";
   public static final String NOW = "now";
   public static final String DEFAULT_TABLE = "_default_";
@@ -653,7 +646,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
     return null;
   }
 
-  public ASTNode getJoinTree() {
+  public ASTNode getJoinAST() {
     return qb.getParseInfo().getJoinExpr();
   }
 
@@ -688,8 +681,8 @@ public class CubeQueryContext implements TrackQueriedColumns {
   }
 
   String getQBFromString(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
-    String fromString = null;
-    if (getJoinTree() == null) {
+    String fromString;
+    if (getJoinAST() == null) {
       if (cube != null) {
         fromString = fact.getStorageString(getAliasForTableName(cube.getName()));
       } else {
@@ -858,6 +851,23 @@ public class CubeQueryContext implements TrackQueriedColumns {
   @Getter private Collection<CandidateFact> pickedFacts;
   @Getter private Collection<CandidateDim> pickedDimTables;
 
+  private void addRangeClauses(CandidateFact fact) throws LensException {
+    if (fact != null) {
+      // resolve timerange positions and replace it by corresponding where clause
+      for (TimeRange range : getTimeRanges()) {
+        for (Map.Entry<String, String> entry : fact.getRangeToStorageWhereMap().get(range).entrySet()) {
+          String table = entry.getKey();
+          String rangeWhere = entry.getValue();
+          if (!StringUtils.isBlank(rangeWhere)) {
+            ASTNode rangeAST = HQLParser.parseExpr(rangeWhere);
+            range.getParent().setChild(range.getChildIndex(), rangeAST);
+          }
+          fact.getStorgeWhereClauseMap().put(table, getWhereTree());
+        }
+      }
+    }
+  }
+
   public String toHQL() throws LensException {
     Set<CandidateFact> cfacts = pickCandidateFactToQuery();
     Map<Dimension, CandidateDim> dimsToQuery = pickCandidateDimsToQuery(dimensions);
@@ -872,11 +882,13 @@ public class CubeQueryContext implements TrackQueriedColumns {
         // copy ASTs for each fact
         for (CandidateFact cfact : cfacts) {
           cfact.copyASTs(this);
-          cfact.updateTimeranges(this);
-          factDimMap.put(cfact, new HashSet<Dimension>(dimsToQuery.keySet()));
+          factDimMap.put(cfact, new HashSet<>(dimsToQuery.keySet()));
         }
-      } else {
-        SingleFactHQLContext.addRangeClauses(this, cfacts.iterator().next());
+      }
+    }
+    if (cfacts != null) {
+      for (CandidateFact fact : cfacts) {
+        addRangeClauses(fact);
       }
     }
 
@@ -884,7 +896,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
     Set<Dimension> exprDimensions = new HashSet<Dimension>();
     if (cfacts != null) {
       for (CandidateFact cfact : cfacts) {
-        Set<Dimension> factExprDimTables = exprCtx.rewriteExprCtx(cfact, dimsToQuery, cfacts.size() > 1);
+        Set<Dimension> factExprDimTables = exprCtx.rewriteExprCtx(cfact, dimsToQuery, cfacts.size() > 1 ? cfact : this);
         exprDimensions.addAll(factExprDimTables);
         if (cfacts.size() > 1) {
           factDimMap.get(cfact).addAll(factExprDimTables);
@@ -892,7 +904,7 @@ public class CubeQueryContext implements TrackQueriedColumns {
       }
     } else {
       // dim only query
-      exprDimensions.addAll(exprCtx.rewriteExprCtx(null, dimsToQuery, false));
+      exprDimensions.addAll(exprCtx.rewriteExprCtx(null, dimsToQuery, this));
     }
     dimsToQuery.putAll(pickCandidateDimsToQuery(exprDimensions));
 
@@ -940,25 +952,22 @@ public class CubeQueryContext implements TrackQueriedColumns {
         }
       }
     }
-    hqlContext = createHQLContext(cfacts, dimsToQuery, factDimMap, this);
+    hqlContext = createHQLContext(cfacts, dimsToQuery, factDimMap);
     return hqlContext.toHQL();
   }
 
   private HQLContextInterface createHQLContext(Set<CandidateFact> facts, Map<Dimension, CandidateDim> dimsToQuery,
-    Map<CandidateFact, Set<Dimension>> factDimMap, CubeQueryContext query) throws LensException {
+    Map<CandidateFact, Set<Dimension>> factDimMap) throws LensException {
     if (facts == null || facts.size() == 0) {
-      return new DimOnlyHQLContext(dimsToQuery, query);
+      return new DimOnlyHQLContext(dimsToQuery, this, this);
     } else if (facts.size() == 1 && facts.iterator().next().getStorageTables().size() > 1) {
       //create single fact with multiple storage context
-      if (!conf.getBoolean(ENABLE_STORAGES_UNION, DEFAULT_ENABLE_STORAGES_UNION)) {
-        throw new LensException(LensCubeErrorCode.STORAGE_UNION_DISABLED.getLensErrorInfo());
-      }
-      return new SingleFactMultiStorageHQLContext(facts.iterator().next(), dimsToQuery, query);
+      return new SingleFactMultiStorageHQLContext(facts.iterator().next(), dimsToQuery, this, this);
     } else if (facts.size() == 1 && facts.iterator().next().getStorageTables().size() == 1) {
       // create single fact context
-      return new SingleFactHQLContext(facts.iterator().next(), dimsToQuery, query);
+      return new SingleFactSingleStorageHQLContext(facts.iterator().next(), dimsToQuery, this, this);
     } else {
-      return new MultiFactHQLContext(facts, dimsToQuery, factDimMap, query);
+      return new MultiFactHQLContext(facts, dimsToQuery, factDimMap, this);
     }
   }
 
@@ -979,10 +988,6 @@ public class CubeQueryContext implements TrackQueriedColumns {
     return tblAliasToColumns.get(getAliasForTableName(tblName));
   }
 
-  public void addColumnsQueried(AbstractCubeTable table, String column) {
-    addColumnsQueried(getAliasForTableName(table.getName()), column);
-  }
-
   public void addColumnsQueriedWithTimeDimCheck(String alias, String timeDimColumn) {
 
     if (!shouldReplaceTimeDimWithPart()) {

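One consequence of the addRangeClauses hunk above: what CandidateFact.updateTimeranges used to do once per fact now happens once per storage table. For each time range, the storage-resolved where fragment is parsed and spliced into the where AST at the range's recorded parent/child position, and the resulting full where tree is cached per storage table in storgeWhereClauseMap, which getWhereClause(storageTable) reads back when the per-storage contexts are built.
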
http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
index e0759b0..c1fd0a5 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
@@ -148,7 +148,7 @@ public class CubeQueryRewriter {
     // Resolve candidate fact tables and dimension tables for columns queried
     rewriters.add(candidateTblResolver);
     // Resolve aggregations and generate base select tree
-    rewriters.add(new AggregateResolver(conf));
+    rewriters.add(new AggregateResolver());
     rewriters.add(new GroupbyResolver(conf));
     rewriters.add(new FieldValidator());
     // Resolve joins and generate base join tree

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java
new file mode 100644
index 0000000..0997f37
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+import org.apache.lens.server.api.error.LensException;
+
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+
+import lombok.AllArgsConstructor;
+import lombok.Data;
+
+@Data
+@AllArgsConstructor
+public class DefaultQueryAST implements QueryAST {
+  private ASTNode selectAST, whereAST, groupByAST, havingAST, joinAST, orderByAST;
+  private Integer limitValue;
+
+  public String getSelectTree() {
+    return HQLParser.getString(selectAST);
+  }
+
+  public String getWhereTree() {
+    if (whereAST != null) {
+      return HQLParser.getString(whereAST);
+    }
+    return null;
+  }
+
+  public String getGroupByTree() {
+    if (groupByAST != null) {
+      return HQLParser.getString(groupByAST);
+    }
+    return null;
+  }
+
+
+  public String getHavingTree() {
+    if (havingAST != null) {
+      return HQLParser.getString(havingAST);
+    }
+    return null;
+  }
+
+  @Override
+  public String getOrderByTree() {
+    if (orderByAST != null) {
+      return HQLParser.getString(orderByAST);
+    }
+    return null;
+  }
+
+  public static DefaultQueryAST fromCandidateFact(CandidateFact fact, String storageTable, QueryAST ast) throws
+    LensException {
+    return new DefaultQueryAST(ast.getSelectAST(),
+      HQLParser.parseExpr(fact.getWhereClause(storageTable.substring(storageTable.indexOf(".") + 1))),
+      ast.getGroupByAST(), ast.getHavingAST(), ast.getJoinAST(), ast.getOrderByAST(), ast.getLimitValue());
+  }
+}

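A hypothetical use of the factory above, tying it to the union case this commit targets: one QueryAST per storage table, sharing every clause with the template except the where tree (the table name is illustrative, and the caller would declare throws LensException):

    // CandidateFact implements QueryAST, so the fact itself can be the template.
    QueryAST perStorage = DefaultQueryAST.fromCandidateFact(cfact, "c1_testfact", cfact);
    // perStorage reuses the template's select/group-by/having/join/order-by/limit,
    // while its where AST is parsed from the storage-specific clause that
    // addRangeClauses cached for this table.
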
http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
index a576f3a..5c8bd84 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
@@ -281,19 +281,15 @@ public class DenormalizationResolver implements ContextRewriter {
     }
 
     private void replaceReferencedColumns(CandidateFact cfact, boolean replaceFact) throws LensException {
+      QueryAST ast = cubeql;
       if (replaceFact
         && (tableToRefCols.get(cfact.getName()) != null && !tableToRefCols.get(cfact.getName()).isEmpty())) {
-        resolveClause(cubeql, cfact.getSelectAST());
-        resolveClause(cubeql, cfact.getWhereAST());
-        resolveClause(cubeql, cfact.getGroupByAST());
-        resolveClause(cubeql, cfact.getHavingAST());
-      } else {
-        resolveClause(cubeql, cubeql.getSelectAST());
-        resolveClause(cubeql, cubeql.getWhereAST());
-        resolveClause(cubeql, cubeql.getGroupByAST());
-        resolveClause(cubeql, cubeql.getHavingAST());
-
+        ast = cfact;
       }
+      resolveClause(cubeql, ast.getSelectAST());
+      resolveClause(cubeql, ast.getWhereAST());
+      resolveClause(cubeql, ast.getGroupByAST());
+      resolveClause(cubeql, ast.getHavingAST());
       resolveClause(cubeql, cubeql.getOrderByAST());
     }
 
@@ -320,11 +316,9 @@ public class DenormalizationResolver implements ContextRewriter {
         ASTNode newTableNode =
           new ASTNode(new CommonToken(HiveParser.Identifier, query.getAliasForTableName(refered.getDestTable())));
         tableNode.setChild(0, newTableNode);
-        newTableNode.setParent(tableNode);
 
         ASTNode newColumnNode = new ASTNode(new CommonToken(HiveParser.Identifier, refered.getRefColumn()));
         node.setChild(1, newColumnNode);
-        newColumnNode.setParent(node);
       } else {
         // recurse down
         for (int i = 0; i < node.getChildCount(); i++) {

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
index bcfc1f6..b253b94 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
@@ -44,7 +44,11 @@ abstract class DimHQLContext extends SimpleHQLContext {
   public CubeQueryContext getQuery() {
     return query;
   }
-
+  DimHQLContext(CubeQueryContext query, Map<Dimension, CandidateDim> dimsToQuery,
+    Set<Dimension> queriedDims, QueryAST ast) throws LensException {
+    this(query, dimsToQuery, queriedDims, ast.getSelectTree(), ast.getWhereTree(), ast.getGroupByTree(),
+      ast.getOrderByTree(), ast.getHavingTree(), ast.getLimitValue());
+  }
   DimHQLContext(CubeQueryContext query, Map<Dimension, CandidateDim> dimsToQuery,
     Set<Dimension> queriedDims, String select, String where,
     String groupby, String orderby, String having, Integer limit) throws LensException {

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
index 0c43d98..d22287b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
@@ -32,16 +32,15 @@ import org.apache.lens.server.api.error.LensException;
  */
 class DimOnlyHQLContext extends DimHQLContext {
 
-  DimOnlyHQLContext(Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query) throws LensException {
-    super(query, dimsToQuery, dimsToQuery.keySet(), query.getSelectTree(),
-      query.getWhereTree(), query.getGroupByTree(), query.getOrderByTree(),
-      query.getHavingTree(), query.getLimitValue());
+  DimOnlyHQLContext(Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query, QueryAST ast)
+    throws LensException {
+    this(dimsToQuery, dimsToQuery.keySet(), query, ast);
   }
 
-  DimOnlyHQLContext(Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query, String whereClause)
+  DimOnlyHQLContext(Map<Dimension, CandidateDim> dimsToQuery, Set<Dimension> dimsQueried,
+    CubeQueryContext query, QueryAST ast)
     throws LensException {
-    super(query, dimsToQuery, dimsToQuery.keySet(), query.getSelectTree(), whereClause, query.getGroupByTree(), query
-        .getOrderByTree(), query.getHavingTree(), query.getLimitValue());
+    super(query, dimsToQuery, dimsQueried, ast);
   }
 
   public String toHQL() throws LensException {
@@ -49,7 +48,7 @@ class DimOnlyHQLContext extends DimHQLContext {
   }
 
   protected String getFromTable() throws LensException {
-    if (query.getAutoJoinCtx() != null && query.getAutoJoinCtx().isJoinsResolved()) {
+    if (query.isAutoJoinResolved()) {
       return getDimsToQuery().get(query.getAutoJoinCtx().getAutoJoinTarget()).getStorageString(
         query.getAliasForTableName(query.getAutoJoinCtx().getAutoJoinTarget().getName()));
     } else {

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
index 776021d..26514d8 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
@@ -432,7 +432,7 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     public Set<Dimension> rewriteExprCtx(CandidateFact cfact, Map<Dimension, CandidateDim> dimsToQuery,
-      boolean replaceFact) throws LensException {
+      QueryAST queryAST) throws LensException {
       Set<Dimension> exprDims = new HashSet<Dimension>();
       if (!allExprsQueried.isEmpty()) {
         // pick expressions for fact
@@ -446,7 +446,7 @@ class ExpressionResolver implements ContextRewriter {
           }
         }
         // Replace picked expressions in all the base trees
-        replacePickedExpressions(cfact, replaceFact);
+        replacePickedExpressions(queryAST);
         for (Set<PickedExpression> peSet : pickedExpressions.values()) {
           for (PickedExpression pe : peSet) {
             exprDims.addAll(pe.pickedCtx.exprDims);
@@ -457,21 +457,13 @@ class ExpressionResolver implements ContextRewriter {
       return exprDims;
     }
 
-    private void replacePickedExpressions(CandidateFact cfact, boolean replaceFact)
+    private void replacePickedExpressions(QueryAST queryAST)
       throws LensException {
-      if (replaceFact) {
-        replaceAST(cubeql, cfact.getSelectAST());
-        replaceAST(cubeql, cfact.getWhereAST());
-        replaceAST(cubeql, cfact.getJoinTree());
-        replaceAST(cubeql, cfact.getGroupByAST());
-        replaceAST(cubeql, cfact.getHavingAST());
-      } else {
-        replaceAST(cubeql, cubeql.getSelectAST());
-        replaceAST(cubeql, cubeql.getWhereAST());
-        replaceAST(cubeql, cubeql.getJoinTree());
-        replaceAST(cubeql, cubeql.getGroupByAST());
-        replaceAST(cubeql, cubeql.getHavingAST());
-      }
+      replaceAST(cubeql, queryAST.getSelectAST());
+      replaceAST(cubeql, queryAST.getWhereAST());
+      replaceAST(cubeql, queryAST.getJoinAST());
+      replaceAST(cubeql, queryAST.getGroupByAST());
+      replaceAST(cubeql, queryAST.getHavingAST());
       replaceAST(cubeql, cubeql.getOrderByAST());
     }
 

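With the replaceFact flag gone, rewriteExprCtx writes the picked expressions into whichever QueryAST the caller hands in. A hedged sketch of the two call shapes this enables (names are illustrative; it also assumes, as the use of CandidateFact where a QueryAST is expected in MultiFactHQLContext below suggests, that both CandidateFact and the cube query context expose QueryAST):

  // Hedged sketch, not code from this patch; exprCtx/cfact/cubeql are illustrative names.
  // Multi-fact path: picked expressions land in the candidate fact's own ASTs.
  Set<Dimension> exprDims = exprCtx.rewriteExprCtx(cfact, dimsToQuery, cfact);
  // Single-fact path: picked expressions land in the query-level ASTs.
  Set<Dimension> exprDims2 = exprCtx.rewriteExprCtx(cfact, dimsToQuery, cubeql);
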
http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java
deleted file mode 100644
index 6c44233..0000000
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.lens.cube.metadata.Dimension;
-import org.apache.lens.server.api.error.LensException;
-
-import lombok.extern.slf4j.Slf4j;
-
-/**
- * HQL context class which passes all query strings from the fact and works with required dimensions for the fact.
- */
-@Slf4j
-public class FactHQLContext extends DimHQLContext {
-
-  private final CandidateFact fact;
-  private final Set<Dimension> factDims;
-
-  FactHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, Set<Dimension> factDims,
-    CubeQueryContext query) throws LensException {
-    super(query, dimsToQuery, factDims, fact.getSelectTree(), fact.getWhereTree(), fact.getGroupByTree(), null, fact
-      .getHavingTree(), null);
-    this.fact = fact;
-    this.factDims = factDims;
-    log.info("factDims:{} for fact:{}", factDims, fact);
-  }
-
-  @Override
-  protected Set<Dimension> getQueriedDimSet() {
-    return factDims;
-  }
-
-  @Override
-  protected CandidateFact getQueriedFact() {
-    return fact;
-  }
-
-  protected String getFromTable() throws LensException {
-    return query.getQBFromString(fact, getDimsToQuery());
-  }
-
-  public CandidateFact getFactToQuery() {
-    return fact;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
index da74713..9674f73 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
@@ -78,7 +78,6 @@ class GroupbyResolver implements ContextRewriter {
               if (groupbyAST != null) {
                 // groupby ast exists, add the expression to AST
                 groupbyAST.addChild(exprAST);
-                exprAST.setParent(groupbyAST);
               } else {
                 // no group by ast exist, create one
                 ASTNode newAST = new ASTNode(new CommonToken(TOK_GROUPBY));
@@ -153,7 +152,6 @@ class GroupbyResolver implements ContextRewriter {
       parent.setChild(i + 1, ch);
     }
     parent.setChild(index, child);
-    child.setParent(parent);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
index 7cea7d5..6c3d4c3 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
@@ -293,7 +293,6 @@ public final class HQLParser {
     if (original.getChildren() != null) {
       for (Object o : original.getChildren()) {
         ASTNode childCopy = copyAST((ASTNode) o);
-        childCopy.setParent(copy);
         copy.addChild(childCopy);
       }
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
index 1385584..de3a16e 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
@@ -87,8 +87,8 @@ class JoinResolver implements ContextRewriter {
     boolean joinResolverDisabled = cubeql.getConf().getBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS,
         CubeQueryConfUtil.DEFAULT_DISABLE_AUTO_JOINS);
     if (joinResolverDisabled) {
-      if (cubeql.getJoinTree() != null) {
-        cubeQB.setQbJoinTree(genJoinTree(cubeQB, cubeql.getJoinTree(), cubeql));
+      if (cubeql.getJoinAST() != null) {
+        cubeQB.setQbJoinTree(genJoinTree(cubeql.getJoinAST(), cubeql));
       }
     } else {
       autoResolveJoins(cubeql);
@@ -336,7 +336,7 @@ class JoinResolver implements ContextRewriter {
   }
 
   // Recursively find out join conditions
-  private QBJoinTree genJoinTree(QB qb, ASTNode joinParseTree, CubeQueryContext cubeql) throws LensException {
+  private QBJoinTree genJoinTree(ASTNode joinParseTree, CubeQueryContext cubeql) throws LensException {
     QBJoinTree joinTree = new QBJoinTree();
     JoinCond[] condn = new JoinCond[1];
 
@@ -388,7 +388,7 @@ class JoinResolver implements ContextRewriter {
 
     } else if (isJoinToken(left)) {
       // Left subtree is join token itself, so recurse down
-      QBJoinTree leftTree = genJoinTree(qb, left, cubeql);
+      QBJoinTree leftTree = genJoinTree(left, cubeql);
 
       joinTree.setJoinSrc(leftTree);
 
@@ -436,12 +436,9 @@ class JoinResolver implements ContextRewriter {
     return joinTree;
   }
 
-  private boolean isJoinToken(ASTNode node) {
-    if ((node.getToken().getType() == TOK_JOIN) || (node.getToken().getType() == TOK_LEFTOUTERJOIN)
+  private static boolean isJoinToken(ASTNode node) {
+    return (node.getToken().getType() == TOK_JOIN) || (node.getToken().getType() == TOK_LEFTOUTERJOIN)
       || (node.getToken().getType() == TOK_RIGHTOUTERJOIN) || (node.getToken().getType() == TOK_FULLOUTERJOIN)
-      || (node.getToken().getType() == TOK_LEFTSEMIJOIN) || (node.getToken().getType() == TOK_UNIQUEJOIN)) {
-      return true;
-    }
-    return false;
+      || (node.getToken().getType() == TOK_LEFTSEMIJOIN) || (node.getToken().getType() == TOK_UNIQUEJOIN);
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
index 113d8de..1a729f8 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
@@ -34,18 +34,24 @@ import com.google.common.collect.Lists;
  */
 class MultiFactHQLContext extends SimpleHQLContext {
 
-  private Map<Dimension, CandidateDim> dimsToQuery;
   private Set<CandidateFact> facts;
   private CubeQueryContext query;
-  private Map<CandidateFact, Set<Dimension>> factDimMap;
+  private Map<CandidateFact, SimpleHQLContext> factHQLContextMap = new HashMap<>();
 
   MultiFactHQLContext(Set<CandidateFact> facts, Map<Dimension, CandidateDim> dimsToQuery,
     Map<CandidateFact, Set<Dimension>> factDimMap, CubeQueryContext query) throws LensException {
     super();
     this.query = query;
     this.facts = facts;
-    this.dimsToQuery = dimsToQuery;
-    this.factDimMap = factDimMap;
+    for (CandidateFact fact : facts) {
+      if (fact.getStorageTables().size() > 1) {
+        factHQLContextMap.put(fact, new SingleFactMultiStorageHQLContext(fact, dimsToQuery, query, fact));
+      } else {
+        factHQLContextMap.put(fact,
+          new SingleFactSingleStorageHQLContext(fact, dimsToQuery, factDimMap.get(fact), query,
+            DefaultQueryAST.fromCandidateFact(fact, fact.getStorageTables().iterator().next(), fact)));
+      }
+    }
   }
 
   protected void setMissingExpressions() throws LensException {
@@ -78,8 +84,7 @@ class MultiFactHQLContext extends SimpleHQLContext {
   }
 
   private String getSelectString() throws LensException {
-    Map<Integer, List<Integer>> selectToFactIndex =
-      new HashMap<Integer, List<Integer>>(query.getSelectAST().getChildCount());
+    Map<Integer, List<Integer>> selectToFactIndex = new HashMap<>(query.getSelectAST().getChildCount());
     int fi = 1;
     for (CandidateFact fact : facts) {
       for (int ind : fact.getSelectIndices()) {
@@ -116,33 +121,14 @@ class MultiFactHQLContext extends SimpleHQLContext {
     return select.toString();
   }
 
-  public Map<Dimension, CandidateDim> getDimsToQuery() {
-    return dimsToQuery;
-  }
-
-  public Set<CandidateFact> getFactsToQuery() {
-    return facts;
-  }
-
   private String getFromString() throws LensException {
     StringBuilder fromBuilder = new StringBuilder();
     int aliasCount = 1;
-    Iterator<CandidateFact> iter = facts.iterator();
-    while (iter.hasNext()) {
-      CandidateFact fact = iter.next();
-      if (fact.getStorageTables().size() > 1) {
-        // Not supported right now.
-        throw new LensException(LensCubeErrorCode.STORAGE_UNION_DISABLED.getLensErrorInfo());
-      }
-      FactHQLContext facthql = new FactHQLContext(fact, dimsToQuery, factDimMap.get(fact), query);
-      fromBuilder.append("(");
-      fromBuilder.append(facthql.toHQL());
-      fromBuilder.append(")");
-      fromBuilder.append(" mq" + aliasCount);
-      aliasCount++;
-      if (iter.hasNext()) {
-        fromBuilder.append(" full outer join ");
-      }
+    String sep = "";
+    for (CandidateFact fact : facts) {
+      SimpleHQLContext facthql = factHQLContextMap.get(fact);
+      fromBuilder.append(sep).append("(").append(facthql.toHQL()).append(")").append(" mq").append(aliasCount++);
+      sep = " full outer join ";
     }
     CandidateFact firstFact = facts.iterator().next();
     if (!firstFact.getDimFieldIndices().isEmpty()) {

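getFromString now wraps each candidate fact's HQL in a parenthesized subquery and chains the subqueries with full outer joins, while getSelectString combines the shared dimension columns with COALESCE. The generated query therefore takes roughly this shape (schematic; the mq1/mq2 aliases and the null-safe <=> join condition match the expectations in TestCubeRewriter further down, though which fact lands behind which alias can vary):

  SELECT COALESCE(mq1.zipcode, mq2.zipcode) zipcode, mq1.msr4 msr4, mq2.msr15 msr15
  FROM (<HQL for the fact answering msr4>) mq1
    FULL OUTER JOIN (<HQL for the fact answering msr15>) mq2
      ON mq1.zipcode <=> mq2.zipcode
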
http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java
new file mode 100644
index 0000000..31680ca
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+
+
+interface QueryAST {
+
+  String getSelectTree();
+
+  String getWhereTree();
+
+  String getHavingTree();
+
+  String getOrderByTree();
+
+  String getGroupByTree();
+
+  Integer getLimitValue();
+
+  void setLimitValue(Integer integer);
+
+  /**
+   * @return the selectAST
+   */
+
+  ASTNode getSelectAST();
+
+  /**
+   * @param selectAST the selectAST to set
+   */
+
+  void setSelectAST(ASTNode selectAST);
+
+  /**
+   * @return the whereAST
+   */
+
+  ASTNode getWhereAST();
+
+  /**
+   * @param whereAST the whereAST to set
+   */
+
+  void setWhereAST(ASTNode whereAST);
+
+  /**
+   * @return the havingAST
+   */
+
+  ASTNode getHavingAST();
+
+  /**
+   * @param havingAST the havingAST to set
+   */
+
+  void setHavingAST(ASTNode havingAST);
+
+  ASTNode getGroupByAST();
+
+  void setGroupByAST(ASTNode havingAST);
+
+  ASTNode getJoinAST();
+
+  ASTNode getOrderByAST();
+
+  void setOrderByAST(ASTNode node);
+}
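
Any value class holding the parsed trees can satisfy this interface. The concrete implementation used by this patch, DefaultQueryAST, is referenced elsewhere in the commit but not shown in this excerpt; purely as a hypothetical sketch, an implementation built on the Lombok annotations this module already uses might look like:

  // Hypothetical example only; the real implementation in the patch is DefaultQueryAST.
  package org.apache.lens.cube.parse;   // same package as the (package-private) interface

  import org.apache.hadoop.hive.ql.parse.ASTNode;

  import lombok.AllArgsConstructor;
  import lombok.Data;

  @Data
  @AllArgsConstructor
  class SimpleQueryAST implements QueryAST {
    private ASTNode selectAST, whereAST, groupByAST, havingAST, joinAST, orderByAST;
    private Integer limitValue;

    // The *Tree variants return the string form of the corresponding AST
    // (null handling elided for brevity).
    public String getSelectTree()  { return HQLParser.getString(selectAST); }
    public String getWhereTree()   { return HQLParser.getString(whereAST); }
    public String getGroupByTree() { return HQLParser.getString(groupByAST); }
    public String getHavingTree()  { return HQLParser.getString(havingAST); }
    public String getOrderByTree() { return HQLParser.getString(orderByAST); }
  }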

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
deleted file mode 100644
index de52b0a..0000000
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import java.util.Map;
-
-import org.apache.lens.cube.metadata.Dimension;
-import org.apache.lens.cube.metadata.TimeRange;
-import org.apache.lens.server.api.error.LensException;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-
-/**
- * HQL context class which passes down all query strings to come from DimOnlyHQLContext and works with fact being
- * queried.
- * <p/>
- * Updates from string with join clause expanded
- */
-class SingleFactHQLContext extends DimOnlyHQLContext {
-
-  private final CandidateFact fact;
-  private String storageAlias;
-
-  SingleFactHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query)
-    throws LensException {
-    super(dimsToQuery, query);
-    this.fact = fact;
-  }
-
-  SingleFactHQLContext(CandidateFact fact, String storageAlias, Map<Dimension, CandidateDim> dimsToQuery,
-      CubeQueryContext query, String whereClause) throws LensException {
-    super(dimsToQuery, query, whereClause);
-    this.fact = fact;
-    this.storageAlias = storageAlias;
-  }
-
-
-  public CandidateFact getFactToQuery() {
-    return fact;
-  }
-
-  static void addRangeClauses(CubeQueryContext query, CandidateFact fact) throws LensException {
-    if (fact != null) {
-      // resolve timerange positions and replace it by corresponding where
-      // clause
-      for (TimeRange range : query.getTimeRanges()) {
-        for (Map.Entry<String, String> entry : fact.getRangeToStorageWhereMap().get(range).entrySet()) {
-          String table = entry.getValue();
-          String rangeWhere = entry.getKey();
-
-          if (!StringUtils.isBlank(rangeWhere)) {
-            ASTNode rangeAST = HQLParser.parseExpr(rangeWhere);
-            rangeAST.setParent(range.getParent());
-            range.getParent().setChild(range.getChildIndex(), rangeAST);
-          }
-          fact.getStorgeWhereClauseMap().put(table, query.getWhereTree());
-        }
-      }
-    }
-  }
-
-
-  @Override
-  protected String getFromTable() throws LensException {
-    if (getQuery().getAutoJoinCtx() != null && getQuery().getAutoJoinCtx().isJoinsResolved()) {
-      if (storageAlias != null) {
-        return storageAlias;
-      } else {
-        return fact.getStorageString(getQuery().getAliasForTableName(getQuery().getCube().getName()));
-      }
-    } else {
-      if (fact.getStorageTables().size() == 1) {
-        return getQuery().getQBFromString(fact, getDimsToQuery());
-      } else {
-        return storageAlias;
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
index 96b1d05..e531e6b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
@@ -19,6 +19,8 @@
 
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.parse.CubeQueryConfUtil.DEFAULT_ENABLE_STORAGES_UNION;
+import static org.apache.lens.cube.parse.CubeQueryConfUtil.ENABLE_STORAGES_UNION;
 import static org.apache.lens.cube.parse.HQLParser.*;
 
 import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
@@ -27,6 +29,7 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.Dimension;
 import org.apache.lens.server.api.error.LensException;
 
@@ -42,6 +45,7 @@ import lombok.Data;
 
 public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
 
+  private final QueryAST ast;
   int aliasCounter = 0;
 
   @Data
@@ -81,29 +85,33 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
 
   private Map<HashableASTNode, ASTNode> innerToOuterASTs = new HashMap<>();
 
-  SingleFactMultiStorageHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext query)
+  SingleFactMultiStorageHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery,
+    CubeQueryContext query, QueryAST ast)
     throws LensException {
     super(query, fact);
+    if (!query.getConf().getBoolean(ENABLE_STORAGES_UNION, DEFAULT_ENABLE_STORAGES_UNION)) {
+      throw new LensException(LensCubeErrorCode.STORAGE_UNION_DISABLED.getLensErrorInfo());
+    }
+    this.ast = ast;
     processSelectAST();
     processGroupByAST();
     processWhereAST();
     processHavingAST();
     processOrderByAST();
     processLimit();
-    setHqlContexts(getUnionContexts(fact, dimsToQuery, query));
+    setHqlContexts(getUnionContexts(fact, dimsToQuery, query, ast));
   }
 
   private void processSelectAST() {
-    query.getSelectFinalAliases().clear();
-    ASTNode originalSelectAST = copyAST(query.getSelectAST());
-    query.setSelectAST(new ASTNode(originalSelectAST.getToken()));
+    ASTNode originalSelectAST = copyAST(ast.getSelectAST());
+    ast.setSelectAST(new ASTNode(originalSelectAST.getToken()));
     ASTNode outerSelectAST = processExpression(originalSelectAST);
     setSelect(getString(outerSelectAST));
   }
 
   private void processGroupByAST() {
-    if (query.getGroupByAST() != null) {
-      setGroupby(getString(processExpression(query.getGroupByAST())));
+    if (ast.getGroupByAST() != null) {
+      setGroupby(getString(processExpression(ast.getGroupByAST())));
     }
   }
 
@@ -111,28 +119,29 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
     for (String storageTable : fact.getStorgeWhereClauseMap().keySet()) {
       ASTNode tree = parseExpr(fact.getStorgeWhereClauseMap().get(storageTable));
       ASTNode replaced = replaceAST(tree);
+      //TODO: optimize parse/unparse cycle
       fact.getStorgeWhereClauseMap().put(storageTable, getString(replaced));
     }
   }
 
   private void processHavingAST() throws LensException {
-    if (query.getHavingAST() != null) {
-      setHaving(getString(processExpression(query.getHavingAST())));
-      query.setHavingAST(null);
+    if (ast.getHavingAST() != null) {
+      setHaving(getString(processExpression(ast.getHavingAST())));
+      ast.setHavingAST(null);
     }
   }
 
 
   private void processOrderByAST() {
-    if (query.getOrderByAST() != null) {
-      setOrderby(getString(processExpression(query.getOrderByAST())));
-      query.setOrderByAST(null);
+    if (ast.getOrderByAST() != null) {
+      setOrderby(getString(processExpression(ast.getOrderByAST())));
+      ast.setOrderByAST(null);
     }
   }
 
   private void processLimit() {
-    setLimit(query.getLimitValue());
-    query.setLimitValue(null);
+    setLimit(ast.getLimitValue());
+    ast.setLimitValue(null);
   }
 
   /*
@@ -171,6 +180,7 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
       addToInnerSelectAST(innerSelectExprAST);
       ASTNode dotAST = getDotAST(query.getCube().getName(), alias);
       ASTNode outerAST = new ASTNode(new CommonToken(TOK_FUNCTION));
+      //TODO: take care of non-transitive aggregate functions
       outerAST.addChild(new ASTNode(new CommonToken(Identifier, astNode.getChild(0).getText())));
       outerAST.addChild(dotAST);
       innerToOuterASTs.put(new HashableASTNode(innerSelectASTWithoutAlias), outerAST);
@@ -225,10 +235,10 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
   }
 
   private void addToInnerSelectAST(ASTNode selectExprAST) {
-    if (query.getSelectAST() == null) {
-      query.setSelectAST(new ASTNode(new CommonToken(TOK_SELECT)));
+    if (ast.getSelectAST() == null) {
+      ast.setSelectAST(new ASTNode(new CommonToken(TOK_SELECT)));
     }
-    query.getSelectAST().addChild(selectExprAST);
+    ast.getSelectAST().addChild(selectExprAST);
   }
 
   private ASTNode getDotAST(String tableAlias, String fieldAlias) {
@@ -245,13 +255,13 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
   }
 
   private static ArrayList<HQLContextInterface> getUnionContexts(CandidateFact fact, Map<Dimension, CandidateDim>
-    dimsToQuery, CubeQueryContext query)
+    dimsToQuery, CubeQueryContext query, QueryAST ast)
     throws LensException {
     ArrayList<HQLContextInterface> contexts = new ArrayList<>();
     String alias = query.getAliasForTableName(query.getCube().getName());
     for (String storageTable : fact.getStorageTables()) {
-      SingleFactHQLContext ctx = new SingleFactHQLContext(fact, storageTable + " " + alias, dimsToQuery, query,
-        fact.getWhereClause(storageTable.substring(storageTable.indexOf(".") + 1)));
+      SingleFactSingleStorageHQLContext ctx = new SingleFactSingleStorageHQLContext(fact, storageTable + " " + alias,
+        dimsToQuery, query, DefaultQueryAST.fromCandidateFact(fact, storageTable, ast));
       contexts.add(ctx);
     }
     return contexts;
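
Taken together, the steps above rewrite every selected expression into an inner expression with a generated alias plus an outer expression over that alias (for aggregate functions, an outer aggregate over the cube alias, per processExpression), and getUnionContexts builds one single-storage context per storage table. Conceptually, a fact spread over two storages is answered along these lines (schematic only; the alias names and the exact union assembly inside UnionHQLContext are not shown in this excerpt):

  SELECT <group-by columns>, <re-aggregated measures>
  FROM (
    <inner HQL against storage table 1>
    UNION ALL
    <inner HQL against storage table 2>
  ) <cube alias>
  GROUP BY <group-by columns>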

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactSingleStorageHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactSingleStorageHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactSingleStorageHQLContext.java
new file mode 100644
index 0000000..b1a3b3f
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactSingleStorageHQLContext.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.lens.cube.metadata.Dimension;
+import org.apache.lens.server.api.error.LensException;
+
+/**
+ * HQL context class which passes down all query strings from DimOnlyHQLContext and works with the fact being
+ * queried.
+ * <p/>
+ * Updates the from string with the join clause expanded.
+ */
+class SingleFactSingleStorageHQLContext extends DimOnlyHQLContext {
+
+  private final CandidateFact fact;
+  private final Set<Dimension> queriedDimSet;
+  private String storageAlias;
+
+  SingleFactSingleStorageHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery,
+    CubeQueryContext query, QueryAST ast)
+    throws LensException {
+    this(fact, dimsToQuery, dimsToQuery.keySet(), query, ast);
+  }
+
+  SingleFactSingleStorageHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery,
+    Set<Dimension> dimsQueried, CubeQueryContext query, QueryAST ast)
+    throws LensException {
+    super(dimsToQuery, dimsQueried, query, ast);
+    this.fact = fact;
+    this.queriedDimSet = dimsQueried;
+  }
+
+  SingleFactSingleStorageHQLContext(CandidateFact fact, String storageAlias, Map<Dimension, CandidateDim> dimsToQuery,
+    CubeQueryContext query, QueryAST ast) throws LensException {
+    this(fact, dimsToQuery, query, ast);
+    this.storageAlias = storageAlias;
+  }
+
+  @Override
+  protected String getFromTable() throws LensException {
+    if (getQuery().isAutoJoinResolved()) {
+      if (storageAlias != null) {
+        return storageAlias;
+      } else {
+        return fact.getStorageString(query.getAliasForTableName(query.getCube().getName()));
+      }
+    } else {
+      if (fact.getStorageTables().size() == 1) {
+        return getQuery().getQBFromString(fact, getDimsToQuery());
+      } else {
+        return storageAlias;
+      }
+    }
+  }
+
+  @Override
+  protected CandidateFact getQueriedFact() {
+    return fact;
+  }
+
+  @Override
+  public Set<Dimension> getQueriedDimSet() {
+    return queriedDimSet;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index 62cc071..14def15 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -19,14 +19,10 @@
 package org.apache.lens.cube.parse;
 
 import static org.apache.lens.cube.metadata.DateUtil.WSPACE;
-import static org.apache.lens.cube.metadata.MetastoreUtil.getFactOrDimtableStorageTableName;
-import static org.apache.lens.cube.metadata.MetastoreUtil.getStoragetableEndTimesKey;
-import static org.apache.lens.cube.metadata.MetastoreUtil.getStoragetableStartTimesKey;
+import static org.apache.lens.cube.metadata.MetastoreUtil.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.*;
 import static org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.*;
-import static org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode.PART_COL_DOES_NOT_EXIST;
-import static org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode.RANGE_NOT_ANSWERABLE;
-import static org.apache.lens.cube.parse.StorageUtil.joinWithAnd;
+import static org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode.*;
 
 import java.text.DateFormat;
 import java.text.ParseException;
@@ -36,10 +32,7 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.lens.cube.metadata.*;
-import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
-import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCause;
-import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode;
-import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipUpdatePeriodCode;
+import org.apache.lens.cube.parse.CandidateTablePruneCause.*;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
@@ -49,7 +42,6 @@ import org.apache.hadoop.util.ReflectionUtils;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
-
 import lombok.extern.slf4j.Slf4j;
 
 /**
@@ -361,9 +353,9 @@ class StorageTableResolver implements ContextRewriter {
   private void resolveFactStoragePartitions(CubeQueryContext cubeql) throws LensException {
     // Find candidate tables wrt supported storages
     Iterator<CandidateFact> i = cubeql.getCandidateFacts().iterator();
-    Map<TimeRange, String> whereClauseForFallback = new LinkedHashMap<TimeRange, String>();
     while (i.hasNext()) {
       CandidateFact cfact = i.next();
+      Map<TimeRange, String> whereClauseForFallback = new LinkedHashMap<TimeRange, String>();
       List<FactPartition> answeringParts = new ArrayList<>();
       Map<String, SkipStorageCause> skipStorageCauses = skipStorageCausesPerFact.get(cfact.fact);
       if (skipStorageCauses == null) {
@@ -434,9 +426,6 @@ class StorageTableResolver implements ContextRewriter {
         cfact.incrementPartsQueried(rangeParts.size());
         answeringParts.addAll(rangeParts);
         cfact.getPartsQueried().addAll(rangeParts);
-        String rangeWhereClause = rangeWriter.getTimeRangeWhereClause(cubeql,
-          cubeql.getAliasForTableName(cubeql.getCube().getName()), rangeParts);
-        cfact.getRangeToWhereClause().put(range, joinWithAnd(rangeWhereClause, extraWhereClause.toString()));
       }
       if (!unsupportedTimeDims.isEmpty()) {
         log.info("Not considering fact table:{} as it doesn't support time dimensions: {}", cfact.fact,
@@ -483,24 +472,27 @@ class StorageTableResolver implements ContextRewriter {
       Set<String> storageTables = new LinkedHashSet<>();
       storageTables.addAll(minimalStorageTables.keySet());
       cfact.setStorageTables(storageTables);
-
       // Update range->storage->partitions with time range where clause
       for (TimeRange trange : cfact.getRangeToStoragePartMap().keySet()) {
-        Map<String, String> rangeToWhere = new HashMap<String, String>();
+        Map<String, String> rangeToWhere = new HashMap<>();
         for (Map.Entry<String, Set<FactPartition>> entry : minimalStorageTables.entrySet()) {
           String table = entry.getKey();
           Set<FactPartition> minimalParts = entry.getValue();
 
           LinkedHashSet<FactPartition> rangeParts = cfact.getRangeToStoragePartMap().get(trange).get(table);
-          LinkedHashSet<FactPartition> minimalPartsCopy = new LinkedHashSet<FactPartition>(minimalParts);
-          minimalPartsCopy.retainAll(rangeParts);
+          LinkedHashSet<FactPartition> minimalPartsCopy = Sets.newLinkedHashSet();
+
+          if (rangeParts != null) {
+            minimalPartsCopy.addAll(minimalParts);
+            minimalPartsCopy.retainAll(rangeParts);
+          }
           if (!StringUtils.isEmpty(whereClauseForFallback.get(trange))) {
-            rangeToWhere.put(
-              rangeWriter.getTimeRangeWhereClause(cubeql, cubeql.getAliasForTableName(cubeql.getCube().getName()),
-                minimalPartsCopy) + " and  " + whereClauseForFallback.get(trange), table);
+            rangeToWhere.put(table, "(("
+              + rangeWriter.getTimeRangeWhereClause(cubeql, cubeql.getAliasForTableName(cubeql.getCube().getName()),
+                minimalPartsCopy) + ") and  (" + whereClauseForFallback.get(trange) + "))");
           } else {
-            rangeToWhere.put(rangeWriter.getTimeRangeWhereClause(cubeql,
-              cubeql.getAliasForTableName(cubeql.getCube().getName()), minimalPartsCopy), table);
+            rangeToWhere.put(table, rangeWriter.getTimeRangeWhereClause(cubeql,
+              cubeql.getAliasForTableName(cubeql.getCube().getName()), minimalPartsCopy));
           }
         }
         cfact.getRangeToStorageWhereMap().put(trange, rangeToWhere);
@@ -592,8 +584,7 @@ class StorageTableResolver implements ContextRewriter {
     int lookAheadNumParts =
       conf.getInt(CubeQueryConfUtil.getLookAheadPTPartsKey(interval), CubeQueryConfUtil.DEFAULT_LOOK_AHEAD_PT_PARTS);
 
-    TimeRange.Iterable.Iterator iter = TimeRange.iterable(ceilFromDate, floorToDate, interval, 1)
-      .iterator();
+    TimeRange.Iterable.Iterator iter = TimeRange.iterable(ceilFromDate, floorToDate, interval, 1).iterator();
     // add partitions from ceilFrom to floorTo
     while (iter.hasNext()) {
       Date dt = iter.next();

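Two behavioural points in this hunk are easy to miss: whereClauseForFallback now lives inside the per-fact loop, so one fact's fallback clause can no longer leak into the next fact's, and rangeToWhere is flipped to map table -> where clause (previously where clause -> table) with both operands parenthesized. A range that needs a fallback clause is therefore rendered as a well-formed conjunction of the form:

  ((<rangeWriter where clause over the minimal partitions>) and  (<fallback where clause for the range>))

(the doubled space after "and" mirrors the string literal in the code).
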
http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/test/java/org/apache/lens/cube/metadata/DateFactory.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/DateFactory.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/DateFactory.java
index 87e4ce3..e7b9403 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/DateFactory.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/DateFactory.java
@@ -26,6 +26,10 @@ import java.util.Calendar;
 import java.util.Date;
 import java.util.HashMap;
 
+import org.apache.hadoop.util.StringUtils;
+
+import com.google.common.collect.Lists;
+
 public class DateFactory {
   private DateFactory() {
 
@@ -143,6 +147,7 @@ public class DateFactory {
   // Time Ranges
   public static final String LAST_HOUR_TIME_RANGE;
   public static final String TWO_DAYS_RANGE;
+  public static final String TWO_DAYS_RANGE_SPLIT_OVER_UPDATE_PERIODS;
   public static final String TWO_DAYS_RANGE_TTD;
   public static final String TWO_DAYS_RANGE_TTD_BEFORE_4_DAYS;
   public static final String TWO_DAYS_RANGE_TTD2;
@@ -192,5 +197,11 @@ public class DateFactory {
 
     // calculate LAST_HOUR_TIME_RANGE
     LAST_HOUR_TIME_RANGE = getTimeRangeString(HOURLY, -1, 0);
+
+    TWO_DAYS_RANGE_SPLIT_OVER_UPDATE_PERIODS = StringUtils.join(" OR ", Lists.newArrayList(
+      getTimeRangeString(getDateStringWithOffset(HOURLY, -48), getDateStringWithOffset(DAILY, -1)),
+      getTimeRangeString(getDateStringWithOffset(DAILY, 0), getDateStringWithOffset(HOURLY, 0)),
+      getTimeRangeString(getDateStringWithOffset(DAILY, -1), getDateStringWithOffset(DAILY, 0))
+    ));
   }
 }
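
The new constant covers the same two-day window as TWO_DAYS_RANGE, but split into three adjacent ranges OR'ed together so the pieces line up with different update periods (hourly for the partial days, daily for the whole day in between). Schematically, with placeholder timestamps and time column (the real values come from getTimeRangeString/getDateStringWithOffset at class-initialization time):

  time_range_in(<timedim>, '<now minus 48 hours>', '<yesterday 00:00>')
    OR time_range_in(<timedim>, '<today 00:00>', '<current hour>')
    OR time_range_in(<timedim>, '<yesterday 00:00>', '<today 00:00>')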

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 3f01dbe..ad20ae1 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -512,7 +512,7 @@ public class CubeTestSetup {
       "No aggregateMsr", null, null, null));
     cubeMeasures.add(new ColumnMeasure(new FieldSchema("newmeasure", "bigint", "measure available  from now"),
       "New measure", null, null, null, NOW, null, 100.0));
-    cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr15", "int", "first measure"), "Measure15", null, "SUM",
+    cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr15", "int", "fifteenth measure"), "Measure15", null, "SUM",
       "RS"));
 
     cubeDimensions = new HashSet<CubeDimAttribute>();

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index a5886dc..5b44f95 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -50,6 +50,7 @@ import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
 import com.google.common.base.Splitter;
+import com.google.common.collect.Sets;
 import lombok.Getter;
 
 public class TestBaseCubeQueries extends TestQueryRewrite {
@@ -546,17 +547,24 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     assertEquals(ctx.getCandidateFactSets().size(), 1);
     assertEquals(ctx.getCandidateFactSets().iterator().next().size(), 1);
     CandidateFact cfact = ctx.getCandidateFactSets().iterator().next().iterator().next();
-    assertEquals(cfact.getRangeToWhereClause().size(), 2);
-    for(Map.Entry<TimeRange, String> entry: cfact.getRangeToWhereClause().entrySet()) {
+
+    assertEquals(cfact.getRangeToStoragePartMap().size(), 2);
+    Set<String> storages = Sets.newHashSet();
+    for(Map<String, String> entry: cfact.getRangeToStorageWhereMap().values()) {
+      storages.addAll(entry.keySet());
+    }
+    assertEquals(storages.size(), 1);
+    String storage = storages.iterator().next();
+    for(Map.Entry<TimeRange, Map<String, String>> entry: cfact.getRangeToStorageWhereMap().entrySet()) {
       if (entry.getKey().getPartitionColumn().equals("dt")) {
-        ASTNode parsed = HQLParser.parseExpr(entry.getValue());
+        ASTNode parsed = HQLParser.parseExpr(entry.getValue().get(storage));
         assertEquals(parsed.getToken().getType(), KW_AND);
-        assertTrue(entry.getValue().substring(((CommonToken) parsed.getToken()).getStopIndex() + 1).toLowerCase()
-          .contains(dTimeWhereClause));
-        assertFalse(entry.getValue().substring(0, ((CommonToken) parsed.getToken()).getStartIndex()).toLowerCase()
-          .contains("and"));
+        assertTrue(entry.getValue().get(storage).substring(((CommonToken) parsed.getToken()).getStopIndex() + 1)
+          .toLowerCase().contains(dTimeWhereClause));
+        assertFalse(entry.getValue().get(storage).substring(0, ((CommonToken) parsed.getToken()).getStartIndex())
+          .toLowerCase().contains("and"));
       } else if (entry.getKey().getPartitionColumn().equals("ttd")) {
-        assertFalse(entry.getValue().toLowerCase().contains("and"));
+        assertFalse(entry.getValue().get(storage).toLowerCase().contains("and"));
       } else {
         throw new LensException("Unexpected");
       }

http://git-wip-us.apache.org/repos/asf/lens/blob/04f5a822/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 9a08735..f02cdb0 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -41,7 +41,6 @@ import org.apache.lens.server.api.LensServerAPITestUtil;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.time.DateUtils;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -58,7 +57,6 @@ import com.google.common.base.Splitter;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
-
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
@@ -410,7 +408,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
           return getWhereForMonthlyDailyAndHourly2monthsUnionQuery(storage);
         }
       };
-      try{
+      try {
         rewrite("select cityid as `City ID`, msr8, msr7 as `Third measure` "
           + "from testCube where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
         fail("Union feature is disabled, should have failed");
@@ -501,6 +499,35 @@ public class TestCubeRewriter extends TestQueryRewrite {
   }
 
   @Test
+  public void testMultiFactMultiStorage() throws ParseException, LensException {
+    Configuration conf = LensServerAPITestUtil.getConfiguration(
+      CubeQueryConfUtil.ENABLE_STORAGES_UNION, true,
+      CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1,C2",
+      getValidStorageTablesKey("testfact"), "C1_testFact,C2_testFact",
+      getValidUpdatePeriodsKey("testfact", "C1"), "HOURLY",
+      getValidUpdatePeriodsKey("testfact", "C2"), "DAILY",
+      getValidUpdatePeriodsKey("testfact2_raw", "C1"), "YEARLY",
+      getValidUpdatePeriodsKey("testfact2_raw", "C2"), "YEARLY");
+    CubeTestSetup.getStorageToUpdatePeriodMap().put("c1_testfact", Lists.newArrayList(HOURLY));
+    CubeTestSetup.getStorageToUpdatePeriodMap().put("c2_testfact", Lists.newArrayList(DAILY));
+    String whereCond = "zipcode = 'a' and cityid = 'b' and (" + TWO_DAYS_RANGE_SPLIT_OVER_UPDATE_PERIODS + ")";
+    String hqlQuery = rewrite("cube select zipcode, count(msr4), sum(msr15) from testCube where " + whereCond, conf);
+    System.out.println(hqlQuery);
+    String possibleStart1 = "SELECT COALESCE(mq1.zipcode, mq2.zipcode) zipcode, mq1.msr4 msr4, mq2.msr15 msr15 FROM ";
+    String possibleStart2 = "SELECT COALESCE(mq1.zipcode, mq2.zipcode) zipcode, mq2.msr4 msr4, mq1.msr15 msr15 FROM ";
+
+    assertTrue(hqlQuery.startsWith(possibleStart1) || hqlQuery.startsWith(possibleStart2));
+    compareContains(rewrite("cube select zipcode as `zipcode`, sum(msr15) as `msr15` from testcube where " + whereCond,
+      conf), hqlQuery);
+    compareContains(rewrite("cube select zipcode as `zipcode`, count(msr4) as `msr4` from testcube where " + whereCond,
+      conf), hqlQuery);
+    assertTrue(hqlQuery.endsWith("on mq1.zipcode <=> mq2.zipcode"));
+    // No time_range_in should be remaining
+    assertFalse(hqlQuery.contains("time_range_in"));
+    //TODO: handle having after LENS-813; also handle order by and limit
+  }
+
+  @Test
   public void testCubeWhereQueryWithMultipleTables() throws Exception {
     Configuration conf = getConf();
     conf.setBoolean(CubeQueryConfUtil.ENABLE_STORAGES_UNION, true);
@@ -1120,18 +1147,18 @@ public class TestCubeRewriter extends TestQueryRewrite {
       new HashMap<String, List<CandidateTablePruneCause>>() {
         {
           put("statetable", Arrays.asList(CandidateTablePruneCause.noCandidateStorages(
-              new HashMap<String, SkipStorageCause>() {
-                {
-                  put("c1_statetable", new SkipStorageCause(SkipStorageCode.NO_PARTITIONS));
-                }
-              }))
+            new HashMap<String, SkipStorageCause>() {
+              {
+                put("c1_statetable", new SkipStorageCause(SkipStorageCode.NO_PARTITIONS));
+              }
+            }))
           );
           put("statetable_partitioned", Arrays.asList(CandidateTablePruneCause.noCandidateStorages(
-              new HashMap<String, SkipStorageCause>() {
-                {
-                  put("C3_statetable_partitioned", new SkipStorageCause(SkipStorageCode.UNSUPPORTED));
-                }
-              }))
+            new HashMap<String, SkipStorageCause>() {
+              {
+                put("C3_statetable_partitioned", new SkipStorageCause(SkipStorageCode.UNSUPPORTED));
+              }
+            }))
           );
         }
       }


[40/50] [abbrv] lens git commit: LENS-270 : The exception thrown for no candidate fact should contain only brief error

Posted by sh...@apache.org.
LENS-270 : The exception thrown for no candidate fact should contain only brief error


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/36166a2e
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/36166a2e
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/36166a2e

Branch: refs/heads/LENS-581
Commit: 36166a2e58a2a89bd97dc8595cb7920fbf4253d8
Parents: bf4c0be
Author: Sushil Mohanty <su...@apache.org>
Authored: Mon Dec 14 09:57:45 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Mon Dec 14 09:57:45 2015 +0530

----------------------------------------------------------------------
 .../NoCandidateFactAvailableException.java      | 48 ++++++++++++++++++++
 .../lens/cube/parse/CubeQueryContext.java       |  4 +-
 .../org/apache/lens/cube/parse/PruneCauses.java | 12 +++++
 .../lens/cube/parse/TestBaseCubeQueries.java    | 19 +++++++-
 .../lens/cube/parse/TestCubeRewriter.java       |  7 ++-
 .../cube/parse/TestDenormalizationResolver.java |  4 +-
 .../lens/cube/parse/TestTimeRangeResolver.java  |  4 +-
 7 files changed, 91 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/36166a2e/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java b/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
new file mode 100644
index 0000000..b2568ff
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.error;
+
+import org.apache.lens.cube.metadata.CubeFactTable;
+import org.apache.lens.cube.parse.PruneCauses;
+import org.apache.lens.server.api.error.LensException;
+
+
+public class NoCandidateFactAvailableException extends LensException {
+
+  private final PruneCauses<CubeFactTable> briefAndDetailedError;
+
+  public NoCandidateFactAvailableException(PruneCauses<CubeFactTable> briefAndDetailedError) {
+    super(LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo(), briefAndDetailedError.getBriefCause());
+    this.briefAndDetailedError = briefAndDetailedError;
+  }
+
+  public PruneCauses.BriefAndDetailedError getJsonMessage() {
+    return briefAndDetailedError.toJsonObject();
+  }
+
+  @Override
+  public int compareTo(LensException e) {
+    //Compare the max CandidateTablePruneCode coming from different instances.
+    if (e instanceof NoCandidateFactAvailableException) {
+      return briefAndDetailedError.getMaxCause().compareTo(
+               ((NoCandidateFactAvailableException) e).briefAndDetailedError.getMaxCause());
+    }
+    return super.compareTo(e);
+  }
+}
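
Callers that previously had to dig the detailed causes out of the exception message can now downcast and read them structurally; a sketch of the consumption pattern, mirroring how the updated tests below use it:

  // e is the LensException raised during query rewrite
  if (e instanceof NoCandidateFactAvailableException) {
    PruneCauses.BriefAndDetailedError causes =
        ((NoCandidateFactAvailableException) e).getJsonMessage();
    String brief = causes.getBrief();   // one-line summary, also the exception message
    // per-table detail, e.g. causes.getDetails().get("testfact2_base")
  }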

http://git-wip-us.apache.org/repos/asf/lens/blob/36166a2e/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index cf114c9..f75a6b9 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -32,6 +32,7 @@ import java.io.IOException;
 import java.util.*;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.cube.error.NoCandidateFactAvailableException;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.server.api.error.LensException;
@@ -845,7 +846,8 @@ public class CubeQueryContext implements TrackQueriedColumns {
             }
           }
         }
-        throw new LensException(LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo(), reason);
+        log.error("Query rewrite failed due to NO_CANDIDATE_FACT_AVAILABLE, Cause {}", factPruningMsgs.toJsonObject());
+        throw new NoCandidateFactAvailableException(factPruningMsgs);
       }
     }
     return facts;

http://git-wip-us.apache.org/repos/asf/lens/blob/36166a2e/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
index 7a92b3b..9b5a52f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
@@ -41,6 +41,8 @@ public class PruneCauses<T extends AbstractCubeTable> extends HashMap<T, List<Ca
   private final HashMap<CandidateTablePruneCause, List<T>> reversed = reverse();
   @Getter(lazy = true)
   private final HashMap<String, List<CandidateTablePruneCause>> compact = computeCompact();
+  @Getter(lazy = true)
+  private final CandidateTablePruneCode maxCause  = computeMaxCause();
 
   private HashMap<String, List<CandidateTablePruneCause>> computeCompact() {
     HashMap<String, List<CandidateTablePruneCause>> detailedMessage = Maps.newHashMap();
@@ -81,6 +83,16 @@ public class PruneCauses<T extends AbstractCubeTable> extends HashMap<T, List<Ca
     return new BriefAndDetailedError(getBriefCause(), getCompact());
   }
 
+  private CandidateTablePruneCode computeMaxCause() {
+    CandidateTablePruneCode maxCause = CandidateTablePruneCode.values()[0];
+    for (CandidateTablePruneCause cause : getReversed().keySet()) {
+      if (cause.getCause().compareTo(maxCause) > 0) {
+        maxCause = cause.getCause();
+      }
+    }
+    return maxCause;
+  }
+
   public String getBriefCause() {
     CandidateTablePruneCode maxCause = CandidateTablePruneCode.values()[0];
     for (CandidateTablePruneCause cause : getReversed().keySet()) {

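getMaxCause is what NoCandidateFactAvailableException.compareTo consults. CandidateTablePruneCode is an enum, and Enum.compareTo is defined as the ordinal difference, which is exactly why the new test below expects 8 when a COLUMN_NOT_FOUND failure (ordinal 9) is compared against a FACT_NOT_AVAILABLE_IN_RANGE failure (ordinal 1):

  // Enum.compareTo(other) == this.ordinal() - other.ordinal()
  CandidateTablePruneCode a = CandidateTablePruneCode.COLUMN_NOT_FOUND;            // ordinal 9
  CandidateTablePruneCode b = CandidateTablePruneCode.FACT_NOT_AVAILABLE_IN_RANGE; // ordinal 1
  assert a.compareTo(b) == 8;
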
http://git-wip-us.apache.org/repos/asf/lens/blob/36166a2e/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index 97c6d08..a5886dc 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -34,6 +34,7 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.cube.error.NoCandidateFactAvailableException;
 import org.apache.lens.cube.metadata.TimeRange;
 import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
@@ -67,6 +68,18 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
   }
 
   @Test
+  public void testNoCandidateFactAvailableExceptionCompareTo() throws Exception {
+    //maxCause : COLUMN_NOT_FOUND, Ordinal : 9
+    NoCandidateFactAvailableException ne1 = (NoCandidateFactAvailableException)
+            getLensExceptionInRewrite("select dim1, test_time_dim, msr3, msr13 from basecube where "
+            + TWO_DAYS_RANGE, conf);
+    //maxCause : FACT_NOT_AVAILABLE_IN_RANGE, Ordinal : 1
+    NoCandidateFactAvailableException ne2 = (NoCandidateFactAvailableException)
+            getLensExceptionInRewrite("cube select dim1 from " + cubeName + " where " + LAST_YEAR_RANGE, getConf());
+    assertEquals(ne1.compareTo(ne2), 8);
+  }
+
+  @Test
   public void testColumnErrors() throws Exception {
     LensException e;
 
@@ -78,7 +91,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
       + TWO_DAYS_RANGE, conf);
     assertEquals(e.getErrorCode(),
         LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
-    PruneCauses.BriefAndDetailedError pruneCauses = extractPruneCause(e);
+    NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) e;
+    PruneCauses.BriefAndDetailedError pruneCauses = ne.getJsonMessage();
     String regexp = String.format(CandidateTablePruneCause.CandidateTablePruneCode.COLUMN_NOT_FOUND.errorFormat,
       "Column Sets: (.*?)", "queriable together");
     Matcher matcher = Pattern.compile(regexp).matcher(pruneCauses.getBrief());
@@ -494,7 +508,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
     LensException exc =
       getLensExceptionInRewrite("cube select msr12 from basecube where " + TWO_DAYS_RANGE, conf);
-    PruneCauses.BriefAndDetailedError pruneCause = extractPruneCause(exc);
+    NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) exc;
+    PruneCauses.BriefAndDetailedError pruneCause = ne.getJsonMessage();
     assertTrue(pruneCause.getBrief().contains("Missing partitions"));
     assertEquals(pruneCause.getDetails().get("testfact2_base").iterator().next().getCause(), MISSING_PARTITIONS);
     assertEquals(pruneCause.getDetails().get("testfact2_base").iterator().next().getMissingPartitions().size(), 1);

http://git-wip-us.apache.org/repos/asf/lens/blob/36166a2e/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index 0f05556..802ff42 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -32,6 +32,7 @@ import java.text.SimpleDateFormat;
 import java.util.*;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.cube.error.NoCandidateFactAvailableException;
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCause;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode;
@@ -186,7 +187,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     LensException th = getLensExceptionInRewrite(
       "select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
     assertEquals(th.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
-    PruneCauses.BriefAndDetailedError pruneCauses = extractPruneCause(th);
+    NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) th;
+    PruneCauses.BriefAndDetailedError pruneCauses = ne.getJsonMessage();
     int endIndex = MISSING_PARTITIONS.errorFormat.length() - 3;
     assertEquals(
       pruneCauses.getBrief().substring(0, endIndex),
@@ -1031,7 +1033,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     LensException e = getLensExceptionInRewrite(
       "select SUM(msr2) from testCube" + " where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
     assertEquals(e.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
-    PruneCauses.BriefAndDetailedError pruneCauses = extractPruneCause(e);
+    NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) e;
+    PruneCauses.BriefAndDetailedError pruneCauses = ne.getJsonMessage();
 
     assertEquals(
       pruneCauses.getBrief().substring(0, MISSING_PARTITIONS.errorFormat.length() - 3),

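The hunks above, and the remaining ones in this commit, repeat the same
two-line change: the rewrite now carries prune causes on the exception
itself, so each test narrows the LensException to
NoCandidateFactAvailableException and reads getJsonMessage() in place of the
old extractPruneCause helper. If the casts multiply further they could be
folded into one test utility; a hedged sketch follows (the helper name is
hypothetical, the types come from the diffs above):

    // Hypothetical test helper, not part of this commit. It assumes every
    // exception passed in really is a NoCandidateFactAvailableException,
    // as the surrounding error-code assertions already guarantee.
    private static PruneCauses.BriefAndDetailedError pruneCausesOf(LensException e) {
      NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) e;
      return ne.getJsonMessage();
    }

Each call site would then shrink to pruneCausesOf(th), though the commit
keeps the casts inline at every site.
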
http://git-wip-us.apache.org/repos/asf/lens/blob/36166a2e/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
index 36c1dba..af9daad 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
@@ -24,6 +24,7 @@ import static org.apache.lens.cube.parse.CubeTestSetup.*;
 
 import java.util.*;
 
+import org.apache.lens.cube.error.NoCandidateFactAvailableException;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.server.api.error.LensException;
 
@@ -149,7 +150,8 @@ public class TestDenormalizationResolver extends TestQueryRewrite {
     TestCubeRewriter.compareQueries(hqlQuery, expected);
     LensException e = getLensExceptionInRewrite(
       "select dim2big2, max(msr3)," + " msr2 from testCube" + " where " + TWO_DAYS_RANGE, tconf);
-    PruneCauses.BriefAndDetailedError error = extractPruneCause(e);
+    NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) e;
+    PruneCauses.BriefAndDetailedError error = ne.getJsonMessage();
     Assert.assertEquals(error.getBrief(), CandidateTablePruneCode.NO_CANDIDATE_STORAGES.errorFormat);
 
     HashMap<String, List<CandidateTablePruneCause>> details = error.getDetails();

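The assertions above also show the shape of the structured payload:
getBrief() carries a one-line summary (here the NO_CANDIDATE_STORAGES error
format), while getDetails() maps each candidate table to the list of causes
that pruned it. A small fragment illustrating how that map can be walked,
assuming only the accessors visible in these diffs (java.util.Map and
java.util.List imports implied):

    // Sketch: dump the detailed prune causes that the tests assert on.
    static void printPruneCauses(NoCandidateFactAvailableException ne) {
      PruneCauses.BriefAndDetailedError error = ne.getJsonMessage();
      System.out.println("brief: " + error.getBrief());
      for (Map.Entry<String, List<CandidateTablePruneCause>> entry
          : error.getDetails().entrySet()) {
        System.out.println(entry.getKey() + " -> " + entry.getValue());
      }
    }
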
http://git-wip-us.apache.org/repos/asf/lens/blob/36166a2e/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
index da0e4f4..2ac837d 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestTimeRangeResolver.java
@@ -29,6 +29,7 @@ import static org.testng.Assert.assertTrue;
 import java.util.List;
 import java.util.Set;
 
+import org.apache.lens.cube.error.NoCandidateFactAvailableException;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.conf.Configuration;
@@ -66,7 +67,8 @@ public class TestTimeRangeResolver extends TestQueryRewrite {
     LensException e =
       getLensExceptionInRewrite("cube select msr2 from " + cubeName + " where " + LAST_YEAR_RANGE,
         getConf());
-    PruneCauses.BriefAndDetailedError causes = extractPruneCause(e);
+    NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) e;
+    PruneCauses.BriefAndDetailedError causes = ne.getJsonMessage();
     assertTrue(causes.getBrief().contains("Columns [msr2] are not present in any table"));
     assertEquals(causes.getDetails().size(), 2);