Posted to commits@lens.apache.org by pr...@apache.org on 2017/04/12 10:23:18 UTC

[2/3] lens git commit: Merge with master

Merge with master


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/3ba2fad1
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/3ba2fad1
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/3ba2fad1

Branch: refs/heads/lens-1381
Commit: 3ba2fad1091769a048a62d5b7bed3cebad8548e1
Parents: d45c538 186f03f
Author: Rajat Khandelwal <ra...@gmail.com>
Authored: Wed Apr 12 15:50:14 2017 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Wed Apr 12 15:50:14 2017 +0530

----------------------------------------------------------------------
 .../main/resources/checkstyle-suppressions.xml  | 33 +++++++++++++++
 checkstyle/src/main/resources/checkstyle.xml    | 12 ++++--
 checkstyle/src/main/resources/suppressions.xml  | 32 ---------------
 contrib/clients/python/lens/client/main.py      |  7 +++-
 contrib/clients/python/lens/client/query.py     | 43 +++++++++++++++-----
 contrib/clients/python/setup.py                 |  2 +
 contrib/clients/python/test/test_lensclient.py  | 13 ++++--
 .../java/org/apache/lens/api/ToXMLString.java   |  1 -
 .../lens/api/error/LensCommonErrorCode.java     |  2 +-
 .../apache/lens/api/error/LensHttpStatus.java   |  2 +-
 .../lens/api/metastore/SchemaTraverser.java     | 23 ++++++++---
 .../apache/lens/doc/TestGenerateConfigDoc.java  |  2 +-
 .../apache/lens/cube/metadata/CubeColumn.java   |  2 +-
 .../lens/cube/metadata/CubeMetastoreClient.java | 22 +++++-----
 .../apache/lens/cube/metadata/Dimension.java    |  4 +-
 .../lens/cube/metadata/MetastoreUtil.java       |  1 -
 .../org/apache/lens/cube/metadata/Storage.java  |  2 +
 .../apache/lens/cube/parse/AliasReplacer.java   |  4 --
 .../apache/lens/cube/parse/ColumnResolver.java  |  4 --
 .../lens/cube/parse/CubeQueryRewriter.java      | 17 ++++----
 .../cube/parse/DenormalizationResolver.java     |  1 +
 .../apache/lens/cube/parse/GroupbyResolver.java | 11 ++---
 .../apache/lens/cube/parse/JoinResolver.java    |  4 --
 .../lens/cube/parse/LeastPartitionResolver.java |  4 --
 .../cube/parse/LightestDimensionResolver.java   |  5 ---
 .../lens/cube/parse/LightestFactResolver.java   |  4 --
 .../cube/parse/MaxCoveringFactResolver.java     |  2 +-
 .../lens/cube/parse/SelectPhraseContext.java    |  2 +-
 .../lens/cube/parse/TimerangeResolver.java      |  3 --
 .../cube/metadata/TestCubeMetastoreClient.java  |  2 +-
 .../apache/lens/cube/parse/CubeTestSetup.java   |  1 -
 .../cube/parse/TestBetweenTimeRangeWriter.java  |  1 +
 .../lens/cube/parse/TestCubeRewriter.java       | 10 ++---
 .../lens/driver/es/ASTTraverserForES.java       |  6 +--
 .../client/jest/JestResultSetTransformer.java   |  4 +-
 .../lens/driver/es/translator/ESVisitor.java    |  2 +-
 .../org/apache/lens/driver/hive/HiveDriver.java |  2 +-
 .../apache/lens/driver/jdbc/TestJdbcDriver.java |  2 +-
 .../core/helpers/SavedQueryResourceHelper.java  |  6 +--
 .../core/helpers/ScheduleResourceHelper.java    |  8 ++--
 .../regression/client/ITSavedQueryTests.java    |  2 +-
 .../server/api/events/SchedulerAlarmEvent.java  |  2 +-
 .../org/apache/lens/server/LensServices.java    |  8 ++--
 .../metastore/CubeMetastoreServiceImpl.java     |  1 -
 .../lens/server/scheduler/ScheduleResource.java | 18 ++++----
 .../server/user/UserConfigLoaderFactory.java    |  8 ++--
 .../apache/lens/server/LensServerTestUtil.java  |  6 +--
 .../org/apache/lens/server/TestServerMode.java  | 24 +++++------
 .../lens/server/query/TestEventService.java     |  4 +-
 .../TestQueryIndependenceFromSessionClose.java  |  6 ++-
 .../TotalQueryCostCeilingConstraintTest.java    |  2 +-
 .../lens/server/query/retry/QueryRetryTest.java |  6 +--
 .../server/stats/TestLogStatisticsStore.java    |  2 +-
 pom.xml                                         | 21 +++++++++-
 tools/conf/server/lens-site.xml                 |  6 +++
 55 files changed, 236 insertions(+), 188 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/3ba2fad1/lens-api/src/main/java/org/apache/lens/api/ToXMLString.java
----------------------------------------------------------------------
diff --cc lens-api/src/main/java/org/apache/lens/api/ToXMLString.java
index 746a82b,e74adc9..0058f20
--- a/lens-api/src/main/java/org/apache/lens/api/ToXMLString.java
+++ b/lens-api/src/main/java/org/apache/lens/api/ToXMLString.java
@@@ -24,10 -24,6 +24,9 @@@ import java.util.HashMap
  import java.util.Map;
  
  import javax.xml.bind.*;
 +import javax.xml.bind.annotation.XmlRootElement;
- import javax.xml.bind.annotation.XmlSeeAlso;
 +import javax.xml.bind.annotation.XmlType;
 +import javax.xml.namespace.QName;
  
  import org.apache.lens.api.jaxb.LensJAXBContext;
  

http://git-wip-us.apache.org/repos/asf/lens/blob/3ba2fad1/lens-api/src/main/java/org/apache/lens/api/metastore/SchemaTraverser.java
----------------------------------------------------------------------
diff --cc lens-api/src/main/java/org/apache/lens/api/metastore/SchemaTraverser.java
index 157ad71,0000000..76cb8b9
mode 100644,000000..100644
--- a/lens-api/src/main/java/org/apache/lens/api/metastore/SchemaTraverser.java
+++ b/lens-api/src/main/java/org/apache/lens/api/metastore/SchemaTraverser.java
@@@ -1,58 -1,0 +1,71 @@@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements.  See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership.  The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License.  You may obtain a copy of the License at
++ *
++ *   http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied.  See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ */
 +package org.apache.lens.api.metastore;
 +
 +import java.io.File;
 +import java.io.FilenameFilter;
- import java.io.IOException;
 +import java.util.Map;
 +import java.util.function.BiConsumer;
 +
- import javax.xml.bind.JAXBException;
- 
- import org.apache.lens.api.jaxb.LensJAXBContext;
- 
 +import com.google.common.collect.Maps;
 +
 +/*
 + * Created on 07/03/17.
 + */
 +public class SchemaTraverser implements Runnable {
 +  final File parent;
 +  final Map<String, Class<?>> types = Maps.newLinkedHashMap();
 +  private final SchemaEntityProcessor action;
 +  {
 +    types.put("storages", XStorage.class);
 +    types.put("cubes/base", XBaseCube.class);
 +    types.put("cubes/derived", XDerivedCube.class);
 +    types.put("dimensions", XDimension.class);
 +    types.put("facts", XFactTable.class);
 +    types.put("dimtables", XDimensionTable.class);
 +    types.put("dimensiontables", XDimensionTable.class);
 +    types.put("dimensiontables", XDimensionTable.class);
 +    types.put("segmentations", XSegmentation.class);
 +  }
 +  private static final FilenameFilter XML_FILTER = (dir, name) -> name.endsWith(".xml");
 +
 +  public interface SchemaEntityProcessor extends BiConsumer<File, Class<?>> {
 +  }
 +
 +  public SchemaTraverser(File parent, SchemaEntityProcessor action) {
 +    this.parent = parent;
 +    this.action = action;
 +  }
 +
 +  @Override
 +  public void run() {
 +    for (Map.Entry<String, Class<?>> entry : types.entrySet()) {
 +      File f = new File(parent, entry.getKey());
 +      if (f.exists()) {
 +        assert f.isDirectory();
 +        File[] files = f.listFiles(XML_FILTER);
 +        if (files != null) {
 +          for (File entityFile : files) {
 +            action.accept(entityFile.getAbsoluteFile(), entry.getValue());
 +          }
 +        }
 +      }
 +    }
 +  }
 +}
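
A minimal usage sketch for the SchemaTraverser above (illustrative only: the
schema root path and the printing callback are assumptions; the constructor
signature, the BiConsumer-based SchemaEntityProcessor, and the Runnable
contract come from the class itself):

    import java.io.File;

    import org.apache.lens.api.metastore.SchemaTraverser;

    public class SchemaTraverserExample {
      public static void main(String[] args) {
        // Assumed layout under the root: storages/, cubes/base/, cubes/derived/,
        // dimensions/, facts/, dimtables/, segmentations/ -- the keys registered
        // in SchemaTraverser's type map.
        File schemaRoot = new File("/tmp/lens-schema");
        SchemaTraverser traverser = new SchemaTraverser(schemaRoot,
          (entityFile, type) -> System.out.println(entityFile + " -> " + type.getSimpleName()));
        traverser.run(); // a Runnable, so it can also be handed to an executor
      }
    }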

http://git-wip-us.apache.org/repos/asf/lens/blob/3ba2fad1/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --cc lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index 7608a43,087c203..b445447
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@@ -307,74 -290,18 +307,74 @@@ public class CubeMetastoreClient 
      }
    }
  
+   public void createCubeFactTable(String cubeName, String factName, List<FieldSchema> columns,
+     Map<String, Set<UpdatePeriod>> storageAggregatePeriods, double weight, Map<String, String> properties,
+     Map<String, StorageTableDesc> storageTableDescs, Map<String, Map<UpdatePeriod, String>> storageUpdatePeriodMap)
+     throws LensException {
+     CubeFactTable factTable = new CubeFactTable(cubeName, factName, columns, storageAggregatePeriods, weight,
+       properties, storageUpdatePeriodMap);
+     createCubeTable(factTable, storageTableDescs);
+     // do a get to update cache
+     getCubeFact(factName);
+ 
+   }
  
 +  public <T extends Equals & HashCode & ToString> void createEntity(T entity) throws LensException {
 +    if (entity instanceof XStorage) {
 +      createStorage((XStorage) entity);
 +    } else if (entity instanceof XCube) {
 +      createCube((XCube) entity);
 +    } else if (entity instanceof XDimension) {
 +      createDimension((XDimension) entity);
 +    } else if (entity instanceof XFactTable) {
 +      createCubeFactTable((XFactTable) entity);
 +    } else if (entity instanceof XDimensionTable) {
 +      createCubeDimensionTable((XDimensionTable) entity);
 +    } else if (entity instanceof XSegmentation) {
 +      createSegmentation((XSegmentation) entity);
 +    } else {
 +      throw new LensException("Unable to create entity " + entity + " as it's unrecognizable: "+ entity.getClass());
 +    }
 +  }
 +
 +  public <T extends Equals & HashCode & ToString> void updateEntity(String name, T entity)
 +    throws LensException, HiveException {
 +    if (entity instanceof XStorage) {
 +      alterStorage((XStorage) entity);
 +    } else if (entity instanceof XCube) {
 +      alterCube((XCube) entity);
 +    } else if (entity instanceof XDimension) {
 +      alterDimension((XDimension) entity);
 +    } else if (entity instanceof XFactTable) {
 +      alterCubeFactTable((XFactTable) entity);
 +    } else if (entity instanceof XDimensionTable) {
 +      alterCubeDimensionTable((XDimensionTable) entity);
 +    } else if (entity instanceof XSegmentation) {
 +      alterSegmentation((XSegmentation) entity);
 +    } else {
 +      throw new LensException("Unable to alter entity " + entity + " as it's unrecognizable: " + entity.getClass());
 +    }
 +  }
 +
 +
 +  public static Map<String, String> addFactColStartTimePropertyToFactProperties(XFactTable fact) {
 +    Map<String, String> props = new HashMap<String, String>();
 +    props.putAll(JAXBUtils.mapFromXProperties(fact.getProperties()));
 +    props.putAll(JAXBUtils.columnStartAndEndTimeFromXColumns(fact.getColumns()));
 +    return props;
 +  }
 +  public void createCubeFactTable(XFactTable fact) throws LensException {
 +    createCubeFactTable(fact.getCubeName(),
 +      fact.getName(),
 +      JAXBUtils.fieldSchemaListFromColumns(fact.getColumns()),
 +      JAXBUtils.getFactUpdatePeriodsFromStorageTables(fact.getStorageTables()),
 +      fact.getWeight(),
 +      addFactColStartTimePropertyToFactProperties(fact),
 +      JAXBUtils.tableDescPrefixMapFromXStorageTables(fact.getStorageTables()),
 +      JAXBUtils.storageTablePrefixMapOfStorage(fact.getStorageTables()));
 +  }
-   public void createCubeFactTable(String cubeName, String factName, List<FieldSchema> columns,
-     Map<String, Set<UpdatePeriod>> storageAggregatePeriods, double weight, Map<String, String> properties,
-     Map<String, StorageTableDesc> storageTableDescs, Map<String, Map<UpdatePeriod, String>> storageUpdatePeriodMap)
-     throws LensException {
-     CubeFactTable factTable = new CubeFactTable(cubeName, factName, columns, storageAggregatePeriods, weight,
-       properties, storageUpdatePeriodMap);
-     createCubeTable(factTable, storageTableDescs);
-     // do a get to update cache
-     getCubeFact(factName);
- 
-   }
 +
 +
    /**
     * In-memory storage of {@link PartitionTimeline} objects for each valid
     * storagetable-updateperiod-partitioncolumn tuple. also simultaneously stored in metastore table of the
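
The new createEntity/updateEntity methods above dispatch purely on the JAXB
type of the entity. A hedged sketch of combining them (the create-then-alter
fallback, the reading of a failed create as "already exists", and the
assumption that XFactTable satisfies the Equals/HashCode/ToString bound are
illustrative, not part of this commit):

    import org.apache.hadoop.hive.ql.metadata.HiveException;

    import org.apache.lens.api.metastore.XFactTable;
    import org.apache.lens.cube.metadata.CubeMetastoreClient;
    import org.apache.lens.server.api.error.LensException;

    public final class FactDeployer {
      private FactDeployer() {
      }

      // Create a fact table from its XML definition; if creation fails
      // (assumed here to mean it already exists), alter it in place instead.
      public static void deploy(CubeMetastoreClient client, XFactTable fact)
        throws LensException, HiveException {
        try {
          client.createEntity(fact);                 // routes to createCubeFactTable(XFactTable)
        } catch (LensException e) {
          client.updateEntity(fact.getName(), fact); // routes to alterCubeFactTable(XFactTable)
        }
      }
    }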

http://git-wip-us.apache.org/repos/asf/lens/blob/3ba2fad1/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
----------------------------------------------------------------------
diff --cc lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
index 57d4502,4e350c8..1e8621d
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
@@@ -595,5 -595,5 +595,4 @@@ public class MetastoreUtil 
      return MetastoreUtil.getFactKeyPrefix(factTableName) + "." + storageName + "." + updatePeriod;
    }
  
--
  }

http://git-wip-us.apache.org/repos/asf/lens/blob/3ba2fad1/lens-cube/src/main/java/org/apache/lens/cube/parse/ColumnResolver.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lens/blob/3ba2fad1/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
----------------------------------------------------------------------
diff --cc lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
index 5713069,ea2eb7e..300d134
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
@@@ -135,75 -135,58 +135,74 @@@ public class CubeQueryRewriter 
     */
    private void setupRewriters() {
      // Resolve columns - the column alias and table alias
-     rewriters.add(new ColumnResolver(conf));
+     rewriters.add(new ColumnResolver());
      // Rewrite base trees (groupby, having, orderby, limit) using aliases
-     rewriters.add(new AliasReplacer(conf));
- 
+     rewriters.add(new AliasReplacer());
      ExpressionResolver exprResolver = new ExpressionResolver();
 -    DenormalizationResolver denormResolver = new DenormalizationResolver(conf);
 +    DenormalizationResolver denormResolver = new DenormalizationResolver();
      CandidateTableResolver candidateTblResolver = new CandidateTableResolver();
      StorageTableResolver storageTableResolver = new StorageTableResolver(conf);
 +
 +    // Phase 1 of exprResolver: Resolve expressions
      rewriters.add(exprResolver);
 -    // De-normalized columns resolved
 +    // Phase 1 of denormResolver: De-normalized columns resolved
      rewriters.add(denormResolver);
      // Resolve time ranges
-     rewriters.add(new TimerangeResolver(conf));
+     rewriters.add(new TimerangeResolver());
 -    // Resolve candidate fact tables and dimension tables for columns queried
 +    // Phase 1 of candidateTblResolver: Resolve candidate storages and dimension tables for columns queried
      rewriters.add(candidateTblResolver);
      // Resolve aggregations and generate base select tree
      rewriters.add(new AggregateResolver());
      rewriters.add(new GroupbyResolver(conf));
 +    // Validate queryability of fields (relevant when derived cubes are set up)
      rewriters.add(new FieldValidator());
      // Resolve joins and generate base join tree
-     rewriters.add(new JoinResolver(conf));
+     rewriters.add(new JoinResolver());
 -    // Do col life validation
 -    rewriters.add(new TimeRangeChecker(conf));
 -    // Resolve candidate fact tables and dimension tables for columns included
 +    // Do col life validation for the time range(s) queried
 +    rewriters.add(new ColumnLifetimeChecker());
 +    // Phase 1 of storageTableResolver: Validate and prune candidate storages
 +    rewriters.add(storageTableResolver);
 +    // Phase 2 of candidateTblResolver: Resolve candidate storages and dimension tables for columns included
      // in join and denorm resolvers
      rewriters.add(candidateTblResolver);
 +    // Find Union and Join combinations over Storage Candidates that can answer the queried time range(s) and all
 +    // queried measures
 +    rewriters.add(new CandidateCoveringSetsResolver());
  
 -    // Phase 1: resolve fact tables.
 -    rewriters.add(storageTableResolver);
 +    // If lightest fact first option is enabled for this driver (via lens.cube.query.pick.lightest.fact.first = true),
 +    // run LightestFactResolver and keep only the lightest combination(s) generated by CandidateCoveringSetsResolver
      if (lightFactFirst) {
        // Prune candidate tables for which denorm column references do not exist
        rewriters.add(denormResolver);
 -      // Prune candidate facts without any valid expressions
 +      // Phase 2 of exprResolver: Prune candidate facts without any valid expressions
        rewriters.add(exprResolver);
 +      // Pick the least cost combination(s) (and prune others) out of a set of combinations produced
 +      // by CandidateCoveringSetsResolver
-       rewriters.add(new LightestFactResolver(conf));
+       rewriters.add(new LightestFactResolver());
      }
 -    // Phase 2: resolve fact table partitions.
 +
 +    // Phase 2 of storageTableResolver: resolve storage table partitions.
      rewriters.add(storageTableResolver);
 +    // In case partial data is allowed (via lens.cube.query.fail.if.data.partial = false) and there are many
 +    // combinations with partial data, pick the one that covers the maximum part of the time range(s) queried
      rewriters.add(new MaxCoveringFactResolver(conf));
 -    // Phase 3: resolve dimension tables and partitions.
 +    // Phase 3 of storageTableResolver: resolve dimension tables and partitions.
      rewriters.add(storageTableResolver);
      // Prune candidate tables for which denorm column references do not exist
 +    // TODO union: phase 2 of denormResolver needs to be moved before CoveringSetResolver. Check if this makes sense.
      rewriters.add(denormResolver);
 -    // Prune candidate facts without any valid expressions
 +    // Phase 2 of exprResolver: Prune candidate facts without any valid expressions
      rewriters.add(exprResolver);
 -    // We can have LightestFactResolver before LeastPartitionResolver - that says
 -    // "if two facts have the same least weight, then the fact with least number of time partitions queried will be
 -    // picked". This will be useful, if users did not set fact weights.
 +
      if (!lightFactFirst) {
 +      // Pick the least cost combination(s) (and prune others) out of a set of combinations produced
 +      // by CandidateCoveringSetsResolver
-       rewriters.add(new LightestFactResolver(conf));
+       rewriters.add(new LightestFactResolver());
      }
 +    // If two combinations have the same least weight/cost, the combination with the least number of time partitions
 +    // queried will be picked; the rest of the combinations will be pruned.
-     rewriters.add(new LeastPartitionResolver(conf));
-     rewriters.add(new LightestDimensionResolver(conf));
+     rewriters.add(new LeastPartitionResolver());
+     rewriters.add(new LightestDimensionResolver());
    }
  
    public CubeQueryContext rewrite(ASTNode astnode) throws LensException {
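
The comments in setupRewriters() above describe a phased pipeline: a single
ordered list of rewriters in which the same resolver instance may be
registered several times, each registration acting as a later phase over the
accumulated query context. A minimal sketch of that pattern with simplified
stand-ins (the real types are ContextRewriter and CubeQueryContext):

    import java.util.ArrayList;
    import java.util.List;

    class QueryContext { /* parsed query plus candidate/pruning state */ }

    interface Rewriter {
      void rewriteContext(QueryContext ctx);
    }

    class RewriterChain {
      private final List<Rewriter> rewriters = new ArrayList<>();

      // The same instance may be added more than once; each addition is one
      // "phase" of that resolver.
      void add(Rewriter rewriter) {
        rewriters.add(rewriter);
      }

      void rewrite(QueryContext ctx) {
        for (Rewriter rewriter : rewriters) { // applied strictly in registration order
          rewriter.rewriteContext(ctx);
        }
      }
    }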

http://git-wip-us.apache.org/repos/asf/lens/blob/3ba2fad1/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
----------------------------------------------------------------------
diff --cc lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
index 76e5f23,cb26878..e5cf916
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
@@@ -138,9 -161,10 +138,10 @@@ public class DenormalizationResolver im
        return null;
      }
  
 -    public Set<Dimension> rewriteDenormctx(CubeQueryContext cubeql, CandidateFact cfact, Map<Dimension,
 -      CandidateDim> dimsToQuery, boolean replaceFact) throws LensException {
 +    Set<Dimension> rewriteDenormctx(CubeQueryContext cubeql,
 +      StorageCandidate sc, Map<Dimension, CandidateDim> dimsToQuery, boolean replaceFact) throws LensException {
        Set<Dimension> refTbls = new HashSet<>();
 -      log.info("Doing denorm changes for fact :{}", cfact);
++      log.info("Doing denorm changes for fact :{}", sc);
  
        if (!tableToRefCols.isEmpty()) {
          // pick referenced columns for fact

http://git-wip-us.apache.org/repos/asf/lens/blob/3ba2fad1/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
----------------------------------------------------------------------
diff --cc lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
index c9dc7b2,052b87a..1b30c0b
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
@@@ -47,15 -45,16 +47,16 @@@ class GroupbyResolver implements Contex
    private final boolean selectPromotionEnabled;
    private final boolean groupbyPromotionEnabled;
  
-   public GroupbyResolver(Configuration conf) {
-     selectPromotionEnabled = conf
-       .getBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, CubeQueryConfUtil.DEFAULT_ENABLE_SELECT_TO_GROUPBY);
-     groupbyPromotionEnabled = conf
-       .getBoolean(CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT, CubeQueryConfUtil.DEFAULT_ENABLE_GROUP_BY_TO_SELECT);
+   GroupbyResolver(Configuration conf) {
+     selectPromotionEnabled =
+       conf.getBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, CubeQueryConfUtil.DEFAULT_ENABLE_SELECT_TO_GROUPBY);
+     groupbyPromotionEnabled =
+       conf.getBoolean(CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT,
+         CubeQueryConfUtil.DEFAULT_ENABLE_GROUP_BY_TO_SELECT);
    }
  
 -  private void promoteSelect(CubeQueryContext cubeql, List<SelectPhraseContext> selectExprs,
 -    List<String> groupByExprs) throws LensException {
 +  private void promoteSelect(CubeQueryContext cubeql, List<SelectPhraseContext> selectExprs, List<String> groupByExprs)
 +    throws LensException {
      if (!selectPromotionEnabled) {
        return;
      }
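
The two flags read in the GroupbyResolver constructor above are plain boolean
keys on a Hadoop Configuration. An illustrative toggle, assuming only the
constant names shown in the diff and Hadoop's standard setBoolean/getBoolean:

    import org.apache.hadoop.conf.Configuration;

    import org.apache.lens.cube.parse.CubeQueryConfUtil;

    public class GroupbyFlagsExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Promote non-aggregate select expressions into GROUP BY.
        conf.setBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, true);
        // Do not promote GROUP BY expressions into the select list.
        conf.setBoolean(CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT, false);
        System.out.println("select->groupby promotion: "
          + conf.getBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, false));
      }
    }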

http://git-wip-us.apache.org/repos/asf/lens/blob/3ba2fad1/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
----------------------------------------------------------------------
diff --cc lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
index 0370964,fce1662..02e3dc7
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
@@@ -43,14 -42,8 +42,11 @@@ import lombok.extern.slf4j.Slf4j
  class JoinResolver implements ContextRewriter {
    private Map<AbstractCubeTable, JoinType> tableJoinTypeMap;
    private AbstractCubeTable target;
 -  private HashMap<Dimension, List<JoinChain>> dimensionInJoinChain = new HashMap<Dimension, List<JoinChain>>();
 +  /**
 +   * Dimension as key and all the participating join chains for this dimension as value.
 +   */
 +  private HashMap<Dimension, List<JoinChain>> dimensionToJoinChainsMap = new HashMap<Dimension, List<JoinChain>>();
  
-   public JoinResolver(Configuration conf) {
-   }
- 
    @Override
    public void rewriteContext(CubeQueryContext cubeql) throws LensException {
      tableJoinTypeMap = new HashMap<AbstractCubeTable, JoinType>();

http://git-wip-us.apache.org/repos/asf/lens/blob/3ba2fad1/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
----------------------------------------------------------------------
diff --cc lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
index 153df24,0bc7f82..a9bd164
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
@@@ -18,15 -18,11 +18,13 @@@
   */
  package org.apache.lens.cube.parse;
  
 -import java.util.*;
 +import java.util.Collections;
 +import java.util.HashMap;
 +import java.util.Iterator;
 +import java.util.Map;
  
 -import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
  import org.apache.lens.server.api.error.LensException;
  
- import org.apache.hadoop.conf.Configuration;
- 
  import lombok.extern.slf4j.Slf4j;
  
  /**

http://git-wip-us.apache.org/repos/asf/lens/blob/3ba2fad1/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestFactResolver.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lens/blob/3ba2fad1/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lens/blob/3ba2fad1/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --cc lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 860db28,9b29083..fe13de1
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@@ -525,16 -511,1143 +525,15 @@@ public class CubeTestSetup 
      return expected.toString();
    }
  
-   private Set<ExprColumn> exprs;
 -  Set<ExprColumn> exprs;
 -
 -  private void createCube(CubeMetastoreClient client) throws HiveException, ParseException, LensException {
 -    cubeMeasures = new HashSet<CubeMeasure>();
 -    Map<String, String> tags = new HashMap<>();
 -    tags.put(MetastoreConstants.MEASURE_DATACOMPLETENESS_TAG, "tag1");
 -    cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr1", "int", "first measure"), null, null, null, null, null,
 -            null, null, null, null, tags));
 -    cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr2", "float", "second measure"), "Measure2", null, "SUM",
 -      "RS"));
 -    cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr21", "float", "second measure"), "Measure22", null, "SUM",
 -      "RS"));
 -    cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr22", "float", "second measure"), "Measure22", null, "SUM",
 -      "RS"));
 -    cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr3", "double", "third measure"), "Measure3", null, "MAX",
 -      null));
 -    cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr4", "bigint", "fourth measure"), "Measure4", null, "COUNT",
 -      null));
 -    cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr9", "bigint", "ninth measure"), null, null, null, null,
 -            null, null, null, null, null, tags));
 -    cubeMeasures.add(new ColumnMeasure(new FieldSchema("noAggrMsr", "bigint", "measure without a default aggregate"),
 -      "No aggregateMsr", null, null, null));
 -    cubeMeasures.add(new ColumnMeasure(new FieldSchema("newmeasure", "bigint", "measure available  from now"),
 -      "New measure", null, null, null, NOW, null, 100.0));
 -    cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr15", "int", "fifteenth measure"), "Measure15", null, "SUM",
 -      "RS"));
 -
 -    cubeDimensions = new HashSet<CubeDimAttribute>();
 -    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("d_time", "timestamp", "d time")));
 -    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("processing_time", "timestamp", "processing time")));
 -    List<CubeDimAttribute> locationHierarchy = new ArrayList<CubeDimAttribute>();
 -    locationHierarchy.add(new BaseDimAttribute(new FieldSchema("zipcode", "int", "zip")));
 -    locationHierarchy.add(new BaseDimAttribute(new FieldSchema("cityid", "int", "city")));
 -    locationHierarchy.add(new BaseDimAttribute(new FieldSchema("stateid", "int", "state")));
 -    locationHierarchy.add(new BaseDimAttribute(new FieldSchema("countryid", "int", "country")));
 -    List<String> regions = Arrays.asList("APAC", "EMEA", "USA");
 -    locationHierarchy.add(new BaseDimAttribute(new FieldSchema("regionname", "string", "region"), "regionname", null,
 -      null, null, null, regions));
 -
 -    cubeDimensions.add(new HierarchicalDimAttribute("location", "Location hierarchy", locationHierarchy));
 -    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("dim1", "string", "basedim")));
 -    // Added for ambiguity test
 -    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("ambigdim1", "string", "used in testColumnAmbiguity")));
 -    cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("dim2", "int", "ref dim"), "dim2 refer",
 -      "dim2chain", "id", null, null, 0.0));
 -    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("cdim2", "int", "ref dim"), "Dim2 refer", NOW, null, null));
 -    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("urdimid", "int", "ref dim"), "urdim refer",
 -      null, null, 10.0));
 -    cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("unreachableName", "string", ""), "urdim name",
 -      "unreachableDim_chain", "name", null, null, 10.0));
 -    // denormalized reference
 -    cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("dim2big1", "bigint", "ref dim"), "dim2 refer",
 -      "dim2chain", "bigid1", null, null, 0.0));
 -    cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("dim2big2", "bigint", "ref dim"), "dim2 refer",
 -      "dim2chain", "bigid2", null, null, 0.0));
 -    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("dim2bignew", "bigint", "ref dim"), "Dim2 refer",
 -      NOW, null, null));
 -    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("test_time_dim_hour_id", "int", "ref dim"),
 -      "Timedim reference", null, null, null));
 -    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("test_time_dim_day_id", "int", "ref dim"),
 -      "Timedim reference", null, null, null));
 -    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("test_time_dim_hour_id2", "int", "ref dim")));
 -    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("test_time_dim_day_id2", "int", "ref dim")));
 -    cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("testDim3id", "string", "direct id to testdim3"),
 -      "dim3 refer", "dim3chain", "id", null, null, 0.0));
 -    cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("cityname", "string", "city name"),
 -      "city name", "cubecity", "name", null, null, 0.0));
 -    cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("statename_cube", "string", "state name"),
 -      "state name", "cubestate", "name", null, null, 0.0));
 -    List<ChainRefCol> references = new ArrayList<>();
 -    references.add(new ChainRefCol("timedatechain1", "full_date"));
 -    references.add(new ChainRefCol("timehourchain1", "full_hour"));
 -    cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("test_time_dim", "date", "ref dim"),
 -      "Timedim full date", references, null, null, null, null));
 -    List<ChainRefCol> chainRefs = new ArrayList<>();
 -    chainRefs.add(new ChainRefCol("timehourchain2", "full_hour"));
 -    chainRefs.add(new ChainRefCol("timedatechain2", "full_date"));
 -    cubeDimensions.add(new ReferencedDimAttribute(new FieldSchema("test_time_dim2", "date", "chained dim"),
 -      "Timedim full date", chainRefs, null, null, null, null));
 -    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("cityid1", "int", "id to city"),
 -      "City1", null, null, null));
 -    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("cityid2", "int", "id to city"),
 -      "City2", null, null, null));
 -    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("concatedcitystate", "string", "citystate"),
 -      "CityState", null, null, null));
 -
 -    Map<String, JoinChain> joinChains = new HashMap<>();
 -    addCubeChains(joinChains, TEST_CUBE_NAME);
 -
 -    exprs = new HashSet<ExprColumn>();
 -    exprs.add(new ExprColumn(new FieldSchema("avgmsr", "double", "avg measure"), "Avg Msr", "avg(msr1 + msr2)"));
 -    exprs.add(new ExprColumn(new FieldSchema("singlecolmsr2expr", "double", "measure2"), "Msr2", "msr2)"));
 -    exprs.add(new ExprColumn(new FieldSchema("singlecolmsr2qualifiedexpr", "double", "testcube.measure2"),
 -      "Msr2", "testcube.msr2"));
 -    exprs.add(new ExprColumn(new FieldSchema("singlecoldim1expr", "string", "dim1"), "dim1", "dim1)"));
 -    exprs.add(new ExprColumn(new FieldSchema("singlecoldim1qualifiedexpr", "string", "testcube.dim1"),
 -      "dim1", "testcube.dim1"));
 -    exprs.add(new ExprColumn(new FieldSchema("singlecolchainid", "string", "dim3chain.id"),
 -      "dim3chainid", "dim3chain.id)"));
 -    exprs.add(new ExprColumn(new FieldSchema("singlecolchainrefexpr", "string", "testcube.testDim3id"),
 -      "dim3chainid", "testcube.testDim3id"));
 -    exprs.add(new ExprColumn(new FieldSchema("singlecolchainfield", "string", "cubecity.name"),
 -      "cubecityname", "cubecity.name"));
 -    exprs.add(new ExprColumn(new FieldSchema("summsrs", "double", "sum measures"), "Sum Msrs",
 -      "(1000 + sum(msr1) + sum(msr2))/100"));
 -    exprs.add(new ExprColumn(new FieldSchema("msr5", "double", "materialized in some facts"), "Fifth Msr",
 -      "msr2 + msr3"));
 -    exprs.add(new ExprColumn(new FieldSchema("msr8", "double", "measure expression"), "Sixth Msr",
 -      "msr2 + msr3"));
 -    exprs.add(new ExprColumn(new FieldSchema("msr7", "double", "measure expression"), "Seventh Msr",
 -      "case when sum(msr2) = 0 then 0 else sum(case when cityid='x' then msr21 else msr22 end)/sum(msr2) end"));
 -    exprs.add(new ExprColumn(new FieldSchema("equalsums", "double", "sums are equals"), "equalsums",
 -      new ExprSpec("msr3 + msr4", null, null), new ExprSpec("(msr3 + msr2)/100", null, null)));
 -    exprs.add(new ExprColumn(new FieldSchema("roundedmsr1", "double", "rounded measure1"), "Rounded msr1",
 -      "round(msr1/1000)"));
 -    exprs.add(new ExprColumn(new FieldSchema("roundedmsr2", "double", "rounded measure2"), "Rounded msr2",
 -      "round(msr2/1000)"));
 -    exprs.add(new ExprColumn(new FieldSchema("flooredmsr12", "double", "floored measure12"), "Floored msr12",
 -            "floor(msr12)"));
 -    exprs.add(new ExprColumn(new FieldSchema("nestedexpr", "double", "nested expr"), "Nested expr",
 -      new ExprSpec("avg(roundedmsr2)", null, null), new ExprSpec("avg(equalsums)", null, null),
 -      new ExprSpec("case when substrexpr = 'xyz' then avg(msr5) when substrexpr = 'abc' then avg(msr4)/100 end",
 -        null, null)));
 -    exprs.add(new ExprColumn(new FieldSchema("msr2expr", "double", "nested expr"), "Nested expr",
 -      new ExprSpec("case when cityStateName = 'xyz' then msr2 else 0 end", null, null)));
 -    exprs.add(new ExprColumn(new FieldSchema("nestedExprWithTimes", "double", "nested expr"), "Nested expr",
 -      new ExprSpec("avg(roundedmsr2)", null, null), new ExprSpec("avg(equalsums)", null, null),
 -      new ExprSpec("case when substrexpr = 'xyz' then avg(msr5) when substrexpr = 'abc' then avg(msr4)/100 end",
 -        NOW, null), new ExprSpec("avg(newmeasure)", null, null)));
 -    exprs.add(new ExprColumn(new FieldSchema("msr6", "bigint", "sixth measure"), "Measure6",
 -      "sum(msr2) + max(msr3)/ count(msr4)"));
 -    exprs.add(new ExprColumn(new FieldSchema("booleancut", "boolean", "a boolean expression"), "Boolean cut",
 -      "(dim1 != 'x' AND dim2 != 10)"));
 -    exprs.add(new ExprColumn(new FieldSchema("substrexpr", "string", "a sub-string expression"), "Substr expr",
 -      new ExprSpec("substr(dim1, 3))", null, null), new ExprSpec("substr(ascii(dim2chain.name), 3)", null, null)));
 -    exprs.add(new ExprColumn(new FieldSchema("substrexprdim2", "string", "a sub-string expression"), "Substr expr",
 -      new ExprSpec("substr(dim2, 3))", null, null), new ExprSpec("substr(ascii(dim2chain.name), 3)", null, null)));
 -    exprs.add(new ExprColumn(new FieldSchema("indiasubstr", "boolean", "nested sub string expression"), "Nested expr",
 -      "substrexpr = 'INDIA'"));
 -    exprs.add(new ExprColumn(new FieldSchema("refexpr", "string", "expression which facts and dimensions"),
 -      "Expr with cube and dim fields", "concat(dim1, \":\", citydim.name)"));
 -    exprs.add(new ExprColumn(new FieldSchema("nocolexpr", "string", "expression which non existing colun"),
 -      "No col expr", "myfun(nonexist)"));
 -    exprs.add(new ExprColumn(new FieldSchema("newexpr", "string", "expression which non existing colun"),
 -      "new measure expr", "myfun(newmeasure)"));
 -    exprs.add(new ExprColumn(new FieldSchema("cityAndState", "String", "city and state together"), "City and State",
 -      new ExprSpec("concat(cityname, \":\", statename_cube)", null, null),
 -      new ExprSpec("substr(concatedcitystate, 10)", null, null)));
 -    exprs.add(new ExprColumn(new FieldSchema("cityAndStateNew", "String", "city and state together"), "City and State",
 -      new ExprSpec("concat(cityname, \":\", statename_cube)", null, TWO_MONTHS_BACK),
 -      new ExprSpec("substr(concatedcitystate, 10)", null, null)));
 -    exprs.add(new ExprColumn(new FieldSchema("cityStateName", "String", "city state"), "City State",
 -      "concat('CityState:', cubecity.statename)"));
 -    exprs.add(new ExprColumn(new FieldSchema("isIndia", "String", "is indian city/state"), "Is Indian City/state",
 -      "cubecity.name == 'DELHI' OR cubestate.name == 'KARNATAKA' OR cubestate.name == 'MAHARASHTRA'"));
 -    exprs.add(new ExprColumn(new FieldSchema("cubeStateName", "String", "statename from cubestate"), "CubeState Name",
 -      "substr(cubestate.name, 5)"));
 -    exprs.add(new ExprColumn(new FieldSchema("substrdim2big1", "String", "substr of dim2big1"), "dim2big1 substr",
 -      "substr(dim2big1, 5)"));
 -    exprs.add(new ExprColumn(new FieldSchema("asciicity", "String", "ascii cityname"), "ascii cityname substr",
 -      "ascii(cityname)"));
 -    exprs.add(new ExprColumn(new FieldSchema("countofdistinctcityid", "int", "Count of Distinct CityId"),
 -        "Count of Distinct CityId Expr", "count(distinct(cityid))"));
 -    exprs.add(new ExprColumn(new FieldSchema("notnullcityid", "int", "Not null cityid"),
 -        "Not null cityid Expr", "case when cityid is null then 0 else cityid end"));
 -
 -    Map<String, String> cubeProperties = new HashMap<String, String>();
 -    cubeProperties.put(MetastoreUtil.getCubeTimedDimensionListKey(TEST_CUBE_NAME),
 -      "d_time,pt,it,et,test_time_dim,test_time_dim2");
 -    cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "test_time_dim", "ttd");
 -    cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "test_time_dim2", "ttd2");
 -    cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "d_time", "dt");
 -    cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "it", "it");
 -    cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "et", "et");
 -    cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "pt", "pt");
 -    cubeProperties.put(MetastoreConstants.TIMEDIM_RELATION + "d_time", "test_time_dim+[-10 days,10 days]");
 -
 -    client.createCube(TEST_CUBE_NAME, cubeMeasures, cubeDimensions, exprs, Sets.newHashSet(joinChains.values()),
 -      cubeProperties);
 -
 -    Set<String> measures = new HashSet<String>();
 -    measures.add("msr1");
 -    measures.add("msr2");
 -    measures.add("msr3");
 -    measures.add("msr9");
 -    Set<String> dimensions = new HashSet<String>();
 -    dimensions.add("dim1");
 -    dimensions.add("dim2");
 -    dimensions.add("dim2big1");
 -    dimensions.add("dim2big2");
 -    dimensions.add("dim2bignew");
 -    // Try creating derived cube with non existant dim/measures
 -    try{
 -      client.createDerivedCube(TEST_CUBE_NAME, DERIVED_CUBE_NAME,
 -        Sets.newHashSet("random_measure"), Sets.newHashSet("random_dim_attribute"),
 -        new HashMap<String, String>(), 5L);
 -    } catch(LensException e) {
 -      assertTrue(e.getMessage().contains("random_measure"));
 -      assertTrue(e.getMessage().contains("random_dim_attribute"));
 -      assertTrue(e.getMessage().contains("not present"));
 -    }
 -    client.createDerivedCube(TEST_CUBE_NAME, DERIVED_CUBE_NAME,
 -      measures, dimensions, new HashMap<String, String>(), 5L);
 -  }
 -
 -  private void addCubeChains(Map<String, JoinChain> joinChains, final String cubeName) {
 -    joinChains.put("timehourchain1", new JoinChain("timehourchain1", "time chain", "time dim thru hour dim") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "test_time_dim_hour_id"));
 -            add(new TableReference("hourdim", "id"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.put("timedatechain1", new JoinChain("timedatechain1", "time chain", "time dim thru date dim") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "test_time_dim_day_id"));
 -            add(new TableReference("daydim", "id"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.put("timehourchain2", new JoinChain("timehourchain2", "time chain", "time dim thru hour dim") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "test_time_dim_hour_id2"));
 -            add(new TableReference("hourdim", "id"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.put("timedatechain2", new JoinChain("timedatechain2", "time chain", "time dim thru date dim") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "test_time_dim_day_id2"));
 -            add(new TableReference("daydim", "id"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.put("cubeCity", new JoinChain("cubeCity", "cube-city", "city thru cube") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "cityid"));
 -            add(new TableReference("citydim", "id"));
 -          }
 -        });
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "dim2"));
 -            add(new TableReference("testdim2", "id"));
 -            add(new TableReference("testdim2", "cityid"));
 -            add(new TableReference("citydim", "id"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.put("cubeCity1", new JoinChain("cubeCity1", "cube-city", "city thru cube") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "cityid1"));
 -            add(new TableReference("citydim", "id"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.put("cubeCity2", new JoinChain("cubeCity2", "cube-city", "city thru cube") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "cityid2"));
 -            add(new TableReference("citydim", "id"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.put("cubeState",  new JoinChain("cubeState", "cube-state", "state thru cube") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "stateid"));
 -            add(new TableReference("statedim", "id"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.put("cubeZip",  new JoinChain("cubeZip", "cube-zip", "Zipcode thru cube") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "zipcode"));
 -            add(new TableReference("zipdim", "code"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.put("cubeCountry",  new JoinChain("cubeCountry", "cube-country", "country thru cube") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "countryid"));
 -            add(new TableReference("countrydim", "id"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.put("dim2chain", new JoinChain("dim2chain", "cube-testdim2", "testdim2 thru cube") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "dim2"));
 -            add(new TableReference("testdim2", "id"));
 -          }
 -        });
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "dim2big1"));
 -            add(new TableReference("testdim2", "bigid1"));
 -          }
 -        });
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "dim2big2"));
 -            add(new TableReference("testdim2", "bigid2"));
 -          }
 -        });
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "dim2bignew"));
 -            add(new TableReference("testdim2", "bigidnew"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.put("dim3chain", new JoinChain("dim3chain", "cube-testdim3", "cyclicdim thru cube") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "dim2"));
 -            add(new TableReference("testdim2", "id"));
 -            add(new TableReference("testdim2", "testdim3id"));
 -            add(new TableReference("testdim3", "id"));
 -          }
 -        });
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "dim2big1"));
 -            add(new TableReference("testdim2", "bigid1"));
 -            add(new TableReference("testdim2", "testdim3id"));
 -            add(new TableReference("testdim3", "id"));
 -          }
 -        });
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "dim2big2"));
 -            add(new TableReference("testdim2", "bigid2"));
 -            add(new TableReference("testdim2", "testdim3id"));
 -            add(new TableReference("testdim3", "id"));
 -          }
 -        });
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "dim2bignew"));
 -            add(new TableReference("testdim2", "bigidnew"));
 -            add(new TableReference("testdim2", "testdim3id"));
 -            add(new TableReference("testdim3", "id"));
 -          }
 -        });
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "testdim3id"));
 -            add(new TableReference("testdim3", "id"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.put("dim4chain", new JoinChain("dim4chain", "cube-testdim3", "cyclicdim thru cube") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "dim2"));
 -            add(new TableReference("testdim2", "id"));
 -            add(new TableReference("testdim2", "testdim3id"));
 -            add(new TableReference("testdim3", "id"));
 -            add(new TableReference("testdim3", "testdim4id"));
 -            add(new TableReference("testdim4", "id"));
 -          }
 -        });
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "dim2big1"));
 -            add(new TableReference("testdim2", "bigid1"));
 -            add(new TableReference("testdim2", "testdim3id"));
 -            add(new TableReference("testdim3", "id"));
 -            add(new TableReference("testdim3", "testdim4id"));
 -            add(new TableReference("testdim4", "id"));
 -          }
 -        });
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "dim2big2"));
 -            add(new TableReference("testdim2", "bigid2"));
 -            add(new TableReference("testdim2", "testdim3id"));
 -            add(new TableReference("testdim3", "id"));
 -            add(new TableReference("testdim3", "testdim4id"));
 -            add(new TableReference("testdim4", "id"));
 -          }
 -        });
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "dim2bignew"));
 -            add(new TableReference("testdim2", "bigidnew"));
 -            add(new TableReference("testdim2", "testdim3id"));
 -            add(new TableReference("testdim3", "id"));
 -            add(new TableReference("testdim3", "testdim4id"));
 -            add(new TableReference("testdim4", "id"));
 -          }
 -        });
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "testdim3id"));
 -            add(new TableReference("testdim3", "id"));
 -            add(new TableReference("testdim3", "testdim4id"));
 -            add(new TableReference("testdim4", "id"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.put("cdimChain", new JoinChain("cdimChain", "cube-cyclicdim", "cyclicdim thru cube") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "cdim2"));
 -            add(new TableReference("cycledim1", "id"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.put("unreachableDim_chain", new JoinChain("unreachableDim_chain", "cube-unreachableDim",
 -      "unreachableDim thru cube") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "urdimid"));
 -            add(new TableReference("unreachableDim", "id"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.put("cubeCountry",  new JoinChain("cubeCountry", "cube-country", "country thru cube") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference(cubeName, "countryid"));
 -            add(new TableReference("countrydim", "id"));
 -          }
 -        });
 -      }
 -    });
 -  }
 -  private void createBaseAndDerivedCubes(CubeMetastoreClient client)
 -    throws HiveException, ParseException, LensException {
 -    Set<CubeMeasure> cubeMeasures2 = new HashSet<>(cubeMeasures);
 -    Set<CubeDimAttribute> cubeDimensions2 = new HashSet<>(cubeDimensions);
 -    cubeMeasures2.add(new ColumnMeasure(new FieldSchema("msr11", "int", "first measure")));
 -    cubeMeasures2.add(new ColumnMeasure(new FieldSchema("msr12", "float", "second measure"), "Measure2", null, "SUM",
 -      "RS"));
 -    cubeMeasures2.add(new ColumnMeasure(new FieldSchema("msr13", "double", "third measure"), "Measure3", null, "MAX",
 -      null));
 -    cubeMeasures2.add(new ColumnMeasure(new FieldSchema("msr14", "bigint", "fourth measure"), "Measure4", null,
 -      "COUNT", null));
 -    cubeMeasures2.add(new ColumnMeasure(new FieldSchema("directMsr", "bigint", "fifth measure"), "Direct Measure",
 -      null, "SUM", null));
 -
 -    cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("dim11", "string", "basedim")));
 -    cubeDimensions2.add(new ReferencedDimAttribute(new FieldSchema("dim12", "int", "ref dim"), "Dim2 refer",
 -      "dim2chain", "id", null, null, null)); // used as key in the chains
 -    cubeDimensions2.add(new ReferencedDimAttribute(new FieldSchema("dim22", "int", "ref dim"), "Dim2 refer",
 -      "dim2chain", "id", null, null, null)); // not used as key in the chains
 -    cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("dim13", "string", "basedim")));
 -    cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("userid", "int", "userid")));
 -    cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("xuserid", "int", "userid")));
 -    cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("yuserid", "int", "userid")));
 -    cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("user_id_added_in_past", "int", "user_id_added_in_past")));
 -    cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("user_id_added_far_future", "int",
 -        "user_id_added_far_future")));
 -    cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("user_id_deprecated", "int", "user_id_deprecated")));
 -
 -    cubeDimensions2.add(new ReferencedDimAttribute(new FieldSchema("xsports", "array<string>", ""),
 -      "xuser sports", "xusersports", "name", null, null, null));
 -    cubeDimensions2.add(new ReferencedDimAttribute(new FieldSchema("ysports", "array<string>", ""),
 -      "yuser sports", "yusersports", "name", null, null, null));
 -    cubeDimensions2.add(new ReferencedDimAttribute(new FieldSchema("sports", "array<string>", ""),
 -      "user sports", "usersports", "name", null, null, null));
 -    cubeDimensions2.add(new ReferencedDimAttribute(new FieldSchema("sportids", "array<int>", ""),
 -      "user sports", "userInterestIds", "sport_id", null, null, null));
 -    cubeDimensions2.add(new ReferencedDimAttribute(new FieldSchema("statecountry", "string", ""),
 -      "state country", "cubestatecountry", "name", null, null, null));
 -    cubeDimensions2.add(new ReferencedDimAttribute(new FieldSchema("citycountry", "string", ""),
 -      "city country", "cubecitystatecountry", "name", null, null, null));
 -    List<ChainRefCol> refCols = new ArrayList<>();
 -    refCols.add(new ChainRefCol("cubeState", "countrycapital"));
 -    refCols.add(new ChainRefCol("cubeCityStateCountry", "capital"));
 -    cubeDimensions2.add(new ReferencedDimAttribute(new FieldSchema("cubeCountryCapital", "String", "ref dim"),
 -      "Country capital", refCols, null, null, null, null));
 -    Map<String, String> cubeProperties = new HashMap<>();
 -    cubeProperties.put(MetastoreUtil.getCubeTimedDimensionListKey(BASE_CUBE_NAME),
 -      "d_time,pt,it,et,test_time_dim,test_time_dim2");
 -    cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "test_time_dim", "ttd");
 -    cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "test_time_dim2", "ttd2");
 -    cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "d_time", "dt");
 -    cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "it", "it");
 -    cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "et", "et");
 -    cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "processing_time", "pt");
 -    cubeProperties.put(MetastoreConstants.TIMEDIM_RELATION + "d_time", "processing_time+[-5 days,5 days]");
 -    cubeProperties.put(MetastoreConstants.TIMEDIM_RELATION + "processing_time", "test_time_dim+[-5 days,5 days]");
 -    cubeProperties.put(MetastoreConstants.CUBE_ALL_FIELDS_QUERIABLE, "false");
 -
 -    Map<String, JoinChain> joinChainMap = new HashMap<>();
 -    addCubeChains(joinChainMap, "basecube");
 -    // update new paths
 -    joinChainMap.get("dim2chain").addPath(new ArrayList<TableReference>() {
 -      {
 -        add(new TableReference("basecube", "dim12"));
 -        add(new TableReference("testdim2", "id"));
 -      }
 -    });
 -    joinChainMap.get("dim3chain").addPath(new ArrayList<TableReference>() {
 -      {
 -        add(new TableReference("basecube", "dim12"));
 -        add(new TableReference("testdim2", "id"));
 -        add(new TableReference("testdim2", "testdim3id"));
 -        add(new TableReference("testdim3", "id"));
 -      }
 -    });
 -    joinChainMap.get("dim4chain").addPath(new ArrayList<TableReference>() {
 -      {
 -        add(new TableReference("basecube", "dim12"));
 -        add(new TableReference("testdim2", "id"));
 -        add(new TableReference("testdim2", "testdim3id"));
 -        add(new TableReference("testdim3", "id"));
 -        add(new TableReference("testdim3", "testdim4id"));
 -        add(new TableReference("testdim4", "id"));
 -      }
 -    });
 -    Set<JoinChain> joinChains = Sets.newHashSet(joinChainMap.values());
 -    joinChains.add(new JoinChain("cityState", "city-state", "state thru city") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference("basecube", "cityid"));
 -            add(new TableReference("citydim", "id"));
 -            add(new TableReference("citydim", "stateid"));
 -            add(new TableReference("statedim", "id"));
 -          }
 -        });
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference("basecube", "cityid"));
 -            add(new TableReference("citydim", "id"));
 -            add(new TableReference("citydim", "statename"));
 -            add(new TableReference("statedim", "name"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.add(new JoinChain("cityZip", "city-zip", "zip thru city") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference("basecube", "cityid"));
 -            add(new TableReference("citydim", "id"));
 -            add(new TableReference("citydim", "zipcode"));
 -            add(new TableReference("zipdim", "code"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.add(new JoinChain("cubeStateCountry", "cube-state-country", "country through state") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference("basecube", "stateid"));
 -            add(new TableReference("statedim", "id"));
 -            add(new TableReference("statedim", "countryid"));
 -            add(new TableReference("countrydim", "id"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.add(new JoinChain("cubeCityStateCountry", "cube-city-state-country", "country through state thru city") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference("basecube", "cityid"));
 -            add(new TableReference("citydim", "id"));
 -            add(new TableReference("citydim", "stateid"));
 -            add(new TableReference("statedim", "id"));
 -            add(new TableReference("statedim", "countryid"));
 -            add(new TableReference("countrydim", "id"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.add(new JoinChain("userchain", "user-chain", "user chain") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference("basecube", "userid"));
 -            add(new TableReference("userdim", "id"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.add(new JoinChain("user_id_added_far_future_chain", "user_id_added_far_future_chain",
 -        "user_id_added_far_future_chain") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference("basecube", "user_id_added_far_future"));
 -            add(new TableReference("userdim", "user_id_added_far_future"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.add(new JoinChain("userSports", "user-sports", "user sports") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference("basecube", "userid"));
 -            add(new TableReference("userdim", "id"));
 -            add(new TableReference("userdim", "id"));
 -            add(new TableReference("user_interests", "user_id", true));
 -            add(new TableReference("user_interests", "sport_id"));
 -            add(new TableReference("sports", "id"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.add(new JoinChain("userInterestIds", "user-interestsIds", "user interest ids") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference("basecube", "userid"));
 -            add(new TableReference("userdim", "id"));
 -            add(new TableReference("userdim", "id"));
 -            add(new TableReference("user_interests", "user_id", true));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.add(new JoinChain("xuserSports", "xuser-sports", "xuser sports") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference("basecube", "xuserid"));
 -            add(new TableReference("userdim", "id"));
 -            add(new TableReference("userdim", "id"));
 -            add(new TableReference("user_interests", "user_id", true));
 -            add(new TableReference("user_interests", "sport_id"));
 -            add(new TableReference("sports", "id"));
 -          }
 -        });
 -      }
 -    });
 -    joinChains.add(new JoinChain("yuserSports", "user-sports", "user sports") {
 -      {
 -        addPath(new ArrayList<TableReference>() {
 -          {
 -            add(new TableReference("basecube", "yuserid"));
 -            add(new TableReference("userdim", "id"));
 -            add(new TableReference("userdim", "id"));
 -            add(new TableReference("user_interests", "user_id", true));
 -            add(new TableReference("user_interests", "sport_id"));
 -            add(new TableReference("sports", "id"));
 -          }
 -        });
 -      }
 -    });
 -
 -    // add ref dim through chain
 -    cubeDimensions2.add(
 -      new ReferencedDimAttribute(new FieldSchema("cityStateCapital", "string", "State's capital thru city"),
 -        "State's capital thru city", "cityState", "capital", null, null, null));
 -    Set<ExprColumn> baseExprs = new HashSet<>(exprs);
 -    baseExprs.add(new ExprColumn(new FieldSchema("substrsprorts", "String", "substr of sports"), "substr sports",
 -      "substr(sports, 10)"));
 -    baseExprs.add(new ExprColumn(new FieldSchema("xsports_abbr", "array<string>", ""),
 -      "xuser sports", "substr(xsports, 3)"));
 -    baseExprs.add(new ExprColumn(new FieldSchema("ysports_abbr", "array<string>", ""),
 -      "yuser sports", "substr(ysports, 3)"));
 -    baseExprs.add(new ExprColumn(new FieldSchema("sports_abbr", "array<string>", ""),
 -      "user sports", "substr(sports, 3)"));
 -    baseExprs.add(new ExprColumn(new FieldSchema("sportids_abbr", "array<string>", ""),
 -      "user sports", "case when sportids == 1 then 'CKT' when sportids == 2 then 'FTB' else 'NON' end"));
 -    baseExprs.add(new ExprColumn(new FieldSchema("directMsrExpr", "bigint", ""),
 -      "Direct Measure", new ExprSpec("directMsr + 0", null, null), new ExprSpec("msr13 + msr14", null, null)));
 -    client.createCube(BASE_CUBE_NAME, cubeMeasures2, cubeDimensions2, baseExprs, joinChains, cubeProperties);
 -
 -    Map<String, String> derivedProperties = new HashMap<>();
 -    derivedProperties.put(MetastoreConstants.CUBE_ALL_FIELDS_QUERIABLE, "true");
 -    Set<String> measures = new HashSet<>();
 -    measures.add("msr1");
 -    measures.add("msr9");
 -    measures.add("msr11");
 -    Set<String> dimensions = new HashSet<>();
 -    dimensions.add("dim1");
 -    dimensions.add("dim11");
 -    dimensions.add("d_time");
 -    client.createDerivedCube(BASE_CUBE_NAME, DERIVED_CUBE_NAME1, measures, dimensions, derivedProperties, 5L);
 -
 -    measures = new HashSet<>();
 -    measures.add("msr2");
 -    measures.add("msr12");
 -    measures.add("msr13");
 -    measures.add("msr14");
 -    measures.add("directmsr");
 -    dimensions = new HashSet<>();
 -    dimensions.add("cityid");
 -    dimensions.add("stateid");
 -    dimensions.add("userid");
 -    dimensions.add("xuserid");
 -    dimensions.add("yuserid");
 -    dimensions.add("dim1");
 -    dimensions.add("dim2");
 -    dimensions.add("dim2big1");
 -    dimensions.add("dim2big2");
 -    dimensions.add("dim2bignew");
 -    dimensions.add("dim11");
 -    dimensions.add("dim13");
 -    dimensions.add("dim12");
 -    dimensions.add("dim22");
 -    dimensions.add("d_time");
 -    dimensions.add("test_time_dim");
 -    dimensions.add("test_time_dim2");
 -    dimensions.add("test_time_dim_hour_id");
 -    dimensions.add("test_time_dim_day_id");
 -    dimensions.add("test_time_dim_hour_id2");
 -    dimensions.add("test_time_dim_day_id2");
 -    client.createDerivedCube(BASE_CUBE_NAME, DERIVED_CUBE_NAME2, measures, dimensions, derivedProperties, 10L);
 -    measures = new HashSet<>();
 -    measures.add("msr3");
 -    measures.add("msr13");
 -    dimensions = new HashSet<>();
 -    dimensions.add("dim1");
 -    dimensions.add("location");
 -    dimensions.add("d_time");
 -    dimensions.add("test_time_dim");
 -    dimensions.add("test_time_dim2");
 -    dimensions.add("test_time_dim_hour_id");
 -    dimensions.add("test_time_dim_day_id");
 -    dimensions.add("test_time_dim_hour_id2");
 -    dimensions.add("test_time_dim_day_id2");
 -    client.createDerivedCube(BASE_CUBE_NAME, DERIVED_CUBE_NAME3, measures, dimensions, derivedProperties, 20L);
 -
 -    // create base cube facts
 -    createBaseCubeFacts(client);
 -  }
 -
 -  private void createBaseCubeFacts(CubeMetastoreClient client) throws HiveException, LensException {
 -
 -    Map<String, Set<UpdatePeriod>> storageAggregatePeriods = new HashMap<String, Set<UpdatePeriod>>();
 -    Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
 -    updates.add(MINUTELY);
 -    updates.add(HOURLY);
 -    updates.add(DAILY);
 -    updates.add(MONTHLY);
 -    updates.add(QUARTERLY);
 -    updates.add(YEARLY);
 -
 -    ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
 -    List<String> timePartCols = new ArrayList<String>();
 -    partCols.add(TestCubeMetastoreClient.getDatePartition());
 -    timePartCols.add(TestCubeMetastoreClient.getDatePartitionKey());
 -
 -    StorageTableDesc s1 = new StorageTableDesc();
 -    s1.setInputFormat(TextInputFormat.class.getCanonicalName());
 -    s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
 -    s1.setPartCols(partCols);
 -    s1.setTimePartCols(timePartCols);
 -
 -    StorageTableDesc s2 = new StorageTableDesc();
 -    s2.setInputFormat(TextInputFormat.class.getCanonicalName());
 -    s2.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
 -    ArrayList<FieldSchema> s2PartCols = new ArrayList<FieldSchema>();
 -    s2PartCols.add(new FieldSchema("ttd", serdeConstants.STRING_TYPE_NAME, "test date partition"));
 -    s2PartCols.add(new FieldSchema("ttd2", serdeConstants.STRING_TYPE_NAME, "test date partition"));
 -    s2.setPartCols(s2PartCols);
 -    s2.setTimePartCols(Arrays.asList("ttd", "ttd2"));
 -
 -    storageAggregatePeriods.put(c1, updates);
 -    storageAggregatePeriods.put(c2, updates);
 -    storageAggregatePeriods.put(c3, updates);
 -    storageAggregatePeriods.put(c4, updates);
 -
 -    Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
 -    storageTables.put(c1, s1);
 -    storageTables.put(c4, s2);
 -    storageTables.put(c2, s1);
 -    storageTables.put(c3, s1);
 -
 -    String factName = "testFact1_BASE";
 -    List<FieldSchema> factColumns = new ArrayList<FieldSchema>(cubeMeasures.size());
 -    for (CubeMeasure measure : cubeMeasures) {
 -      factColumns.add(measure.getColumn());
 -    }
  
 -    // add dimensions of the cube
 -    factColumns.add(new FieldSchema("d_time", "timestamp", "event time"));
 -    factColumns.add(new FieldSchema("processing_time", "timestamp", "processing time"));
 -    factColumns.add(new FieldSchema("zipcode", "int", "zip"));
 -    factColumns.add(new FieldSchema("cityid", "int", "city id"));
 -    factColumns.add(new FieldSchema("stateid", "int", "state id"));
 -    factColumns.add(new FieldSchema("userid", "int", "user id"));
 -    factColumns.add(new FieldSchema("xuserid", "int", "user id"));
 -    factColumns.add(new FieldSchema("yuserid", "int", "user id"));
 -    factColumns.add(new FieldSchema("dim1", "string", "base dim"));
 -    factColumns.add(new FieldSchema("dim11", "string", "base dim"));
 -    factColumns.add(new FieldSchema("test_time_dim_hour_id", "int", "time id"));
 -
 -    // create cube fact with materialized expressions
 -    client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 5L,
 -      factValidityProperties, storageTables);
 -
 -    factName = "testFact5_BASE";
 -    factColumns = new ArrayList<>(cubeMeasures.size());
 -    for (CubeMeasure measure : cubeMeasures) {
 -      factColumns.add(measure.getColumn());
 -    }
 -
 -    // add dimensions of the cube
 -    factColumns.add(new FieldSchema("d_time", "timestamp", "event time"));
 -    factColumns.add(new FieldSchema("processing_time", "timestamp", "processing time"));
 -    factColumns.add(new FieldSchema("dim1", "string", "base dim"));
 -    factColumns.add(new FieldSchema("booleancut", "boolean", "expr dim"));
 -
 -    // create cube fact
 -    client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 150L,
 -      factValidityProperties, storageTables);
 -
 -    // create fact only with extra measures
 -    factName = "testFact2_BASE";
 -    factColumns = new ArrayList<FieldSchema>();
 -    factColumns.add(new FieldSchema("msr12", "float", "second measure"));
 -
 -    // add dimensions of the cube
 -    factColumns.add(new FieldSchema("d_time", "timestamp", "event time"));
 -    factColumns.add(new FieldSchema("processing_time", "timestamp", "processing time"));
 -    factColumns.add(new FieldSchema("dim1", "string", "base dim"));
 -    factColumns.add(new FieldSchema("dim11", "string", "base dim"));
 -    factColumns.add(new FieldSchema("dim2", "int", "dim2 id"));
 -    factColumns.add(new FieldSchema("userid", "int", "user id"));
 -    factColumns.add(new FieldSchema("xuserid", "int", "user id"));
 -    factColumns.add(new FieldSchema("yuserid", "int", "user id"));
 -    // create cube fact
 -    client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 5L,
 -      factValidityProperties, storageTables);
 -    Map<String, String> properties = Maps.newHashMap(factValidityProperties);
 -    properties.put(MetastoreConstants.FACT_ABSOLUTE_END_TIME, DateUtil.relativeToAbsolute("now.day - 2 days"));
 -    properties.put(MetastoreConstants.FACT_ABSOLUTE_START_TIME, DateUtil.relativeToAbsolute("now.day - 3 days"));
 -    client.createCubeFactTable(BASE_CUBE_NAME, "testfact_deprecated", factColumns, storageAggregatePeriods, 5L,
 -      properties, storageTables);
 -
 -    // create fact only with extra measures
 -    factName = "testFact3_BASE";
 -    factColumns = new ArrayList<FieldSchema>();
 -    factColumns.add(new FieldSchema("msr13", "double", "third measure"));
 -    factColumns.add(new FieldSchema("msr14", "bigint", "fourth measure"));
 -
 -    // add dimensions of the cube
 -    factColumns.add(new FieldSchema("d_time", "timestamp", "event time"));
 -    factColumns.add(new FieldSchema("processing_time", "timestamp", "processing time"));
 -    factColumns.add(new FieldSchema("dim1", "string", "base dim"));
 -    factColumns.add(new FieldSchema("dim11", "string", "base dim"));
 -
 -    // create cube fact
 -    client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 5L,
 -      factValidityProperties, storageTables);
 -
 -    // create fact with materialized expression
 -    factName = "testFact6_BASE";
 -    factColumns = new ArrayList<>();
 -    factColumns.add(new FieldSchema("msr13", "double", "third measure"));
 -    factColumns.add(new FieldSchema("msr14", "bigint", "fourth measure"));
 -
 -    // add dimensions of the cube
 -    factColumns.add(new FieldSchema("d_time", "timestamp", "event time"));
 -    factColumns.add(new FieldSchema("processing_time", "timestamp", "processing time"));
 -    factColumns.add(new FieldSchema("dim1", "string", "base dim"));
 -    factColumns.add(new FieldSchema("booleancut", "boolean", "expr dim"));
 -
 -    // create cube fact
 -    client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 150L,
 -      factValidityProperties, storageTables);
 -
 -    // create raw fact only with extra measures
 -    factName = "testFact2_RAW_BASE";
 -    factColumns = new ArrayList<FieldSchema>();
 -    factColumns.add(new FieldSchema("msr11", "int", "first measure"));
 -    factColumns.add(new FieldSchema("msr12", "float", "second measure"));
 -    factColumns.add(new FieldSchema("msr9", "bigint", "ninth measure"));
 -
 -    // add dimensions of the cube
 -    factColumns.add(new FieldSchema("d_time", "timestamp", "event time"));
 -    factColumns.add(new FieldSchema("processing_time", "timestamp", "processing time"));
 -    factColumns.add(new FieldSchema("dim1", "string", "base dim"));
 -    factColumns.add(new FieldSchema("dim11", "string", "base dim"));
 -    factColumns.add(new FieldSchema("dim13", "string", "base dim"));
 -    factColumns.add(new FieldSchema("dim12", "string", "base dim"));
 -    factColumns.add(new FieldSchema("dim22", "string", "base dim"));
 -    factColumns.add(new FieldSchema("cityid", "int", "city id"));
 -
 -    storageAggregatePeriods = new HashMap<String, Set<UpdatePeriod>>();
 -    updates = new HashSet<UpdatePeriod>();
 -    updates.add(HOURLY);
 -    storageAggregatePeriods.put(c1, updates);
 -
 -    storageTables = new HashMap<String, StorageTableDesc>();
 -    storageTables.put(c1, s1);
 -
 -    // create cube fact
 -    properties.clear();
 -    properties.putAll(factValidityProperties);
 -    properties.put(MetastoreConstants.FACT_AGGREGATED_PROPERTY, "false");
 -    properties.put(MetastoreConstants.FACT_DATA_COMPLETENESS_TAG, "f2");
 -
 -    client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 100L, properties,
 -      storageTables);
 -
 -    // create raw fact only with extra measures
 -    factName = "testFact3_RAW_BASE";
 -    factColumns = new ArrayList<FieldSchema>();
 -    factColumns.add(new FieldSchema("msr13", "double", "third measure"));
 -    factColumns.add(new FieldSchema("msr14", "bigint", "fourth measure"));
 -
 -    // add dimensions of the cube
 -    factColumns.add(new FieldSchema("d_time", "timestamp", "event time"));
 -    factColumns.add(new FieldSchema("processing_time", "timestamp", "processing time"));
 -    factColumns.add(new FieldSchema("dim1", "string", "base dim"));
 -    factColumns.add(new FieldSchema("dim11", "string", "base dim"));
 -    factColumns.add(new FieldSchema("dim12", "string", "base dim"));
 -
 -    storageAggregatePeriods = new HashMap<String, Set<UpdatePeriod>>();
 -    updates = new HashSet<UpdatePeriod>();
 -    updates.add(HOURLY);
 -    storageAggregatePeriods.put(c1, updates);
 -
 -    storageTables = new HashMap<String, StorageTableDesc>();
 -    storageTables.put(c1, s1);
 -    properties.put(MetastoreConstants.FACT_COL_START_TIME_PFX.concat("user_id_added_in_past"), "2016-01-01");
 -    properties.put(MetastoreConstants.FACT_COL_END_TIME_PFX.concat("user_id_deprecated"), "2016-01-01");
 -    properties.put(MetastoreConstants.FACT_COL_START_TIME_PFX.concat("user_id_added_far_future"), "2099-01-01");
 -    client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 100L, properties,
 -      storageTables);
 -
 -    factName = "testFact4_RAW_BASE";
 -    factColumns = new ArrayList<FieldSchema>();
 -    factColumns.add(new FieldSchema("msr13", "double", "third measure"));
 -    factColumns.add(new FieldSchema("msr14", "bigint", "fourth measure"));
 -
 -    // add dimensions of the cube
 -    factColumns.add(new FieldSchema("d_time", "timestamp", "event time"));
 -    factColumns.add(new FieldSchema("processing_time", "timestamp", "processing time"));
 -    factColumns.add(new FieldSchema("dim1", "string", "base dim"));
 -    factColumns.add(new FieldSchema("user_id_added_in_past", "int", "user id"));
 -    factColumns.add(new FieldSchema("user_id_added_far_future", "int", "user id"));
 -    factColumns.add(new FieldSchema("user_id_deprecated", "int", "user id"));
 -
 -    storageTables = new HashMap<String, StorageTableDesc>();
 -    storageTables.put(c1, s1);
 -    properties.put(MetastoreConstants.FACT_COL_START_TIME_PFX.concat("user_id_added_in_past"), "2016-01-01");
 -    properties.put(MetastoreConstants.FACT_COL_END_TIME_PFX.concat("user_id_deprecated"), "2016-01-01");
 -    properties.put(MetastoreConstants.FACT_COL_START_TIME_PFX.concat("user_id_added_far_future"), "2099-01-01");
 -    client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 100L, properties,
 -        storageTables);
 -
 -    factName = "testFact5_RAW_BASE";
 -    factColumns = new ArrayList<FieldSchema>();
 -    factColumns.add(new FieldSchema("msr9", "bigint", "ninth measure"));
 -
 -    // add dimensions of the cube
 -    factColumns.add(new FieldSchema("d_time", "timestamp", "event time"));
 -    factColumns.add(new FieldSchema("processing_time", "timestamp", "processing time"));
 -    factColumns.add(new FieldSchema("dim1", "string", "base dim"));
 -
 -    properties.clear();
 -    properties.putAll(factValidityProperties);
 -    properties.put(MetastoreConstants.FACT_AGGREGATED_PROPERTY, "false");
 -    properties.put(MetastoreConstants.FACT_DATA_COMPLETENESS_TAG, "f2");
 -    client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 100L, properties,
 -            storageTables);
 -
 -    CubeFactTable fact = client.getFactTable(factName);
 -    // Add all hourly partitions for two days
 -    Calendar cal = Calendar.getInstance();
 -    cal.setTime(TWODAYS_BACK);
 -    Date temp = cal.getTime();
 -    while (!(temp.after(NOW))) {
 -      Map<String, Date> timeParts = new HashMap<String, Date>();
 -      timeParts.put("dt", temp);
 -      StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, HOURLY);
 -      client.addPartition(sPartSpec, c1, CubeTableType.FACT);
 -      cal.add(HOUR_OF_DAY, 1);
 -      temp = cal.getTime();
 -    }
 -  }
 -
 -  private void createCubeContinuousFact(CubeMetastoreClient client) throws Exception {
 -    // create continuous raw fact only with extra measures
 -    String factName = "testFact_CONTINUOUS";
 -    List<FieldSchema> factColumns = new ArrayList<FieldSchema>();
 -    factColumns.add(new FieldSchema("msr11", "double", "third measure"));
 -    factColumns.add(new FieldSchema("msr15", "int", "fifteenth measure"));
 -
 -    // add dimensions of the cube
 -    factColumns.add(new FieldSchema("d_time", "timestamp", "event time"));
 -    factColumns.add(new FieldSchema("processing_time", "timestamp", "processing time"));
 -    factColumns.add(new FieldSchema("dim1", "string", "base dim"));
 -    factColumns.add(new FieldSchema("dim11", "string", "base dim"));
 -    factColumns.add(new FieldSchema("dim12", "string", "base dim"));
 -
 -    Map<String, Set<UpdatePeriod>> storageAggregatePeriods = new HashMap<String, Set<UpdatePeriod>>();
 -    Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
 -    updates.add(CONTINUOUS);
 -    storageAggregatePeriods.put(c0, updates);
 -
 -    StorageTableDesc s0 = new StorageTableDesc();
 -    s0.setInputFormat(TextInputFormat.class.getCanonicalName());
 -    s0.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
 -
 -    Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
 -    storageTables.put(c0, s0);
 -    Map<String, String> properties = Maps.newHashMap(factValidityProperties);
 -    properties.put(MetastoreConstants.FACT_ABSOLUTE_START_TIME, DateUtil.relativeToAbsolute("now.day - 3 days"));
 -
 -    client.createCubeFactTable(TEST_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 100L, properties,
 -      storageTables);
 -  }
 -
 -  private void createCubeFact(CubeMetastoreClient client) throws Exception {
 +  private void assertTestFactTimelineClass(CubeMetastoreClient client) throws Exception {
      String factName = "testFact";
 -    List<FieldSchema> factColumns = new ArrayList<FieldSchema>(cubeMeasures.size());
 -    for (CubeMeasure measure : cubeMeasures) {
 -      if (!measure.getColumn().getName().equals("msr15")) { //do not add msr15
 -        factColumns.add(measure.getColumn());
 -      }
 -    }
 -    factColumns.add(new FieldSchema("msr5", "double", "msr5"));
 -
 -    // add dimensions of the cube
 -    factColumns.add(new FieldSchema("zipcode", "int", "zip"));
 -    factColumns.add(new FieldSchema("cityid", "int", "city id"));
 -    factColumns.add(new FieldSchema("cityid1", "int", "city id"));
 -    factColumns.add(new FieldSchema("stateid", "int", "city id"));
 -    factColumns.add(new FieldSchema("test_time_dim_day_id", "int", "time id"));
 -    factColumns.add(new FieldSchema("test_time_dim_day_id2", "int", "time id"));
 -    factColumns.add(new FieldSchema("ambigdim1", "string", "used in" + " testColumnAmbiguity"));
 -
 -    Map<String, Set<UpdatePeriod>> storageAggregatePeriods = new HashMap<String, Set<UpdatePeriod>>();
 -    Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
 -    updates.add(MINUTELY);
 -    updates.add(HOURLY);
 -    updates.add(DAILY);
 -    updates.add(MONTHLY);
 -    updates.add(QUARTERLY);
 -    updates.add(YEARLY);
 -
 -    ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
 -    List<String> timePartCols = new ArrayList<String>();
 -    partCols.add(TestCubeMetastoreClient.getDatePartition());
 -    timePartCols.add(TestCubeMetastoreClient.getDatePartitionKey());
 -
 -    StorageTableDesc s1 = new StorageTableDesc();
 -    s1.setInputFormat(TextInputFormat.class.getCanonicalName());
 -    s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
 -    s1.setPartCols(partCols);
 -    s1.setTimePartCols(timePartCols);
 -
 -    StorageTableDesc s2 = new StorageTableDesc();
 -    s2.setInputFormat(TextInputFormat.class.getCanonicalName());
 -    s2.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
 -    ArrayList<FieldSchema> s2PartCols = new ArrayList<FieldSchema>();
 -    s2PartCols.add(new FieldSchema("ttd", serdeConstants.STRING_TYPE_NAME, "test date partition"));
 -    s2PartCols.add(new FieldSchema("ttd2", serdeConstants.STRING_TYPE_NAME, "test date partition"));
 -    s2.setPartCols(s2PartCols);
 -    s2.setTimePartCols(Arrays.asList("ttd", "ttd2"));
 -
 -    StorageTableDesc s3 = new StorageTableDesc();
 -    s3.setInputFormat(TextInputFormat.class.getCanonicalName());
 -    s3.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
 -    s3.setPartCols(partCols);
 -    s3.setTimePartCols(timePartCols);
 -    s3.getTblProps().put(MetastoreUtil.getStoragetableStartTimesKey(), "now.day - 90 days");
 -    s3.getTblProps().put(MetastoreUtil.getStoragetableEndTimesKey(), "now.day - 10 days");
 -
 -    StorageTableDesc s5 = new StorageTableDesc();
 -    s5.setInputFormat(TextInputFormat.class.getCanonicalName());
 -    s5.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
 -    s5.setPartCols(partCols);
 -    s5.setTimePartCols(timePartCols);
 -    s5.getTblProps().put(MetastoreUtil.getStoragetableStartTimesKey(), "now.day - 10 days");
 -
 -    storageAggregatePeriods.put(c1, updates);
 -    storageAggregatePeriods.put(c2, updates);
 -    storageAggregatePeriods.put(c3, updates);
 -    storageAggregatePeriods.put(c4, updates);
 -    storageAggregatePeriods.put(c5, updates);
 -
 -    Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
 -    storageTables.put(c1, s1);
 -    storageTables.put(c4, s2);
 -    storageTables.put(c2, s1);
 -    storageTables.put(c3, s3);
 -    storageTables.put(c5, s5);
 -
 -    //add storage with continuous update period
 -    updates.add(CONTINUOUS);
 -    storageAggregatePeriods.put(c0, updates);
 -    StorageTableDesc s0 = new StorageTableDesc();
 -    s0.setInputFormat(TextInputFormat.class.getCanonicalName());
 -    s0.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
 -    storageTables.put(c0, s0);
 -
 -    // create cube fact
 -    client.createCubeFactTable(TEST_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 5L,
 -      factValidityProperties, storageTables);
 +
      client.getTimelines(factName, c1, null, null);
      client.getTimelines(factName, c4, null, null);
 +
      client.clearHiveTableCache();
 +
      CubeFactTable fact = client.getFactTable(factName);
      Table table = client.getTable(MetastoreUtil.getStorageTableName(fact.getName(), Storage.getPrefix(c1)));
      assertEquals(table.getParameters().get(MetastoreUtil.getPartitionTimelineCachePresenceKey()), "true");
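
For context, the timeline-cache check kept by the hunk above reduces to the pattern below. This is a minimal sketch, assuming a CubeMetastoreClient with the fact and its storage tables already registered; every metastore call shown appears verbatim in the hunk:

  import static org.testng.Assert.assertEquals;

  import org.apache.hadoop.hive.ql.metadata.Table;
  import org.apache.lens.cube.metadata.CubeFactTable;
  import org.apache.lens.cube.metadata.CubeMetastoreClient;
  import org.apache.lens.cube.metadata.MetastoreUtil;
  import org.apache.lens.cube.metadata.Storage;

  // Sketch: once timelines are fetched, the storage table should carry the
  // timeline-cache-presence marker, even after the Hive table cache is cleared.
  private void verifyTimelineCached(CubeMetastoreClient client, String factName, String storageName)
    throws Exception {
    client.getTimelines(factName, storageName, null, null); // triggers timeline load
    client.clearHiveTableCache();                           // drops cached Table objects
    CubeFactTable fact = client.getFactTable(factName);
    Table storageTable = client.getTable(
      MetastoreUtil.getStorageTableName(fact.getName(), Storage.getPrefix(storageName)));
    assertEquals(storageTable.getParameters()
      .get(MetastoreUtil.getPartitionTimelineCachePresenceKey()), "true");
  }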

http://git-wip-us.apache.org/repos/asf/lens/blob/3ba2fad1/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
----------------------------------------------------------------------
diff --cc lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
index 897891c,cabb95e..4331843
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBetweenTimeRangeWriter.java
@@@ -149,9 -163,18 +149,10 @@@ public class TestBetweenTimeRangeWrite
      validateBetweenBoundTypes(whereClause, DAY_DB_FORMAT, testStartOffset, testEndOffset);
    }
  
 -  private void validateBetweenBoundTypes(String whereClause, DateFormat format,
 -    int testStartOffset, int testEndOffset) {
 -    String expected = null;
 -    if (format == null) {
 -      expected =
 -        getBetweenClause("test", "dt", getDateWithOffset(DAILY, testStartOffset),
 -          getDateWithOffset(DAILY, testEndOffset), DAILY.format());
 -    } else {
 -      expected =
 -        getBetweenClause("test", "dt", getDateWithOffset(DAILY, testStartOffset),
 -          getDateWithOffset(DAILY, testEndOffset), format);
 -    }
++
 +  private void validateBetweenBoundTypes(String whereClause, DateFormat format, int testStartOffset, int testEndOffset) {
 +    String expected = getBetweenClause("test", "dt", getDateWithOffset(DAILY, testStartOffset),
 +      getDateWithOffset(DAILY, testEndOffset), ofNullable(format).orElseGet(DAILY::format));
      Assert.assertEquals(expected, whereClause);
    }
  }
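
The rewritten validateBetweenBoundTypes folds the old null check on format into one expression: ofNullable(format).orElseGet(DAILY::format) evaluates DAILY::format only when format is null, so both branches of the deleted if/else collapse into a single getBetweenClause call. The same fallback idiom in isolation (a generic sketch; the "yyyy-MM-dd" default is a stand-in for whatever DAILY.format() returns in Lens):

  import static java.util.Optional.ofNullable;

  import java.text.DateFormat;
  import java.text.SimpleDateFormat;

  class FormatFallback {
    // Returns the given format, or lazily builds a default when it is null;
    // orElseGet invokes its supplier only if the Optional is empty.
    static DateFormat resolve(DateFormat format) {
      return ofNullable(format).orElseGet(() -> new SimpleDateFormat("yyyy-MM-dd"));
    }
  }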

http://git-wip-us.apache.org/repos/asf/lens/blob/3ba2fad1/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lens/blob/3ba2fad1/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
----------------------------------------------------------------------
diff --cc lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
index abaae5b,24660e1..62efc2d
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
@@@ -227,9 -233,23 +227,8 @@@ public class CubeMetastoreServiceImpl e
    @Override
    public void createDimensionTable(LensSessionHandle sessionid, XDimensionTable xDimTable) throws LensException {
      String dimTblName = xDimTable.getTableName();
 -    List<FieldSchema> columns = JAXBUtils.fieldSchemaListFromColumns(xDimTable.getColumns());
 -    Map<String, UpdatePeriod> updatePeriodMap =
 -      JAXBUtils.dumpPeriodsFromStorageTables(xDimTable.getStorageTables());
 -
 -    Map<String, String> properties = JAXBUtils.mapFromXProperties(xDimTable.getProperties());
 -    Map<String, StorageTableDesc> storageDesc = JAXBUtils.tableDescPrefixMapFromXStorageTables(
 -      xDimTable.getStorageTables());
--
      try (SessionContext ignored = new SessionContext(sessionid)){
 -      log.info("# Columns: " + columns);
 -      getClient(sessionid).createCubeDimensionTable(xDimTable.getDimensionName(),
 -        dimTblName,
 -        columns,
 -        xDimTable.getWeight(),
 -        updatePeriodMap,
 -        properties,
 -        storageDesc);
 +      getClient(sessionid).createCubeDimensionTable(xDimTable);
        log.info("Dimension Table created " + xDimTable.getTableName());
      }
    }
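
With the change above, createDimensionTable hands the JAXB object straight to the metastore client instead of unpacking it in the service layer. A sketch of the delegating overload this implies on CubeMetastoreClient, reusing the JAXBUtils conversions deleted above (the overload body is an assumption; only the call shapes and argument order come from this diff):

  // Sketch: accept the JAXB type directly and perform the conversions that
  // CubeMetastoreServiceImpl.createDimensionTable used to do inline.
  public void createCubeDimensionTable(XDimensionTable xDimTable) throws LensException {
    createCubeDimensionTable(
      xDimTable.getDimensionName(),
      xDimTable.getTableName(),
      JAXBUtils.fieldSchemaListFromColumns(xDimTable.getColumns()),
      xDimTable.getWeight(),
      JAXBUtils.dumpPeriodsFromStorageTables(xDimTable.getStorageTables()),
      JAXBUtils.mapFromXProperties(xDimTable.getProperties()),
      JAXBUtils.tableDescPrefixMapFromXStorageTables(xDimTable.getStorageTables()));
  }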