Posted to commits@lens.apache.org by pr...@apache.org on 2015/09/25 08:46:45 UTC

lens git commit: LENS-752: Support flattening of columns selected through bridge-tables (many-to-many relationships)

Repository: lens
Updated Branches:
  refs/heads/master d524cc24e -> b2c8fc02c


LENS-752: Support flattening of columns selected through bridge-tables (many-to-many relationships)


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/b2c8fc02
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/b2c8fc02
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/b2c8fc02

Branch: refs/heads/master
Commit: b2c8fc02c8e194eb9b11151fc4a805b6b77d68cf
Parents: d524cc2
Author: Amareshwari Sriramadasu <am...@gmail.com>
Authored: Fri Sep 25 12:16:06 2015 +0530
Committer: Rajat Khandelwal <ra...@gmail.com>
Committed: Fri Sep 25 12:16:06 2015 +0530

----------------------------------------------------------------------
 lens-api/src/main/resources/cube-0.1.xsd        |   8 +
 .../apache/lens/cli/TestLensCubeCommands.java   |   2 +-
 .../apache/lens/cube/metadata/JoinChain.java    |   6 +-
 .../lens/cube/metadata/MetastoreUtil.java       |   1 +
 .../apache/lens/cube/metadata/SchemaGraph.java  |  45 ++--
 .../lens/cube/metadata/TableReference.java      |  73 ++-----
 .../lens/cube/parse/CubeQueryConfUtil.java      |   4 +
 .../apache/lens/cube/parse/JoinResolver.java    | 147 +++++++++++--
 .../src/main/resources/olap-query-conf.xml      |  16 ++
 .../apache/lens/cube/parse/CubeTestSetup.java   | 210 ++++++++++++++++++-
 .../lens/cube/parse/TestJoinResolver.java       | 193 ++++++++++++++++-
 .../apache/lens/server/metastore/JAXBUtils.java |   9 +-
 .../server/metastore/TestMetastoreService.java  |   4 +-
 src/site/apt/user/olap-cube.apt                 |  99 +++++++++
 src/site/apt/user/olap-query-conf.apt           |  44 ++--
 15 files changed, 714 insertions(+), 147 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/b2c8fc02/lens-api/src/main/resources/cube-0.1.xsd
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/cube-0.1.xsd b/lens-api/src/main/resources/cube-0.1.xsd
index 58f68f5..5d7630d 100644
--- a/lens-api/src/main/resources/cube-0.1.xsd
+++ b/lens-api/src/main/resources/cube-0.1.xsd
@@ -521,6 +521,14 @@
   <xs:complexType name="x_table_reference">
     <xs:attribute type="xs:string" name="table" use="required"/>
     <xs:attribute type="xs:string" name="column" use="required"/>
+    <xs:attribute type="xs:boolean" name="maps_to_many" use="optional" default="false">
+      <xs:annotation>
+        <xs:documentation>
+          This boolean specifies whether the destination reference can map to many rows for a single value of the
+          source reference. It mainly signifies that bridge tables are involved.
+        </xs:documentation>
+      </xs:annotation>
+    </xs:attribute>
   </xs:complexType>
 
   <xs:complexType name="x_table_references">
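
For illustration, the new attribute corresponds to the boolean that the Java API now carries on TableReference; a
join-chain path crossing a bridge table marks the many-to-many edge roughly like this (a sketch in the style of the
CubeTestSetup changes below, using its test fixtures userdim, user_interests and sports, and assuming the usual
org.apache.lens.cube.metadata imports):

  List<TableReference> path = new ArrayList<TableReference>();
  path.add(new TableReference("basecube", "userid"));
  path.add(new TableReference("userdim", "id"));
  path.add(new TableReference("userdim", "id"));
  // the third argument marks the many-to-many edge, i.e. maps_to_many="true" in the XML schema
  path.add(new TableReference("user_interests", "user_id", true));
  path.add(new TableReference("user_interests", "sport_id"));
  path.add(new TableReference("sports", "id"));
  JoinChain userSports = new JoinChain("userSports", "user-sports", "user sports");
  userSports.addPath(path);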

http://git-wip-us.apache.org/repos/asf/lens/blob/b2c8fc02/lens-cli/src/test/java/org/apache/lens/cli/TestLensCubeCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/TestLensCubeCommands.java b/lens-cli/src/test/java/org/apache/lens/cli/TestLensCubeCommands.java
index 39441c9..b553739 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/TestLensCubeCommands.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/TestLensCubeCommands.java
@@ -103,7 +103,6 @@ public class TestLensCubeCommands extends LensCliApplicationTest {
     chain1.setName("testdetailchain");
     chain1.getPaths().getPath().add(path);
     chain1.setDestTable("test_detail");
-    chains.getJoinChain().add(chain1);
     XJoinChain chain2 = new XJoinChain();
     chain2.setPaths(new XJoinPaths());
     XJoinPath path2 = new XJoinPath();
@@ -122,6 +121,7 @@ public class TestLensCubeCommands extends LensCliApplicationTest {
     chain2.getPaths().getPath().add(path2);
     chain2.setDestTable("test_dim");
     chains.getJoinChain().add(chain2);
+    chains.getJoinChain().add(chain1);
     assertEquals(joinChains, new XJoinChainTable(chains).toString());
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/b2c8fc02/lens-cube/src/main/java/org/apache/lens/cube/metadata/JoinChain.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/JoinChain.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/JoinChain.java
index e394e20..6250905 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/JoinChain.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/JoinChain.java
@@ -178,7 +178,8 @@ public class JoinChain implements Named {
         relationShip = new TableRelationship(from.getDestColumn(),
           client.getDimension(from.getDestTable()),
           to.getDestColumn(),
-          client.getDimension(to.getDestTable()));
+          client.getDimension(to.getDestTable()),
+          to.isMapsToMany());
       }
       return relationShip;
     }
@@ -203,7 +204,8 @@ public class JoinChain implements Named {
           relationShip = new TableRelationship(from.getDestColumn(),
             fromTable,
             to.getDestColumn(),
-            client.getDimension(to.getDestTable()));
+            client.getDimension(to.getDestTable()),
+            to.isMapsToMany());
         }
       }
       return relationShip;

http://git-wip-us.apache.org/repos/asf/lens/blob/b2c8fc02/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
index bf27b99..2796cd9 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
@@ -130,6 +130,7 @@ public class MetastoreUtil {
     for (int i = 0; i < references.size(); i++) {
       TableReference reference = references.get(i);
       toks[i] = reference.getDestTable() + TABLE_COLUMN_SEPERATOR + reference.getDestColumn();
+      toks[i] += TABLE_COLUMN_SEPERATOR + reference.isMapsToMany();
     }
 
     return StringUtils.join(toks, ',');

http://git-wip-us.apache.org/repos/asf/lens/blob/b2c8fc02/lens-cube/src/main/java/org/apache/lens/cube/metadata/SchemaGraph.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/SchemaGraph.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/SchemaGraph.java
index 1a37e80..fa230ef 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/SchemaGraph.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/SchemaGraph.java
@@ -22,50 +22,30 @@ import java.util.*;
 
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 
-import lombok.Getter;
+import lombok.AllArgsConstructor;
+import lombok.Data;
+import lombok.RequiredArgsConstructor;
 
 public class SchemaGraph {
   /*
    * An edge in the schema graph
    */
+  @Data
+  @AllArgsConstructor
+  @RequiredArgsConstructor
   public static class TableRelationship {
-    @Getter
     final String fromColumn;
-    @Getter
     final AbstractCubeTable fromTable;
-    @Getter
     final String toColumn;
-    @Getter
     final AbstractCubeTable toTable;
-
-    public TableRelationship(String fromCol, AbstractCubeTable fromTab, String toCol, AbstractCubeTable toTab) {
-      fromColumn = fromCol;
-      fromTable = fromTab;
-      toColumn = toCol;
-      toTable = toTab;
-    }
+    boolean mapsToMany = false;
 
     @Override
     public String toString() {
-      return fromTable.getName() + "." + fromColumn + "->" + toTable.getName() + "." + toColumn;
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-      if (!(obj instanceof TableRelationship)) {
-        return false;
-      }
-
-      TableRelationship other = (TableRelationship) obj;
-
-      return fromColumn.equals(other.fromColumn) && toColumn.equals(other.toColumn)
-        && fromTable.equals(other.fromTable) && toTable.equals(other.toTable);
+      return fromTable.getName() + "." + fromColumn + "->" + toTable.getName() + "." + toColumn
+        + (mapsToMany ? "[n]" : "");
     }
 
-    @Override
-    public int hashCode() {
-      return toString().hashCode();
-    }
   }
 
   /**
@@ -331,7 +311,7 @@ public class SchemaGraph {
           if (metastore.isDimension(destTableName)) {
             // Cube -> Dimension or Dimension -> Dimension reference
             Dimension relatedDim = metastore.getDimension(destTableName);
-            addLinks(refDim.getName(), cubeTable, destColumnName, relatedDim, outGraph, inGraph);
+            addLinks(refDim.getName(), cubeTable, destColumnName, relatedDim, ref.isMapsToMany(), outGraph, inGraph);
           } else {
             throw new HiveException("Dim -> Cube references are not supported: " + dim.getName() + "."
               + refDim.getName() + "->" + destTableName + "." + destColumnName);
@@ -342,9 +322,10 @@ public class SchemaGraph {
   }
 
   private void addLinks(String srcCol, AbstractCubeTable srcTbl, String destCol, AbstractCubeTable destTbl,
-    Map<AbstractCubeTable, Set<TableRelationship>> outGraph, Map<AbstractCubeTable, Set<TableRelationship>> inGraph) {
+    boolean mapsToMany, Map<AbstractCubeTable, Set<TableRelationship>> outGraph,
+    Map<AbstractCubeTable, Set<TableRelationship>> inGraph) {
 
-    TableRelationship rel = new TableRelationship(srcCol, srcTbl, destCol, destTbl);
+    TableRelationship rel = new TableRelationship(srcCol, srcTbl, destCol, destTbl, mapsToMany);
 
     Set<TableRelationship> inEdges = inGraph.get(destTbl);
     if (inEdges == null) {

http://git-wip-us.apache.org/repos/asf/lens/blob/b2c8fc02/lens-cube/src/main/java/org/apache/lens/cube/metadata/TableReference.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TableReference.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TableReference.java
index 31fd97b..24a5fe8 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TableReference.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TableReference.java
@@ -18,80 +18,33 @@
  */
 package org.apache.lens.cube.metadata;
 
-public class TableReference {
-  private String destTable;
-  private String destColumn;
+import lombok.Data;
 
-  public TableReference() {
-  }
+@Data
+public class TableReference {
+  private final String destTable;
+  private final String destColumn;
+  private boolean mapsToMany = false;
 
   public TableReference(String destTable, String destColumn) {
+    this(destTable, destColumn, false);
+  }
+  public TableReference(String destTable, String destColumn, boolean mapsToMany) {
     this.destTable = destTable.toLowerCase();
     this.destColumn = destColumn.toLowerCase();
+    this.mapsToMany = mapsToMany;
   }
-
   public TableReference(String reference) {
     String[] desttoks = reference.split("\\.+");
     this.destTable = desttoks[0];
     this.destColumn = desttoks[1];
-  }
-
-  public String getDestTable() {
-    return destTable;
-  }
-
-  public void setDestTable(String dest) {
-    this.destTable = dest;
-  }
-
-  public String getDestColumn() {
-    return destColumn;
-  }
-
-  public void setDestColumn(String destColumn) {
-    this.destColumn = destColumn;
-  }
-
-  @Override
-  public boolean equals(Object obj) {
-    if (this == obj) {
-      return true;
-    }
-    if (obj == null) {
-      return false;
-    }
-    if (getClass() != obj.getClass()) {
-      return false;
+    if (desttoks.length > 2) {
+      this.mapsToMany = Boolean.parseBoolean(desttoks[2]);
     }
-    TableReference other = (TableReference) obj;
-    if (this.getDestColumn() == null) {
-      if (other.getDestColumn() != null) {
-        return false;
-      }
-    } else if (!this.getDestColumn().equals(other.getDestColumn())) {
-      return false;
-    }
-    if (this.getDestTable() == null) {
-      if (other.getDestTable() != null) {
-        return false;
-      }
-    } else if (!this.getDestTable().equals(other.getDestTable())) {
-      return false;
-    }
-    return true;
   }
-
   @Override
   public String toString() {
-    return destTable + "." + destColumn;
+    return destTable + "." + destColumn + (mapsToMany ? "[n]" : "");
   }
 
-  @Override
-  public int hashCode() {
-    final int prime = 31;
-    int result = 1;
-    result = prime * result + ((destColumn == null) ? 0 : destColumn.hashCode());
-    result = prime * result + ((destTable == null) ? 0 : destTable.hashCode());
-    return result;
-  }
 }
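
A quick sketch of how the reworked TableReference behaves, based on the code above (the sample values are illustrative):

  // three-argument constructor marks a bridge-table (many-to-many) destination
  TableReference ref = new TableReference("user_interests", "user_id", true);
  assert ref.isMapsToMany();
  assert ref.toString().equals("user_interests.user_id[n]");

  // the dot-separated form "desttable.destcolumn.mapstomany" is parsed back by the string constructor,
  // which lines up with the extra mapsToMany token MetastoreUtil now appends when serializing references
  // (assuming TABLE_COLUMN_SEPERATOR is the dot the constructor splits on)
  TableReference parsed = new TableReference("user_interests.user_id.true");
  assert parsed.isMapsToMany();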

http://git-wip-us.apache.org/repos/asf/lens/blob/b2c8fc02/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
index aab2488..87972c8 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
@@ -104,4 +104,8 @@ public final class CubeQueryConfUtil {
   public static final Class<? extends TimeRangeWriter> DEFAULT_TIME_RANGE_WRITER = ORTimeRangeWriter.class
     .asSubclass(TimeRangeWriter.class);
   public static final String PART_WHERE_CLAUSE_DATE_FORMAT = "lens.cube.query.partition.where.clause.format";
+  public static final String ENABLE_FLATTENING_FOR_BRIDGETABLES = "lens.cube.query.enable.flattening.bridge.tables";
+  public static final boolean DEFAULT_ENABLE_FLATTENING_FOR_BRIDGETABLES = false;
+  public static final String BRIDGE_TABLE_FIELD_AGGREGATOR = "lens.cube.query.bridge.table.field.aggregator";
+  public static final String DEFAULT_BRIDGE_TABLE_FIELD_AGGREGATOR = "collect_set";
 }
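
The new keys are read from the query conf at rewrite time; enabling flattening and overriding the aggregate function
looks like this (a sketch mirroring the usage in TestJoinResolver below):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.lens.cube.parse.CubeQueryConfUtil;

  Configuration conf = new Configuration();
  // flatten columns selected through bridge tables (default: false)
  conf.setBoolean(CubeQueryConfUtil.ENABLE_FLATTENING_FOR_BRIDGETABLES, true);
  // aggregate applied to bridge-table fields when flattening (default: collect_set)
  conf.set(CubeQueryConfUtil.BRIDGE_TABLE_FIELD_AGGREGATOR, "collect_set");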

http://git-wip-us.apache.org/repos/asf/lens/blob/b2c8fc02/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
index c7f1e2a..a916159 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
@@ -325,11 +325,13 @@ class JoinResolver implements ContextRewriter {
     @Getter
     @Setter
     JoinClause minCostClause;
+    private final boolean flattenBridgeTables;
+    private final String bridgeTableFieldAggr;
 
     public AutoJoinContext(Map<Aliased<Dimension>, List<SchemaGraph.JoinPath>> allPaths,
       Map<Dimension, OptionalDimCtx> optionalDimensions, Map<AbstractCubeTable, String> partialJoinConditions,
       boolean partialJoinChains, Map<AbstractCubeTable, JoinType> tableJoinTypeMap, AbstractCubeTable autoJoinTarget,
-      String joinTypeCfg, boolean joinsResolved) {
+      String joinTypeCfg, boolean joinsResolved, boolean flattenBridgeTables, String bridgeTableFieldAggr) {
       this.allPaths = allPaths;
       initJoinPathColumns();
       this.partialJoinConditions = partialJoinConditions;
@@ -338,6 +340,8 @@ class JoinResolver implements ContextRewriter {
       this.autoJoinTarget = autoJoinTarget;
       this.joinTypeCfg = joinTypeCfg;
       this.joinsResolved = joinsResolved;
+      this.flattenBridgeTables = flattenBridgeTables;
+      this.bridgeTableFieldAggr = bridgeTableFieldAggr;
       log.debug("All join paths:{}", allPaths);
       log.debug("Join path from columns:{}", joinPathFromColumns);
       log.debug("Join path to columns:{}", joinPathToColumns);
@@ -429,14 +433,15 @@ class JoinResolver implements ContextRewriter {
         return fromString;
       }
       // Compute the merged join clause string for the min cost joinclause
-      String clause = getMergedJoinClause(cubeql.getAutoJoinCtx().getJoinClause(fact), dimsToQuery);
+      String clause = getMergedJoinClause(cubeql, cubeql.getAutoJoinCtx().getJoinClause(fact), dimsToQuery);
 
       fromString += clause;
       return fromString;
     }
 
     // Some refactoring needed to account for multiple join paths
-    public String getMergedJoinClause(JoinClause joinClause, Map<Dimension, CandidateDim> dimsToQuery) {
+    public String getMergedJoinClause(CubeQueryContext cubeql, JoinClause joinClause,
+                                      Map<Dimension, CandidateDim> dimsToQuery) {
       Set<String> clauses = new LinkedHashSet<String>();
       String joinTypeStr = "";
       JoinType joinType = JoinType.INNER;
@@ -452,6 +457,14 @@ class JoinResolver implements ContextRewriter {
       }
 
       Iterator<JoinTree> iter = joinClause.joinTree.dft();
+      boolean hasBridgeTable = false;
+      boolean initedBridgeClauses = false;
+      StringBuilder bridgeSelectClause = new StringBuilder();
+      StringBuilder bridgeFromClause = new StringBuilder();
+      StringBuilder bridgeFilterClause = new StringBuilder();
+      StringBuilder bridgeJoinClause = new StringBuilder();
+      StringBuilder bridgeGroupbyClause = new StringBuilder();
+
       while (iter.hasNext()) {
         JoinTree cur = iter.next();
         if (partialJoinChains) {
@@ -462,14 +475,7 @@ class JoinResolver implements ContextRewriter {
         String toAlias, fromAlias;
         fromAlias = cur.parent.getAlias();
         toAlias = cur.getAlias();
-        StringBuilder clause = new StringBuilder(joinTypeStr).append(" join ");
-        // Add storage table name followed by alias
-        clause.append(dimsToQuery.get(rel.getToTable()).getStorageString(toAlias));
-
-        clause.append(" on ").append(fromAlias).append(".")
-          .append(rel.getFromColumn()).append(" = ").append(toAlias)
-          .append(".").append(rel.getToColumn());
-
+        hasBridgeTable = flattenBridgeTables && (hasBridgeTable || rel.isMapsToMany());
         // We have to push user specified filters for the joined tables
         String userFilter = null;
         // Partition condition on the tables also needs to be pushed depending
@@ -536,14 +542,113 @@ class JoinResolver implements ContextRewriter {
             (leftStorageFilter == null ? "" : leftStorageFilter)
               + (rightStorgeFilter == null ? "" : rightStorgeFilter);
         }
+        StringBuilder clause = new StringBuilder();
+
+        // if a bridge table is present in the path
+        if (hasBridgeTable) {
+          // if any relation in the path has a bridge table, the clause becomes:
+          // " join (select <joinkey>, <aggregate over bridge table fields> from <bridge table>
+          //   [where user/storage filters] group by <joinkey>) on <join condition>"
+          // or
+          // " join (select <joinkey>, <aggregate over fields of tables reached through the bridge table> from <bridge
+          //   table> join <next tables> on <join condition> [and user/storage filters] group by <joinkey>) on <join condition>"
+          if (!initedBridgeClauses) {
+            // we just found a bridge table in the path; we need to initialize the clauses for the subquery required
+            // for aggregating the fields of the bridge table
+            // initialize the select clause with the join key
+            bridgeSelectClause.append(" (select ").append(toAlias).append(".").append(rel.getToColumn()).append(" as ")
+            .append(rel.getToColumn());
+            // group by join key
+            bridgeGroupbyClause.append(" group by ").append(toAlias).append(".").append(rel.getToColumn());
+            // from clause with bridge table
+            bridgeFromClause.append(" from ").append(dimsToQuery.get(rel.getToTable()).getStorageString(toAlias));
+            // we need to initialize the filter clause with the user filter or the storage filter, if applicable
+            if (StringUtils.isNotBlank(userFilter)) {
+              bridgeFilterClause.append(userFilter);
+            }
+            if (StringUtils.isNotBlank(storageFilter)) {
+              if (StringUtils.isNotBlank(bridgeFilterClause.toString())) {
+                bridgeFilterClause.append(" and ");
+              }
+              bridgeFilterClause.append(storageFilter);
+            }
+            // initialize final join clause
+            bridgeJoinClause.append(" on ").append(fromAlias).append(".")
+              .append(rel.getFromColumn()).append(" = ").append("%s")
+              .append(".").append(rel.getToColumn());
+            initedBridgeClauses = true;
+          } else {
+            // if the bridge clauses are already initialized, this is the next table getting joined with the bridge table
+            // we will append a simple join clause
+            bridgeFromClause.append(joinTypeStr).append(" join ");
+            bridgeFromClause.append(dimsToQuery.get(rel.getToTable()).getStorageString(toAlias));
+            bridgeFromClause.append(" on ").append(fromAlias).append(".")
+              .append(rel.getFromColumn()).append(" = ").append(toAlias)
+              .append(".").append(rel.getToColumn());
 
-        if (StringUtils.isNotBlank(userFilter)) {
-          clause.append(" and ").append(userFilter);
-        }
-        if (StringUtils.isNotBlank(storageFilter)) {
-          clause.append(" and ").append(storageFilter);
+            if (StringUtils.isNotBlank(userFilter)) {
+              bridgeFromClause.append(" and ").append(userFilter);
+            }
+            if (StringUtils.isNotBlank(storageFilter)) {
+              bridgeFromClause.append(" and ").append(storageFilter);
+            }
+          }
+          if (cubeql.getTblAliasToColumns().get(toAlias) != null
+            && !cubeql.getTblAliasToColumns().get(toAlias).isEmpty()) {
+            // there are fields selected from this table after seeing the bridge table in the path
+            // we should build a subquery for this selection
+            clause.append(joinTypeStr).append(" join ");
+            clause.append(bridgeSelectClause.toString());
+            for (String col : cubeql.getTblAliasToColumns().get(toAlias)) {
+              clause.append(",").append(bridgeTableFieldAggr).append("(").append(toAlias)
+                .append(".").append(col)
+                .append(")")
+                .append(" as ").append(col);
+            }
+            String bridgeFrom = bridgeFromClause.toString();
+            clause.append(bridgeFrom);
+            String bridgeFilter = bridgeFilterClause.toString();
+            if (StringUtils.isNotBlank(bridgeFilter)) {
+              if (bridgeFrom.contains(" join ")) {
+                clause.append(" and ");
+              } else {
+                clause.append(" where");
+              }
+              clause.append(bridgeFilter.toString());
+            }
+            clause.append(bridgeGroupbyClause.toString());
+            clause.append(") ").append(toAlias);
+            clause.append(String.format(bridgeJoinClause.toString(), toAlias));
+            clauses.add(clause.toString());
+          }
+          if (cur.getSubtrees().isEmpty()) {
+            // clear bridge flags and builders, as there are no more clauses in this tree.
+            hasBridgeTable = false;
+            initedBridgeClauses = false;
+            bridgeSelectClause.setLength(0);
+            bridgeFromClause.setLength(0);
+            bridgeFilterClause.setLength(0);
+            bridgeJoinClause.setLength(0);
+            bridgeGroupbyClause.setLength(0);
+          }
+        } else {
+          // Simple join clause is :
+          // jointype + " join " + destTable + " on " + joincond + [" and" + userfilter] + ["and" + storageFilter]
+          clause.append(joinTypeStr).append(" join ");
+          //Add storage table name followed by alias
+          clause.append(dimsToQuery.get(rel.getToTable()).getStorageString(toAlias));
+          clause.append(" on ").append(fromAlias).append(".")
+            .append(rel.getFromColumn()).append(" = ").append(toAlias)
+            .append(".").append(rel.getToColumn());
+
+          if (StringUtils.isNotBlank(userFilter)) {
+            clause.append(" and ").append(userFilter);
+          }
+          if (StringUtils.isNotBlank(storageFilter)) {
+            clause.append(" and ").append(storageFilter);
+          }
+          clauses.add(clause.toString());
         }
-        clauses.add(clause.toString());
       }
       return StringUtils.join(clauses, "");
     }
@@ -933,7 +1038,6 @@ class JoinResolver implements ContextRewriter {
   private AbstractCubeTable target;
   private HashMap<Dimension, List<JoinChain>> dimensionInJoinChain = new HashMap<Dimension, List<JoinChain>>();
 
-
   public JoinResolver(Configuration conf) {
   }
 
@@ -1105,9 +1209,14 @@ class JoinResolver implements ContextRewriter {
       multipleJoinPaths.get(aliasedDimension).addAll(
         chain.getRelationEdges(cubeql.getMetastoreClient()));
     }
+    boolean flattenBridgeTables = cubeql.getConf().getBoolean(CubeQueryConfUtil.ENABLE_FLATTENING_FOR_BRIDGETABLES,
+      CubeQueryConfUtil.DEFAULT_ENABLE_FLATTENING_FOR_BRIDGETABLES);
+    String bridgeTableFieldAggr = cubeql.getConf().get(CubeQueryConfUtil.BRIDGE_TABLE_FIELD_AGGREGATOR,
+      CubeQueryConfUtil.DEFAULT_BRIDGE_TABLE_FIELD_AGGREGATOR);
     AutoJoinContext joinCtx =
       new AutoJoinContext(multipleJoinPaths, cubeql.optionalDimensions, partialJoinConditions, partialJoinChain,
-        tableJoinTypeMap, target, cubeql.getConf().get(CubeQueryConfUtil.JOIN_TYPE_KEY), true);
+        tableJoinTypeMap, target, cubeql.getConf().get(CubeQueryConfUtil.JOIN_TYPE_KEY), true, flattenBridgeTables,
+        bridgeTableFieldAggr);
     cubeql.setAutoJoinCtx(joinCtx);
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/b2c8fc02/lens-cube/src/main/resources/olap-query-conf.xml
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/resources/olap-query-conf.xml b/lens-cube/src/main/resources/olap-query-conf.xml
index c9b8d0f..4c7b7fa 100644
--- a/lens-cube/src/main/resources/olap-query-conf.xml
+++ b/lens-cube/src/main/resources/olap-query-conf.xml
@@ -180,4 +180,20 @@
       projection wont be changed, result might include duplicate values.
     </description>
   </property>
+  <property>
+    <name>lens.cube.query.enable.flattening.bridge.tables</name>
+    <value>false</value>
+    <description>Flag that specifies whether selected fields have to be flattened when they come from tables joined
+      through a many-to-many relationship. If false, field selection is a simple join plus selecting the field. If
+      true, the fields from bridge tables will be aggregated, grouped by the join key.
+    </description>
+  </property>
+  <property>
+    <name>lens.cube.query.bridge.table.field.aggregator</name>
+    <value>collect_set</value>
+    <description>The field aggregator function to be used for aggregating fields from bridge tables. Would be used
+      only when flattening is enabled. The value can be passed for each query. During typical deployments it would
+      be passed for each driver, as the function could be different for each driver.
+    </description>
+  </property>
 </configuration>
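
Concretely, with flattening enabled the bridge-table portion of the join collapses into a grouped subquery; the
expected strings in TestJoinResolver below spell it out, and it roughly has this shape (using the test fixtures, with
TestQueryRewrite as the test database name):

  String flattenedJoin =
    " join (select user_interests.user_id as user_id, collect_set(usersports.name) as name"
    + " from TestQueryRewrite.c1_user_interests_tbl user_interests"
    + " join TestQueryRewrite.c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
    + " group by user_interests.user_id) usersports"
    + " on userdim.id = usersports.user_id";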

http://git-wip-us.apache.org/repos/asf/lens/blob/b2c8fc02/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 67f7ab9..705f19b 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -768,6 +768,9 @@ public class CubeTestSetup {
       new TableReference("testdim2", "id")));
     cubeDimensions2.add(new ReferencedDimAtrribute(new FieldSchema("dim22", "int", "ref dim"), "Dim2 refer",
       "dim2chain", "id", null, null, null));
+    cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("userid", "int", "userid")));
+    cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("xuserid", "int", "userid")));
+    cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("yuserid", "int", "userid")));
 
     Map<String, String> cubeProperties = new HashMap<String, String>();
     cubeProperties.put(MetastoreUtil.getCubeTimedDimensionListKey(BASE_CUBE_NAME),
@@ -862,13 +865,67 @@ public class CubeTestSetup {
             });
           }
         });
+        add(new JoinChain("userSports", "user-sports", "user sports") {
+          {
+            addPath(new ArrayList<TableReference>() {
+              {
+                add(new TableReference("basecube", "userid"));
+                add(new TableReference("userdim", "id"));
+                add(new TableReference("userdim", "id"));
+                add(new TableReference("user_interests", "user_id", true));
+                add(new TableReference("user_interests", "sport_id"));
+                add(new TableReference("sports", "id"));
+              }
+            });
+          }
+        });
+        add(new JoinChain("userInterestIds", "user-interestsIds", "user interest ids") {
+          {
+            addPath(new ArrayList<TableReference>() {
+              {
+                add(new TableReference("basecube", "userid"));
+                add(new TableReference("userdim", "id"));
+                add(new TableReference("userdim", "id"));
+                add(new TableReference("user_interests", "user_id", true));
+              }
+            });
+          }
+        });
+        add(new JoinChain("xuserSports", "xuser-sports", "xuser sports") {
+          {
+            addPath(new ArrayList<TableReference>() {
+              {
+                add(new TableReference("basecube", "xuserid"));
+                add(new TableReference("userdim", "id"));
+                add(new TableReference("userdim", "id"));
+                add(new TableReference("user_interests", "user_id", true));
+                add(new TableReference("user_interests", "sport_id"));
+                add(new TableReference("sports", "id"));
+              }
+            });
+          }
+        });
+        add(new JoinChain("yuserSports", "user-sports", "user sports") {
+          {
+            addPath(new ArrayList<TableReference>() {
+              {
+                add(new TableReference("basecube", "yuserid"));
+                add(new TableReference("userdim", "id"));
+                add(new TableReference("userdim", "id"));
+                add(new TableReference("user_interests", "user_id", true));
+                add(new TableReference("user_interests", "sport_id"));
+                add(new TableReference("sports", "id"));
+              }
+            });
+          }
+        });
       }
     };
 
     // add ref dim through chain
     cubeDimensions2.add(
-        new ReferencedDimAtrribute(new FieldSchema("cityStateCapital", "string", "State's capital thru city"),
-            "State's capital thru city", "cityState", "capital", null, null, null));
+      new ReferencedDimAtrribute(new FieldSchema("cityStateCapital", "string", "State's capital thru city"),
+        "State's capital thru city", "cityState", "capital", null, null, null));
     client.createCube(BASE_CUBE_NAME, cubeMeasures2, cubeDimensions2, exprs, joinchains, cubeProperties);
 
     Map<String, String> derivedProperties = new HashMap<String, String>();
@@ -890,6 +947,9 @@ public class CubeTestSetup {
     dimensions = new HashSet<String>();
     dimensions.add("cityid");
     dimensions.add("stateid");
+    dimensions.add("userid");
+    dimensions.add("xuserid");
+    dimensions.add("yuserid");
     dimensions.add("dim1");
     dimensions.add("dim2");
     dimensions.add("dim11");
@@ -965,7 +1025,10 @@ public class CubeTestSetup {
     factColumns.add(new FieldSchema("processing_time", "timestamp", "processing time"));
     factColumns.add(new FieldSchema("zipcode", "int", "zip"));
     factColumns.add(new FieldSchema("cityid", "int", "city id"));
-    factColumns.add(new FieldSchema("stateid", "int", "city id"));
+    factColumns.add(new FieldSchema("stateid", "int", "state id"));
+    factColumns.add(new FieldSchema("userid", "int", "user id"));
+    factColumns.add(new FieldSchema("xuserid", "int", "user id"));
+    factColumns.add(new FieldSchema("yuserid", "int", "user id"));
     factColumns.add(new FieldSchema("dim1", "string", "base dim"));
     factColumns.add(new FieldSchema("dim11", "string", "base dim"));
     factColumns.add(new FieldSchema("test_time_dim_hour_id", "int", "time id"));
@@ -985,7 +1048,9 @@ public class CubeTestSetup {
     factColumns.add(new FieldSchema("dim1", "string", "base dim"));
     factColumns.add(new FieldSchema("dim11", "string", "base dim"));
     factColumns.add(new FieldSchema("dim2", "int", "dim2 id"));
-
+    factColumns.add(new FieldSchema("userid", "int", "user id"));
+    factColumns.add(new FieldSchema("xuserid", "int", "user id"));
+    factColumns.add(new FieldSchema("yuserid", "int", "user id"));
     // create cube fact
     client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, storageAggregatePeriods, 5L,
       factValidityProperties, storageTables);
@@ -2160,6 +2225,140 @@ public class CubeTestSetup {
     client.createCubeDimensionTable(dimName, dimTblName, dimColumns, 0L, dumpPeriods, dimProps, storageTables);
   }
 
+  private void createUserTable(CubeMetastoreClient client) throws Exception {
+    String dimName = "userdim";
+
+    Set<CubeDimAttribute> dimAttrs = new HashSet<CubeDimAttribute>();
+    dimAttrs.add(new BaseDimAttribute(new FieldSchema("id", "int", "id")));
+    dimAttrs.add(new BaseDimAttribute(new FieldSchema("name", "string", "name")));
+    dimAttrs.add(new BaseDimAttribute(new FieldSchema("age", "string", "age")));
+    dimAttrs.add(new BaseDimAttribute(new FieldSchema("gender", "string", "gender")));
+    Map<String, String> dimProps = new HashMap<String, String>();
+    dimProps.put(MetastoreUtil.getDimTimedDimensionKey(dimName), TestCubeMetastoreClient.getDatePartitionKey());
+    Set<JoinChain> joinChains = new HashSet<JoinChain>();
+    joinChains.add(new JoinChain("userSports", "user-sports", "user sports") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference("userdim", "id"));
+            add(new TableReference("user_interests", "user_id", true));
+            add(new TableReference("user_interests", "sport_id"));
+            add(new TableReference("sports", "id"));
+          }
+        });
+      }
+    });
+    Dimension userDim = new Dimension(dimName, dimAttrs, null, joinChains, dimProps,  0L);
+    client.createDimension(userDim);
+
+    String dimTblName = "usertable";
+    List<FieldSchema> dimColumns = new ArrayList<FieldSchema>();
+    dimColumns.add(new FieldSchema("id", "int", "id"));
+    dimColumns.add(new FieldSchema("name", "string", "name"));
+    dimColumns.add(new FieldSchema("age", "string", "age"));
+    dimColumns.add(new FieldSchema("gender", "string", "gender"));
+
+    Map<String, UpdatePeriod> dumpPeriods = new HashMap<String, UpdatePeriod>();
+    StorageTableDesc s1 = new StorageTableDesc();
+    s1.setInputFormat(TextInputFormat.class.getCanonicalName());
+    s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
+    dumpPeriods.put(c1, null);
+
+    ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
+    List<String> timePartCols = new ArrayList<String>();
+    partCols.add(TestCubeMetastoreClient.getDatePartition());
+    timePartCols.add(TestCubeMetastoreClient.getDatePartitionKey());
+    StorageTableDesc s2 = new StorageTableDesc();
+    s2.setInputFormat(TextInputFormat.class.getCanonicalName());
+    s2.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
+    s2.setPartCols(partCols);
+    s2.setTimePartCols(timePartCols);
+    dumpPeriods.put(c2, HOURLY);
+    Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
+    storageTables.put(c1, s1);
+    storageTables.put(c2, s2);
+
+    client.createCubeDimensionTable(dimName, dimTblName, dimColumns, 0L, dumpPeriods, dimProps, storageTables);
+  }
+
+  private void createUserInterests(CubeMetastoreClient client) throws Exception {
+    String dimName = "user_interests";
+
+    Set<CubeDimAttribute> dimAttrs = new HashSet<CubeDimAttribute>();
+    dimAttrs.add(new BaseDimAttribute(new FieldSchema("id", "int", "id")));
+    dimAttrs.add(new BaseDimAttribute(new FieldSchema("user_id", "int", "user id")));
+    dimAttrs.add(new BaseDimAttribute(new FieldSchema("sport_id", "int", "sport id")));
+    Map<String, String> dimProps = new HashMap<String, String>();
+    dimProps.put(MetastoreUtil.getDimTimedDimensionKey(dimName), TestCubeMetastoreClient.getDatePartitionKey());
+    Dimension interestDim = new Dimension(dimName, dimAttrs, dimProps, 0L);
+    client.createDimension(interestDim);
+
+    String dimTblName = "user_interests_tbl";
+    List<FieldSchema> dimColumns = new ArrayList<FieldSchema>();
+    dimColumns.add(new FieldSchema("id", "int", "id"));
+    dimColumns.add(new FieldSchema("user_id", "int", "user id"));
+    dimColumns.add(new FieldSchema("sport_id", "int", "sport id"));
+
+    Map<String, UpdatePeriod> dumpPeriods = new HashMap<String, UpdatePeriod>();
+    StorageTableDesc s1 = new StorageTableDesc();
+    s1.setInputFormat(TextInputFormat.class.getCanonicalName());
+    s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
+    dumpPeriods.put(c1, null);
+
+    ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
+    List<String> timePartCols = new ArrayList<String>();
+    partCols.add(TestCubeMetastoreClient.getDatePartition());
+    timePartCols.add(TestCubeMetastoreClient.getDatePartitionKey());
+    StorageTableDesc s2 = new StorageTableDesc();
+    s2.setInputFormat(TextInputFormat.class.getCanonicalName());
+    s2.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
+    s2.setPartCols(partCols);
+    s2.setTimePartCols(timePartCols);
+    dumpPeriods.put(c2, HOURLY);
+    Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
+    storageTables.put(c1, s1);
+    storageTables.put(c2, s2);
+    client.createCubeDimensionTable(dimName, dimTblName, dimColumns, 0L, dumpPeriods, dimProps, storageTables);
+  }
+
+  private void createSports(CubeMetastoreClient client) throws Exception {
+    String dimName = "sports";
+
+    Set<CubeDimAttribute> dimAttrs = new HashSet<CubeDimAttribute>();
+    dimAttrs.add(new BaseDimAttribute(new FieldSchema("id", "int", "id")));
+    dimAttrs.add(new BaseDimAttribute(new FieldSchema("name", "string", "name")));
+    Map<String, String> dimProps = new HashMap<String, String>();
+    dimProps.put(MetastoreUtil.getDimTimedDimensionKey(dimName), TestCubeMetastoreClient.getDatePartitionKey());
+    Dimension interestDim = new Dimension(dimName, dimAttrs, dimProps, 0L);
+    client.createDimension(interestDim);
+
+    String dimTblName = "sports_tbl";
+    List<FieldSchema> dimColumns = new ArrayList<FieldSchema>();
+    dimColumns.add(new FieldSchema("id", "int", "id"));
+    dimColumns.add(new FieldSchema("name", "string", "name"));
+
+    Map<String, UpdatePeriod> dumpPeriods = new HashMap<String, UpdatePeriod>();
+    StorageTableDesc s1 = new StorageTableDesc();
+    s1.setInputFormat(TextInputFormat.class.getCanonicalName());
+    s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
+    dumpPeriods.put(c1, null);
+
+    ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
+    List<String> timePartCols = new ArrayList<String>();
+    partCols.add(TestCubeMetastoreClient.getDatePartition());
+    timePartCols.add(TestCubeMetastoreClient.getDatePartitionKey());
+    StorageTableDesc s2 = new StorageTableDesc();
+    s2.setInputFormat(TextInputFormat.class.getCanonicalName());
+    s2.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
+    s2.setPartCols(partCols);
+    s2.setTimePartCols(timePartCols);
+    dumpPeriods.put(c2, HOURLY);
+    Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
+    storageTables.put(c1, s1);
+    storageTables.put(c2, s2);
+
+    client.createCubeDimensionTable(dimName, dimTblName, dimColumns, 0L, dumpPeriods, dimProps, storageTables);
+  }
   public void createSources(HiveConf conf, String dbName) throws Exception {
     try {
       Database database = new Database();
@@ -2201,6 +2400,9 @@ public class CubeTestSetup {
       createStateTable(client);
       createCubeFactsWithValidColumns(client);
       createUnReachabletable(client);
+      createUserTable(client);
+      createSports(client);
+      createUserInterests(client);
     } catch (Exception exc) {
       log.error("Exception while creating sources.", exc);
       throw exc;

http://git-wip-us.apache.org/repos/asf/lens/blob/b2c8fc02/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
index cb63fad..7580541 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestJoinResolver.java
@@ -52,6 +52,7 @@ public class TestJoinResolver extends TestQueryRewrite {
     hconf.setBoolean(CubeQueryConfUtil.DISABLE_AUTO_JOINS, false);
     hconf.setBoolean(CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT, true);
     hconf.setBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, true);
+    hconf.setBoolean(CubeQueryConfUtil.ENABLE_FLATTENING_FOR_BRIDGETABLES, true);
     this.metastore = CubeMetastoreClient.getInstance(hconf);
   }
 
@@ -496,11 +497,13 @@ public class TestJoinResolver extends TestQueryRewrite {
     expected = getExpectedQuery("basecube",
       "select cubestatecountry.name, cubecitystatecountry.name, sum(basecube.msr2) FROM ",
       ""
-        + " join TestQueryRewrite.c1_citytable citydim on basecube.cityid = citydim.id and (citydim.dt = 'latest')"
-        + " join TestQueryRewrite.c1_statetable statedim_0 on citydim.stateid=statedim_0.id and statedim_0.dt='latest'"
-        + " join TestQueryRewrite.c1_countrytable cubecitystatecountry on statedim_0.countryid=cubecitystatecountry.id"
-        + " join TestQueryRewrite.c1_statetable statedim on basecube.stateid=statedim.id and (statedim.dt = 'latest')"
-        + " join TestQueryRewrite.c1_countrytable cubestatecountry on statedim.countryid=cubestatecountry.id "
+        + " join " + getDbName() + "c1_citytable citydim on basecube.cityid = citydim.id and (citydim.dt = 'latest')"
+        + " join " + getDbName()
+        + "c1_statetable statedim_0 on citydim.stateid=statedim_0.id and statedim_0.dt='latest'"
+        + " join " + getDbName()
+        + "c1_countrytable cubecitystatecountry on statedim_0.countryid=cubecitystatecountry.id"
+        + " join " + getDbName() + "c1_statetable statedim on basecube.stateid=statedim.id and (statedim.dt = 'latest')"
+        + " join " + getDbName() + "c1_countrytable cubestatecountry on statedim.countryid=cubestatecountry.id "
         + "", null, "group by cubestatecountry.name, cubecitystatecountry.name", null,
       getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base")
     );
@@ -752,4 +755,184 @@ public class TestJoinResolver extends TestQueryRewrite {
     assertNotNull(e2);
     assertEquals(e2.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
   }
+
+  @Test
+  public void testBridgeTablesWithoutDimtablePartitioning() throws Exception {
+    Configuration conf = new Configuration(hconf);
+    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
+    String query = "select usersports.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, conf);
+    String expected = getExpectedQuery("basecube", "select usersports.name, sum(basecube.msr2) FROM ",
+      " join " + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
+        + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as name"
+        + " from " + getDbName() + "c1_user_interests_tbl user_interests"
+        + " join " + getDbName() + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
+        + " group by user_interests.user_id) usersports"
+        + " on userdim.id = usersports.user_id ",
+      null, "group by usersports.name", null,
+      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
+
+  @Test
+  public void testFlattenBridgeTablesOFF() throws Exception {
+    Configuration conf = new Configuration(hconf);
+    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
+    conf.setBoolean(CubeQueryConfUtil.ENABLE_FLATTENING_FOR_BRIDGETABLES, false);
+    String query = "select usersports.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, conf);
+    String expected = getExpectedQuery("basecube", "select usersports.name, sum(basecube.msr2) FROM ",
+      " join " + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
+        + " join " + getDbName() + "c1_user_interests_tbl user_interests on userdim.id = user_interests.user_id"
+        + " join " + getDbName() + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id",
+      null, "group by usersports.name", null,
+      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
+
+  @Test
+  public void testFlattenBridgeTablesWithCustomAggregate() throws Exception {
+    Configuration conf = new Configuration(hconf);
+    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
+    conf.set(CubeQueryConfUtil.BRIDGE_TABLE_FIELD_AGGREGATOR, "custom_aggr");
+    String query = "select usersports.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, conf);
+    String expected = getExpectedQuery("basecube", "select usersports.name, sum(basecube.msr2) FROM ",
+      " join " + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
+        + " join (select user_interests.user_id as user_id,custom_aggr(usersports.name) as name"
+        + " from " + getDbName() + "c1_user_interests_tbl user_interests"
+        + " join " + getDbName() + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
+        + " group by user_interests.user_id) usersports"
+        + " on userdim.id = usersports.user_id ",
+      null, "group by usersports.name", null,
+      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
+
+  @Test
+  public void testBridgeTablesWithMergingChains() throws Exception {
+    Configuration conf = new Configuration(hconf);
+    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
+    String query = "select userInterestIds.sport_id, usersports.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, conf);
+    String expected = getExpectedQuery("basecube", "select userInterestIds.sport_id, usersports.name,"
+      + " sum(basecube.msr2) FROM ",
+      " join " + getDbName() + "c1_usertable userdim on basecube.userid = userdim.id join (select userinterestids"
+        + ".user_id as user_id,collect_set(userinterestids.sport_id) as sport_id from " + getDbName()
+        + "c1_user_interests_tbl userinterestids group by userinterestids.user_id) userinterestids on userdim.id = "
+        + "userinterestids.user_id join (select userinterestids.user_id as user_id,collect_set(usersports.name) as name"
+        + " from " + getDbName() + "c1_user_interests_tbl userinterestids join "
+        + getDbName() + "c1_sports_tbl usersports on userinterestids.sport_id = usersports.id"
+        + " group by userinterestids.user_id) usersports on userdim.id = usersports.user_id",
+       null, "group by userInterestIds.sport_id, usersports.name", null,
+      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
+
+  @Test
+  public void testBridgeTablesWithMultipleFacts() throws Exception {
+    Configuration conf = new Configuration(hconf);
+    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
+    String query = "select usersports.name, sum(msr2), sum(msr12) from basecube where " + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, conf);
+    String expected1 = getExpectedQuery("basecube", "select usersports.name name, sum(basecube.msr2) msr2 FROM ",
+      " join " + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
+        + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as name"
+        + " from " + getDbName() + "c1_user_interests_tbl user_interests"
+        + " join " + getDbName() + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
+        + " group by user_interests.user_id) usersports"
+        + " on userdim.id = usersports.user_id ",
+      null, "group by usersports.name", null,
+      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
+    String expected2 = getExpectedQuery("basecube", "select usersports.name name, sum(basecube.msr12) msr12 FROM ",
+      " join " + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
+        + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as name"
+        + " from " + getDbName() + "c1_user_interests_tbl user_interests"
+        + " join " + getDbName() + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
+        + " group by user_interests.user_id) usersports"
+        + " on userdim.id = usersports.user_id ",
+      null, "group by usersports.name", null,
+      getWhereForDailyAndHourly2days("basecube", "c1_testfact2_base"));
+    TestCubeRewriter.compareContains(expected1, hqlQuery);
+    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    String lower = hqlQuery.toLowerCase();
+    assertTrue(
+      lower.startsWith("select coalesce(mq1.name, mq2.name) name, mq2.msr2 msr2, mq1.msr12 msr12 from ")
+      || lower.startsWith("select coalesce(mq1.name, mq2.name) name, mq1.msr2 msr2, mq2.msr12 msr12 from "), hqlQuery);
+
+    assertTrue(hqlQuery.contains("mq1 full outer join ") && hqlQuery.endsWith("mq2 on mq1.name <=> mq2.name"),
+      hqlQuery);
+  }
+
+  @Test
+  public void testBridgeTablesWithMultipleChains() throws Exception {
+    Configuration conf = new Configuration(hconf);
+    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
+    String query = "select usersports.name, xusersports.name, yusersports.name, sum(msr2) from basecube where "
+      + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, conf);
+    String expected = getExpectedQuery("basecube", "select usersports.name, xusersports.name, yusersports.name,"
+      + " sum(basecube.msr2) FROM ",
+      " join " + getDbName() + "c1_usertable userdim_1 on basecube.userid = userdim_1.id "
+      + " join  (select user_interests_1.user_id as user_id, collect_set(usersports.name) as name from "
+      + getDbName() + "c1_user_interests_tbl user_interests_1 join " + getDbName() + "c1_sports_tbl usersports on "
+      + "user_interests_1.sport_id = usersports.id group by user_interests_1.user_id) "
+      + "usersports on userdim_1.id = usersports.user_id"
+      + " join " + getDbName() + "c1_usertable userdim_0 on basecube.yuserid = userdim_0.id "
+      + " join  (select user_interests_0.user_id as user_id,collect_set(yusersports.name) as name from "
+      + getDbName() + "c1_user_interests_tbl user_interests_0 join " + getDbName() + "c1_sports_tbl yusersports on "
+      + " user_interests_0.sport_id = yusersports.id group by user_interests_0.user_id) yusersports on userdim_0.id ="
+      + " yusersports.user_id join " + getDbName() + "c1_usertable userdim on basecube.xuserid = userdim.id"
+      + " join  (select user_interests.user_id as user_id,collect_set(xusersports.name) as name from "
+      + getDbName() + "c1_user_interests_tbl user_interests join " + getDbName() + "c1_sports_tbl xusersports"
+      + " on user_interests.sport_id = xusersports.id group by user_interests.user_id) xusersports on userdim.id = "
+      + " xusersports.user_id", null, "group by usersports.name, xusersports.name, yusersports.name", null,
+      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
+  @Test
+  public void testBridgeTablesWithDimTablePartitioning() throws Exception {
+    Configuration conf = new Configuration(hconf);
+    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C2");
+    String query = "select usersports.name, sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, conf);
+    String expected = getExpectedQuery("basecube", "select usersports.name, sum(basecube.msr2) FROM ",
+      " join " + getDbName() + "c2_usertable userdim ON basecube.userid = userdim.id and userdim.dt='latest' "
+        + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as name"
+        + " from " + getDbName() + "c2_user_interests_tbl user_interests"
+        + " join " + getDbName() + "c2_sports_tbl usersports on user_interests.sport_id = usersports.id"
+        + " and usersports.dt='latest and user_interests.dt='latest'"
+        + " group by user_interests.user_id) usersports"
+        + " on userdim.id = usersports.user_id ",
+      null, "group by usersports.name", null,
+      getWhereForDailyAndHourly2days("basecube", "c2_testfact1_base"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
+
+  @Test
+  public void testBridgeTablesWithNormalJoins() throws Exception {
+    Configuration conf = new Configuration(hconf);
+    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1");
+    String query = "select usersports.name, cubestatecountry.name, cubecitystatecountry.name,"
+      + " sum(msr2) from basecube where " + TWO_DAYS_RANGE;
+    String hqlQuery = rewrite(query, conf);
+    String expected = getExpectedQuery("basecube", "select usersports.name, cubestatecountry.name, "
+      + "cubecitystatecountry.name, sum(basecube.msr2) FROM ",
+      " join " + getDbName() + "c1_usertable userdim ON basecube.userid = userdim.id "
+        + " join (select user_interests.user_id as user_id,collect_set(usersports.name) as name"
+        + " from " + getDbName() + "c1_user_interests_tbl user_interests"
+        + " join " + getDbName() + "c1_sports_tbl usersports on user_interests.sport_id = usersports.id"
+        + " group by user_interests.user_id) usersports"
+        + " on userdim.id = usersports.user_id "
+        + " join " + getDbName() + "c1_citytable citydim on basecube.cityid = citydim.id and (citydim.dt = 'latest')"
+        + " join " + getDbName()
+        + "c1_statetable statedim_0 on citydim.stateid=statedim_0.id and statedim_0.dt='latest'"
+        + " join " + getDbName()
+        + "c1_countrytable cubecitystatecountry on statedim_0.countryid=cubecitystatecountry.id"
+        + " join " + getDbName() + "c1_statetable statedim on basecube.stateid=statedim.id and (statedim.dt = 'latest')"
+        + " join " + getDbName() + "c1_countrytable cubestatecountry on statedim.countryid=cubestatecountry.id ",
+      null, "group by usersports.name, cubestatecountry.name, cubecitystatecountry.name", null,
+      getWhereForDailyAndHourly2days("basecube", "c1_testfact1_base"));
+    TestCubeRewriter.compareQueries(hqlQuery, expected);
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/b2c8fc02/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
index ed472f6..1ed81eb 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
@@ -177,7 +177,7 @@ public final class JAXBUtils {
         xd.getRefSpec().getTableReferences().getTableReference().size());
 
       for (XTableReference xRef : xd.getRefSpec().getTableReferences().getTableReference()) {
-        dimRefs.add(new TableReference(xRef.getTable(), xRef.getColumn()));
+        dimRefs.add(new TableReference(xRef.getTable(), xRef.getColumn(), xRef.isMapsToMany()));
       }
 
       hiveDim = new ReferencedDimAtrribute(new FieldSchema(xd.getName(), xd.getType().toLowerCase(),
@@ -427,6 +427,7 @@ public final class JAXBUtils {
     XTableReference xref = XCF.createXTableReference();
     xref.setTable(ref.getDestTable());
     xref.setColumn(ref.getDestColumn());
+    xref.setMapsToMany(ref.isMapsToMany());
     return xref;
   }
 
@@ -467,8 +468,9 @@ public final class JAXBUtils {
       List<TableReference> chain = new ArrayList<TableReference>(xchain.getEdges().getEdge().size() * 2);
 
       for (XJoinEdge xRef : xchain.getEdges().getEdge()) {
-        chain.add(new TableReference(xRef.getFrom().getTable(), xRef.getFrom().getColumn()));
-        chain.add(new TableReference(xRef.getTo().getTable(), xRef.getTo().getColumn()));
+        chain.add(new TableReference(xRef.getFrom().getTable(), xRef.getFrom().getColumn(),
+          xRef.getFrom().isMapsToMany()));
+        chain.add(new TableReference(xRef.getTo().getTable(), xRef.getTo().getColumn(), xRef.getTo().isMapsToMany()));
       }
       jc.addPath(chain);
     }
@@ -651,6 +653,7 @@ public final class JAXBUtils {
         XTableReference xRef = XCF.createXTableReference();
         xRef.setColumn(ref.getDestColumn());
         xRef.setTable(ref.getDestTable());
+        xRef.setMapsToMany(ref.isMapsToMany());
         xTabRefs.add(xRef);
       }
       return xTabRefs;

http://git-wip-us.apache.org/repos/asf/lens/blob/b2c8fc02/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
index f9be6e6..ca0fe56 100644
--- a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
@@ -347,6 +347,7 @@ public class TestMetastoreService extends LensJerseyTest {
     XTableReference link2 = new XTableReference();
     link2.setTable("testdim");
     link2.setColumn("col1");
+    link2.setMapsToMany(true);
     XJoinEdge edge1 = cubeObjectFactory.createXJoinEdge();
     edge1.setFrom(link1);
     edge1.setTo(link2);
@@ -702,7 +703,8 @@ public class TestMetastoreService extends LensJerseyTest {
       List<TableReference> links = chain1.getPaths().get(0).getReferences();
       Assert.assertEquals(links.size(), 2);
       Assert.assertEquals(links.get(0).toString(), "testgetcube.col1");
-      Assert.assertEquals(links.get(1).toString(), "testdim.col1");
+      Assert.assertTrue(links.get(1).isMapsToMany());
+      Assert.assertEquals(links.get(1).toString(), "testdim.col1[n]");
 
       final XDerivedCube dcube = createDerivedCube("testGetDerivedCube", "testGetCube");
       target = target().path("metastore").path("cubes");

http://git-wip-us.apache.org/repos/asf/lens/blob/b2c8fc02/src/site/apt/user/olap-cube.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/user/olap-cube.apt b/src/site/apt/user/olap-cube.apt
index ac91955..c47ac30 100644
--- a/src/site/apt/user/olap-cube.apt
+++ b/src/site/apt/user/olap-cube.apt
@@ -416,7 +416,106 @@ lens-shell>
 
 +---+
 
+<<Bridge tables>>
 
+* Bridge Table
+
+ A bridge table sits between a cube and a dimension or between two dimensions and is used to resolve
+ many-to-many relationships. Refer to the following for more details:
+
+ * {{{http://www.kimballgroup.com/data-warehouse-business-intelligence-resources/kimball-techniques/dimensional-modeling-techniques/multivalued-dimension-bridge-table/}
+ Kimball group's definition}}
+
+ * {{{http://www.kimballgroup.com/2012/02/design-tip-142-building-bridges/} Kimball group's design tip}}
+
+ * {{{http://www.pythian.com/blog/implementing-many-to-many-relationships-in-data-warehousing/} Pythian blog}}
+
+
+ When creating a cube or dimension, the user can specify whether any destination link in a join chain maps to a
+ many-to-many relationship.
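+
+ For illustration only, here is a minimal sketch of flagging the destination end of a join edge as maps-to-many
+ through the JAXB metastore objects (the class wrapper, table and column names are hypothetical, and the imports
+ assume the lens-api metastore package):
+
++---+
+import org.apache.lens.api.metastore.XJoinEdge;
+import org.apache.lens.api.metastore.XTableReference;
+
+public class MapsToManyEdgeSketch {
+  public static void main(String[] args) {
+    // Source end of the edge, e.g. the cube's user id column (illustrative names).
+    XTableReference from = new XTableReference();
+    from.setTable("basecube");
+    from.setColumn("userid");
+
+    // Destination end: the bridge table, which holds many rows per user id.
+    XTableReference to = new XTableReference();
+    to.setTable("user_interests");
+    to.setColumn("user_id");
+    to.setMapsToMany(true); // marks the many-to-many hop
+
+    // The edge would then go into a join chain's path in the cube/dimension definition.
+    XJoinEdge edge = new XJoinEdge();
+    edge.setFrom(from);
+    edge.setTo(to);
+  }
+}
++---+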
+
+* Flattening feature
+
+ Consider the following example:
+
+ User :
+
+*--+--+--+
+||ID||	Name||	Gender||
+*--+--+--+
+|1|	A|	M|
+*--+--+--+
+|2|	B|	M|
+*--+--+--+
+|3|	C|	F|
+*--+--+--+
+
+ User interests :
+
+*--+--+
+||UserID||	Sports ID||
+*--+--+
+|1|	1|
+*--+--+
+|1|	2|
+*--+--+
+|2|	1|
+*--+--+
+|2|	2|
+*--+--+
+|2|	3|
+*--+--+
+
+ Sports :
+
+*--+--+
+||SportsID||	Description||
+*--+--+
+|1|	Football|
+*--+--+
+|2|	Cricket|
+*--+--+
+|3|	Basketball|
+*--+--+
+
+ User Interests is the bridge table that captures the many-to-many relationship between Users and Sports.
+ Suppose we also have a fact as follows:
+
+*--+--+
+||UserId||	Revenue||
+*--+--+
+|1|	100|
+*--+--+
+|2|	50|
+*--+--+
+
+ If an analyst is interested in analyzing revenue with respect to the users' sports interests, the report would look
+ like the following:
+
+*--+--+
+||User's sport||	Revenue||
+*--+--+
+|Football|	150|
+*--+--+
+|Cricket|	150|
+*--+--+
+|Basketball|	50|
+*--+--+
+
+ Though the individual rows are correct and the overall revenue is actually 150, the above report makes it easy to
+ assume that the overall revenue is 350. The flattening feature optionally flattens the selected fields when the
+ fields involved come from bridge tables in the join path. If flattening is enabled, the report would be the following:
+
+*--+--+
+||User Interest||	Revenue||
+*--+--+
+|Football, Cricket|	100|
+*--+--+
+|Football, Cricket, Basketball|	50|
+*--+--+
+
+ To turn this on, see the parameters related to bridge tables among the configuration parameters listed at
+ {{{./olap-query-conf.html} OLAP query configurations}}.
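+
+ As a sketch only (the property names come from the configuration reference; the class wrapper, cube, dimension and
+ column names are hypothetical), flattening can be switched on and the field aggregator chosen as below, after which
+ the rewriter joins a sub-query that aggregates the bridge-table fields per join key:
+
++---+
+import org.apache.hadoop.conf.Configuration;
+
+public class BridgeTableFlatteningSketch {
+  public static void main(String[] args) {
+    Configuration conf = new Configuration();
+    // Turn on flattening of fields selected through bridge tables (off by default).
+    conf.set("lens.cube.query.enable.flattening.bridge.tables", "true");
+    // Aggregator used to collapse bridge-table fields per join key (collect_set by default).
+    conf.set("lens.cube.query.bridge.table.field.aggregator", "collect_set");
+
+    // The user query stays unchanged; only the rewritten HQL differs.
+    String userQuery = "select usersports.name, sum(msr2) from basecube where <time-range>";
+    // With flattening enabled, the rewriter joins a sub-query of the form
+    //   (select user_interests.user_id, collect_set(usersports.name) as name
+    //      from user_interests join sports usersports
+    //        on user_interests.sport_id = usersports.id
+    //     group by user_interests.user_id) usersports
+    // instead of joining the bridge and sports tables row by row.
+    System.out.println(userQuery);
+  }
+}
++---+
+
+ Since the aggregation function may differ across execution engines, the aggregator is typically set per driver.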
 
 << Query API >>
 

http://git-wip-us.apache.org/repos/asf/lens/blob/b2c8fc02/src/site/apt/user/olap-query-conf.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/user/olap-query-conf.apt b/src/site/apt/user/olap-query-conf.apt
index 8204c34..6606d42 100644
--- a/src/site/apt/user/olap-query-conf.apt
+++ b/src/site/apt/user/olap-query-conf.apt
@@ -24,44 +24,48 @@ OLAP query configuration
 *--+--+---+--+
 |<<No.>>|<<Property Name>>|<<Default Value>>|<<Description>>|
 *--+--+---+--+
-|1|lens.cube.query.disable.aggregate.resolver|true|Tells whether to disable automatic resolution of aggregations for measures in a cube. To enable automatic resolution, this value should be false.|
+|1|lens.cube.query.bridge.table.field.aggregator|collect_set|The field aggregator function to be used for aggregating fields from bridge tables. It is used only when flattening is enabled. The value can be passed with each query; in typical deployments it is set per driver, as the function can differ from driver to driver.|
 *--+--+---+--+
-|2|lens.cube.query.disable.auto.join|true|Tells whether to disable automatic resolution of join conditions between tables involved. To enable automatic resolution, this value should be false.|
+|2|lens.cube.query.disable.aggregate.resolver|true|Tells whether to disable automatic resolution of aggregations for measures in a cube. To enable automatic resolution, this value should be false.|
 *--+--+---+--+
-|3|lens.cube.query.driver.supported.storages| |List of comma separated storage names that supported by a driver. If no value is specified, all storages are valid|
+|3|lens.cube.query.disable.auto.join|true|Tells whether to disable automatic resolution of join conditions between tables involved. To enable automatic resolution, this value should be false.|
 *--+--+---+--+
-|4|lens.cube.query.enable.attrfields.add.distinct|true|When the query has only attribute fields projected from cube and the flag is set to true, distinct clause will be added for the projection so that no duplicate values will be projected. If flag is set to false, projection wont be changed, result might include duplicate values.|
+|4|lens.cube.query.driver.supported.storages| |List of comma separated storage names that are supported by a driver. If no value is specified, all storages are valid|
 *--+--+---+--+
-|5|lens.cube.query.fail.if.data.partial|false|Whether to fail the query of data is partial|
+|5|lens.cube.query.enable.attrfields.add.distinct|true|When the query has only attribute fields projected from the cube and the flag is set to true, a distinct clause will be added to the projection so that no duplicate values will be projected. If the flag is set to false, the projection won't be changed and the result might include duplicate values.|
 *--+--+---+--+
-|6|lens.cube.query.join.type|INNER|Tells what the join type is, in-case of automatic resolution of joins is enabled|
+|6|lens.cube.query.enable.flattening.bridge.tables|false|Flag that specifies whether selected fields have to be flattened when they come from tables with a many-to-many relationship in the join. If false, field selection will be a simple join and selection of the field. If true, the fields from bridge tables will be aggregated, grouped by the join key.|
 *--+--+---+--+
-|7|lens.cube.query.lookahead.ptparts.forinterval.${interval}|1|The value of number of lookahead process time partitions for interval specified. Interval can be any Update period.|
+|7|lens.cube.query.fail.if.data.partial|false|Whether to fail the query if data is partial|
 *--+--+---+--+
-|8|lens.cube.query.max.interval| |Maximum value of the update period that the query timed dimensions can take values of. For example, if query involves month ranges, user can say query maximum interval is daily, then no monthly partitions will be picked.|
+|8|lens.cube.query.join.type|INNER|Tells what the join type is, in case automatic resolution of joins is enabled|
 *--+--+---+--+
-|9|lens.cube.query.nonexisting.partitions| |The list of comma separated non existing partitions, if query can run with partial data. The value will be set by the cube query rewriter|
+|9|lens.cube.query.lookahead.ptparts.forinterval.${interval}|1|The number of lookahead process time partitions for the specified interval. Interval can be any update period.|
 *--+--+---+--+
-|10|lens.cube.query.partition.where.clause.format| |The simple date format of how the queried partition should be put in where clause. If nothing is specified, it will use the format from org.apache.lens.cube.metadata.UpdatePeriod for each type of partition|
+|10|lens.cube.query.max.interval| |Maximum update period that the queried timed dimensions can take values of. For example, if the query involves month ranges, the user can set the maximum interval to daily, and then no monthly partitions will be picked.|
 *--+--+---+--+
-|11|lens.cube.query.pick.lightest.fact.first|false|If set to true, lightest fact will be resolved first than resolving storages. Otherwise, storages will be resolved to check all partitions exist and then pick lightest fact among candidates|
+|11|lens.cube.query.nonexisting.partitions| |The list of comma separated non existing partitions, if query can run with partial data. The value will be set by the cube query rewriter|
 *--+--+---+--+
-|12|lens.cube.query.process.time.partition.column| |The column name which is a process time column. If process time column is specified, query rewriter will look ahead the partitions of other timed dimensions inside this column.|
+|12|lens.cube.query.partition.where.clause.format| |The simple date format of how the queried partition should be put in where clause. If nothing is specified, it will use the format from org.apache.lens.cube.metadata.UpdatePeriod for each type of partition|
 *--+--+---+--+
-|13|lens.cube.query.promote.groupby.toselect|false|Tells whether to promote group by clauses to be promoted to select expressions if they are already not projected. To enable automatic promotion, this value should be true.|
+|13|lens.cube.query.pick.lightest.fact.first|false|If set to true, the lightest fact will be resolved first, before resolving storages. Otherwise, storages will be resolved to check that all partitions exist, and then the lightest fact among the candidates will be picked|
 *--+--+---+--+
-|14|lens.cube.query.promote.select.togroupby|false|Tells whether to promote select expressions which is not inside any aggregate, to be promoted to groupby clauses, if they are already not part of groupby clauses. To enable automatic promotion, this value should be true.|
+|14|lens.cube.query.process.time.partition.column| |The column name which is a process time column. If process time column is specified, query rewriter will look ahead the partitions of other timed dimensions inside this column.|
 *--+--+---+--+
-|15|lens.cube.query.replace.timedim|true|Tells whether timedim attribute queried in the time range should be replaced with its corresponding partition column name.|
+|15|lens.cube.query.promote.groupby.toselect|false|Tells whether group by clauses should be promoted to select expressions if they are not already projected. To enable automatic promotion, this value should be true.|
 *--+--+---+--+
-|16|lens.cube.query.time.range.writer.class|org.apache.lens.cube.parse.ORTimeRangeWriter|The timerange writer class which specifies how the resolved partitions in timeranges should be written in final query. Available writers are org.apache.lens.cube.parse.ORTimeRangeWriter and org.apache.lens.cube.parse.BetweenTimeRangeWriter|
+|16|lens.cube.query.promote.select.togroupby|false|Tells whether select expressions which are not inside any aggregate should be promoted to group by clauses, if they are not already part of the group by clauses. To enable automatic promotion, this value should be true.|
 *--+--+---+--+
-|17|lens.cube.query.valid.${cubename}.facttables| |List of comma separated fact tables that are valid for cube. If no value is specified, all fact tables are valid|
+|17|lens.cube.query.replace.timedim|true|Tells whether timedim attribute queried in the time range should be replaced with its corresponding partition column name.|
 *--+--+---+--+
-|18|lens.cube.query.valid.dim.storgaetables| |List of comma separated dimension storage tables that are valid. If no value is specified, all tables are valid|
+|18|lens.cube.query.time.range.writer.class|org.apache.lens.cube.parse.ORTimeRangeWriter|The timerange writer class which specifies how the resolved partitions in timeranges should be written in final query. Available writers are org.apache.lens.cube.parse.ORTimeRangeWriter and org.apache.lens.cube.parse.BetweenTimeRangeWriter|
 *--+--+---+--+
-|19|lens.cube.query.valid.fact.${facttable}.storage.${storagename}.updateperiods| |List of comma separated update periods that are valid for a fact on a storage. If no value is specified, all update periods are valid|
+|19|lens.cube.query.valid.${cubename}.facttables| |List of comma separated fact tables that are valid for cube. If no value is specified, all fact tables are valid|
 *--+--+---+--+
-|20|lens.cube.query.valid.fact.${facttable}.storagetables| |List of comma separated storage tables that are valid for a fact. If no value is specified, all storage tables are valid|
+|20|lens.cube.query.valid.dim.storgaetables| |List of comma separated dimension storage tables that are valid. If no value is specified, all tables are valid|
+*--+--+---+--+
+|21|lens.cube.query.valid.fact.${facttable}.storage.${storagename}.updateperiods| |List of comma separated update periods that are valid for a fact on a storage. If no value is specified, all update periods are valid|
+*--+--+---+--+
+|22|lens.cube.query.valid.fact.${facttable}.storagetables| |List of comma separated storage tables that are valid for a fact. If no value is specified, all storage tables are valid|
 *--+--+---+--+
 The configuration parameters and their default values