Posted to commits@lens.apache.org by am...@apache.org on 2016/03/30 06:55:47 UTC

[3/3] lens git commit: LENS-788 : Push expressions before flattening and convert filters to array_contains filters for bridge tables

LENS-788 : Push expressions before flattening and convert filters to array_contains filters for bridge tables


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/dba885ca
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/dba885ca
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/dba885ca

Branch: refs/heads/master
Commit: dba885ca972e8a11281f1bc44ceb4b7dd3906ce0
Parents: 79b95f0
Author: Amareshwari Sriramadasu <am...@apache.org>
Authored: Wed Mar 30 10:25:20 2016 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Wed Mar 30 10:25:20 2016 +0530

----------------------------------------------------------------------
 .../apache/lens/cube/parse/CandidateFact.java   |  51 +-
 .../lens/cube/parse/CubeQueryConfUtil.java      |   5 +
 .../lens/cube/parse/CubeQueryContext.java       |  21 +-
 .../lens/cube/parse/DefaultAliasDecider.java    |  13 +-
 .../apache/lens/cube/parse/DefaultQueryAST.java |   5 +-
 .../apache/lens/cube/parse/DimHQLContext.java   |  29 +-
 .../lens/cube/parse/DimOnlyHQLContext.java      |  10 -
 .../org/apache/lens/cube/parse/HQLParser.java   |  49 +-
 .../apache/lens/cube/parse/JoinResolver.java    |   9 +-
 .../lens/cube/parse/MultiFactHQLContext.java    |   3 -
 .../org/apache/lens/cube/parse/QueryAST.java    |   6 +-
 .../parse/SingleFactMultiStorageHQLContext.java |   8 -
 .../SingleFactSingleStorageHQLContext.java      |  12 -
 .../lens/cube/parse/join/AutoJoinContext.java   | 104 +---
 .../cube/parse/join/BridgeTableJoinContext.java | 321 ++++++++++
 .../src/main/resources/olap-query-conf.xml      |  18 +
 .../apache/lens/cube/parse/CubeTestSetup.java   |  19 +-
 .../lens/cube/parse/TestBridgeTableQueries.java | 618 +++++++++++++++++--
 .../cube/parse/TestDefaultAliasDecider.java     |  53 ++
 .../apache/lens/cube/parse/TestHQLParser.java   |  82 +++
 .../org/apache/lens/cube/parse/TestQuery.java   |  18 +-
 .../cube/parse/join/TestBridgeTableJoinCtx.java | 122 ++++
 lens-dist/src/main/assembly/bin-dist.xml        |  16 +
 .../lens/examples/PopulateSampleMetastore.java  |   2 +
 .../apache/lens/examples/SampleMetastore.java   |   4 +
 .../src/main/resources/cube-queries.sql         |   6 +-
 .../resources/customer-interests-local-part.xml |  30 +
 .../resources/customer-interests-local.data     |   6 +
 .../src/main/resources/customer-interests.xml   |  31 +
 .../main/resources/customer_interests_table.xml |  52 ++
 .../src/main/resources/db-storage-schema.sql    |  18 +
 .../src/main/resources/interests-local-part.xml |  29 +
 .../src/main/resources/interests-local.data     |   4 +
 lens-examples/src/main/resources/interests.xml  |  31 +
 .../src/main/resources/interests_table.xml      |  51 ++
 lens-examples/src/main/resources/sales-cube.xml |  24 +
 src/site/apt/user/olap-cube.apt                 |   8 +
 src/site/apt/user/olap-query-conf.apt           |  48 +-
 tools/conf/client/lens-client-site.xml          |  16 +-
 39 files changed, 1678 insertions(+), 274 deletions(-)
----------------------------------------------------------------------
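
In short, the change pushes queried expressions into the bridge-table subquery before flattening and rewrites direct filters on flattened bridge-table fields into array filters. A rough before/after sketch of the filter rewrite, with illustrative table, column and alias names (the "baliasN" alias prefix and the collect_set/array_contains defaults come from the code below; the exact query text here is not taken from this commit):

  -- user-written filter on a bridge-table field
  where usersports.sport_name = 'CRICKET'
  -- rewritten: the field is aggregated inside the bridge-table subquery and the
  -- equality test becomes an array filter in the outer query
  where array_contains(usersports.balias0, 'CRICKET')
  -- an IN filter becomes an OR of array filters, one per value
  where usersports.sport_name in ('CRICKET', 'FOOTBALL')
  where (array_contains(usersports.balias0, 'CRICKET') OR array_contains(usersports.balias0, 'FOOTBALL'))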


http://git-wip-us.apache.org/repos/asf/lens/blob/dba885ca/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
index 82ca4f4..3f724b6 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
@@ -23,8 +23,6 @@ import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 import java.util.*;
 
 import org.apache.lens.cube.metadata.*;
-import org.apache.lens.cube.parse.HQLParser.ASTNodeVisitor;
-import org.apache.lens.cube.parse.HQLParser.TreeNode;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
@@ -76,6 +74,8 @@ public class CandidateFact implements CandidateTable, QueryAST {
   @Getter
   @Setter
   private Integer limitValue;
+  @Getter
+  private String fromString;
   private final List<Integer> selectIndices = Lists.newArrayList();
   private final List<Integer> dimFieldIndices = Lists.newArrayList();
   private Collection<String> columns;
@@ -166,7 +166,7 @@ public class CandidateFact implements CandidateTable, QueryAST {
   }
 
   public boolean isExpressionAnswerable(ASTNode node, CubeQueryContext context) throws LensException {
-    return getColumns().containsAll(getColsInExpr(context, context.getCube().getAllFieldNames(), node));
+    return getColumns().containsAll(HQLParser.getColsInExpr(context.getAliasForTableName(context.getCube()), node));
   }
 
   /**
@@ -182,7 +182,7 @@ public class CandidateFact implements CandidateTable, QueryAST {
     int currentChild = 0;
     for (int i = 0; i < cubeql.getSelectAST().getChildCount(); i++) {
       ASTNode selectExpr = (ASTNode) this.selectAST.getChild(currentChild);
-      Set<String> exprCols = getColsInExpr(cubeql, cubeCols, selectExpr);
+      Set<String> exprCols = HQLParser.getColsInExpr(cubeql.getAliasForTableName(cubeql.getCube()), selectExpr);
       if (getColumns().containsAll(exprCols)) {
         selectIndices.add(i);
         if (cubeql.getCube().getDimAttributeNames().containsAll(exprCols)) {
@@ -216,39 +216,6 @@ public class CandidateFact implements CandidateTable, QueryAST {
     // push down of having clauses happens just after this call in cubequerycontext
   }
 
-  private Set<String> getColsInExpr(final CubeQueryContext cubeql, final Set<String> cubeCols,
-    ASTNode expr) throws LensException {
-    final Set<String> cubeColsInExpr = new HashSet<>();
-    HQLParser.bft(expr, new ASTNodeVisitor() {
-      @Override
-      public void visit(TreeNode visited) {
-        ASTNode node = visited.getNode();
-        ASTNode parent = null;
-        if (visited.getParent() != null) {
-          parent = visited.getParent().getNode();
-        }
-
-        if (node.getToken().getType() == TOK_TABLE_OR_COL && (parent != null && parent.getToken().getType() != DOT)) {
-          // Take child ident.totext
-          ASTNode ident = (ASTNode) node.getChild(0);
-          String column = ident.getText().toLowerCase();
-          if (cubeCols.contains(column)) {
-            cubeColsInExpr.add(column);
-          }
-        } else if (node.getToken().getType() == DOT) {
-          String alias = HQLParser.findNodeByPath(node, TOK_TABLE_OR_COL, Identifier).getText().toLowerCase();
-          ASTNode colIdent = (ASTNode) node.getChild(1);
-          String column = colIdent.getText().toLowerCase();
-          if (cubeql.getAliasForTableName(cubeql.getCube()).equalsIgnoreCase(alias) && cubeCols.contains(column)) {
-            cubeColsInExpr.add(column);
-          }
-        }
-      }
-    });
-
-    return cubeColsInExpr;
-  }
-
   @Override
   public String getStorageString(String alias) {
     return StringUtils.join(storageTables, ",") + " " + alias;
@@ -371,4 +338,14 @@ public class CandidateFact implements CandidateTable, QueryAST {
     }
     return timePartDimensions;
   }
+
+  public void updateFromString(CubeQueryContext query, Set<Dimension> queryDims,
+    Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
+    fromString = "%s"; // to update the storage alias later
+    if (query.isAutoJoinResolved()) {
+      fromString =
+        query.getAutoJoinCtx().getFromString(fromString, this, queryDims, dimsToQuery,
+          query, this);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/dba885ca/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
index d96b567..a57292c 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryConfUtil.java
@@ -110,4 +110,9 @@ public final class CubeQueryConfUtil {
   public static final boolean DEFAULT_ENABLE_FLATTENING_FOR_BRIDGETABLES = false;
   public static final String BRIDGE_TABLE_FIELD_AGGREGATOR = "lens.cube.query.bridge.table.field.aggregator";
   public static final String DEFAULT_BRIDGE_TABLE_FIELD_AGGREGATOR = "collect_set";
+  public static final String DO_FLATTENING_OF_BRIDGE_TABLE_EARLY =
+    "lens.cube.query.flatten.bridge.tables.early";
+  public static final boolean DEFAULT_DO_FLATTENING_OF_BRIDGE_TABLE_EARLY = false;
+  public static final String BRIDGE_TABLE_FIELD_ARRAY_FILTER = "lens.cube.query.bridge.table.field.array.filter";
+  public static final String DEFAULT_BRIDGE_TABLE_FIELD_ARRAY_FILTER = "array_contains";
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/dba885ca/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index b8b6db9..c9b1475 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -170,7 +170,8 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
   @Getter
   private Map<Dimension, PruneCauses<CubeDimensionTable>> dimPruningMsgs =
     new HashMap<Dimension, PruneCauses<CubeDimensionTable>>();
-
+  @Getter
+  private String fromString;
   public CubeQueryContext(ASTNode ast, QB qb, Configuration queryConf, HiveConf metastoreConf)
     throws LensException {
     this.ast = ast;
@@ -628,6 +629,14 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
     }
   }
 
+  void updateFromString(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
+    fromString = "%s"; // storage string is updated later
+    if (isAutoJoinResolved()) {
+      fromString =
+        getAutoJoinCtx().getFromString(fromString, fact, dimsToQuery.keySet(), dimsToQuery, this, this);
+    }
+  }
+
   public String getSelectTree() {
     return HQLParser.getString(selectAST);
   }
@@ -925,7 +934,7 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
     log.info("facts:{}, dimsToQuery: {}", cfacts, dimsToQuery);
 
     // pick denorm tables for the picked fact and dimensions
-    Set<Dimension> denormTables = new HashSet<Dimension>();
+    Set<Dimension> denormTables = new HashSet<>();
     if (cfacts != null) {
       for (CandidateFact cfact : cfacts) {
         Set<Dimension> factDenormTables = deNormCtx.rewriteDenormctx(cfact, dimsToQuery, cfacts.size() > 1);
@@ -946,7 +955,7 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
     }
     if (autoJoinCtx != null) {
       // add optional dims from Join resolver
-      Set<Dimension> joiningTables = new HashSet<Dimension>();
+      Set<Dimension> joiningTables = new HashSet<>();
       if (cfacts != null && cfacts.size() > 1) {
         for (CandidateFact cfact : cfacts) {
           Set<Dimension> factJoiningTables = autoJoinCtx.pickOptionalTables(cfact, factDimMap.get(cfact), this);
@@ -968,9 +977,15 @@ public class CubeQueryContext implements TrackQueriedColumns, QueryAST {
           cfact.updateASTs(this);
         }
         whereAST = MultiFactHQLContext.convertHavingToWhere(havingAST, this, cfacts, new DefaultAliasDecider());
+        for (CandidateFact cFact : cfacts) {
+          cFact.updateFromString(this, factDimMap.get(cFact), dimsToQuery);
+        }
       }
     }
 
+    if (cfacts == null || cfacts.size() == 1) {
+      updateFromString(cfacts == null? null: cfacts.iterator().next(), dimsToQuery);
+    }
     hqlContext = createHQLContext(cfacts, dimsToQuery, factDimMap);
     return hqlContext.toHQL();
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/dba885ca/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
index dadbfa0..80ceae4 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
@@ -25,7 +25,18 @@ public class DefaultAliasDecider implements AliasDecider {
   int counter = 0;
   private static final String ALIAS_PREFIX = "alias";
 
+  final String aliasPrefix;
+  public DefaultAliasDecider(String alias) {
+    aliasPrefix = alias;
+  }
+
+  DefaultAliasDecider() {
+    this(ALIAS_PREFIX);
+  }
   public String decideAlias(ASTNode node) {
-    return ALIAS_PREFIX + (counter++);
+    if (node == null) {
+      throw new NullPointerException("Node cannot be null");
+    }
+    return aliasPrefix + (counter++);
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/dba885ca/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java
index a403e36..0697e78 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java
@@ -30,6 +30,7 @@ import lombok.Data;
 public class DefaultQueryAST implements QueryAST {
   private ASTNode selectAST, whereAST, groupByAST, havingAST, joinAST, orderByAST;
   private Integer limitValue;
+  private String fromString;
 
   public String getSelectTree() {
     return HQLParser.getString(selectAST);
@@ -49,7 +50,6 @@ public class DefaultQueryAST implements QueryAST {
     return null;
   }
 
-
   public String getHavingTree() {
     if (havingAST != null) {
       return HQLParser.getString(havingAST);
@@ -69,6 +69,7 @@ public class DefaultQueryAST implements QueryAST {
     LensException {
     return new DefaultQueryAST(ast.getSelectAST(),
       fact.getStorageWhereClause(storageTable.substring(storageTable.indexOf(".") + 1)),
-      ast.getGroupByAST(), ast.getHavingAST(), ast.getJoinAST(), ast.getOrderByAST(), ast.getLimitValue());
+      ast.getGroupByAST(), ast.getHavingAST(), ast.getJoinAST(), ast.getOrderByAST(), ast.getLimitValue(),
+      ast.getFromString());
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/dba885ca/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
index 318c82a..7c14be7 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
@@ -40,27 +40,23 @@ abstract class DimHQLContext extends SimpleHQLContext {
   private final Set<Dimension> queriedDims;
   private String where;
   protected final CubeQueryContext query;
+  private final String astFromString;
 
   public CubeQueryContext getQuery() {
     return query;
   }
   DimHQLContext(CubeQueryContext query, Map<Dimension, CandidateDim> dimsToQuery,
     Set<Dimension> queriedDims, QueryAST ast) throws LensException {
-    this(query, dimsToQuery, queriedDims, ast.getSelectTree(), ast.getWhereTree(), ast.getGroupByTree(),
-      ast.getOrderByTree(), ast.getHavingTree(), ast.getLimitValue());
-  }
-  DimHQLContext(CubeQueryContext query, Map<Dimension, CandidateDim> dimsToQuery,
-    Set<Dimension> queriedDims, String select, String where,
-    String groupby, String orderby, String having, Integer limit) throws LensException {
-    super(select, groupby, orderby, having, limit);
+    super(ast.getSelectTree(), ast.getGroupByTree(), ast.getOrderByTree(), ast.getHavingTree(), ast.getLimitValue());
     this.query = query;
     this.dimsToQuery = dimsToQuery;
-    this.where = where;
+    this.where = ast.getWhereTree();
     this.queriedDims = queriedDims;
+    this.astFromString = ast.getFromString();
   }
 
   protected void setMissingExpressions() throws LensException {
-    setFrom(getFromString());
+    setFrom(String.format(astFromString, getFromTable()));
     setWhere(joinWithAnd(
       genWhereClauseWithDimPartitions(where), getQuery().getConf().getBoolean(
         CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, CubeQueryConfUtil.DEFAULT_REPLACE_TIMEDIM_WITH_PART_COL)
@@ -71,21 +67,6 @@ abstract class DimHQLContext extends SimpleHQLContext {
     return null;
   }
 
-
-
-  protected String getFromString() throws LensException {
-    String fromString = getFromTable();
-    if (query.isAutoJoinResolved()) {
-      fromString =
-        query.getAutoJoinCtx().getFromString(fromString, getQueriedFact(), getQueriedDimSet(), getDimsToQuery(), query);
-    }
-    return fromString;
-  }
-
-  protected abstract Set<Dimension> getQueriedDimSet();
-
-  protected abstract CandidateFact getQueriedFact();
-
   protected abstract String getFromTable() throws LensException;
 
   public Map<Dimension, CandidateDim> getDimsToQuery() {

http://git-wip-us.apache.org/repos/asf/lens/blob/dba885ca/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
index d22287b..6f6572e 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
@@ -55,14 +55,4 @@ class DimOnlyHQLContext extends DimHQLContext {
       return query.getQBFromString(null, getDimsToQuery());
     }
   }
-
-  @Override
-  protected Set<Dimension> getQueriedDimSet() {
-    return getDimsToQuery().keySet();
-  }
-
-  @Override
-  protected CandidateFact getQueriedFact() {
-    return null;
-  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/dba885ca/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
index 8d6105f..0d00188 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
@@ -37,10 +37,10 @@ import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.parse.*;
 
+import org.antlr.runtime.CommonToken;
 import org.antlr.runtime.tree.Tree;
 
 import com.google.common.base.Optional;
-
 import com.google.common.collect.Sets;
 import lombok.Data;
 import lombok.extern.slf4j.Slf4j;
@@ -60,6 +60,26 @@ public final class HQLParser {
       .getType() == HiveParser.TOK_TABLE_OR_COL && astNode.getChild(1).getType() == HiveParser.Identifier;
   }
 
+  public static boolean isPrimitiveBooleanExpression(ASTNode ast) {
+    return HQLParser.FILTER_OPERATORS.contains(ast.getType());
+  }
+
+  public static boolean isPrimitiveBooleanFunction(ASTNode ast) {
+    if (ast.getType() == TOK_FUNCTION) {
+      if (ast.getChild(0).getText().equals("in")) {
+        return true;
+      }
+    }
+    return false;
+  }
+  public static ASTNode getDotAST(String tableAlias, String fieldAlias) {
+    ASTNode child = new ASTNode(new CommonToken(DOT, "."));
+    child.addChild(new ASTNode(new CommonToken(TOK_TABLE_OR_COL, "TOK_TABLE_OR_COL")));
+    child.getChild(0).addChild(new ASTNode(new CommonToken(Identifier, tableAlias)));
+    child.addChild(new ASTNode(new CommonToken(Identifier, fieldAlias)));
+    return child;
+  }
+
   public interface ASTNodeVisitor {
     void visit(TreeNode node) throws LensException;
   }
@@ -147,8 +167,8 @@ public final class HQLParser {
     primitiveTypes.add(TOK_CHAR);
     PRIMITIVE_TYPES = Collections.unmodifiableSet(primitiveTypes);
 
-    FILTER_OPERATORS = Sets.newHashSet(KW_IN, GREATERTHAN, GREATERTHANOREQUALTO, LESSTHAN, LESSTHANOREQUALTO, EQUAL,
-      EQUAL_NS);
+    FILTER_OPERATORS = Sets.newHashSet(GREATERTHAN, GREATERTHANOREQUALTO, LESSTHAN, LESSTHANOREQUALTO, EQUAL,
+      EQUAL_NS, NOTEQUAL);
   }
 
   public static ASTNode parseHQL(String query, HiveConf conf) throws LensException {
@@ -720,6 +740,29 @@ public final class HQLParser {
     return colname;
   }
 
+  public static Set<String> getColsInExpr(final String tableAlias, ASTNode expr) throws LensException {
+    final Set<String> colsInExpr = new HashSet<>();
+    HQLParser.bft(expr, new ASTNodeVisitor() {
+      @Override
+      public void visit(TreeNode visited) {
+        ASTNode node = visited.getNode();
+        ASTNode parent = null;
+        if (visited.getParent() != null) {
+          parent = visited.getParent().getNode();
+        }
+        if (node.getToken().getType() == DOT) {
+          String alias = HQLParser.findNodeByPath(node, TOK_TABLE_OR_COL, Identifier).getText().toLowerCase();
+          ASTNode colIdent = (ASTNode) node.getChild(1);
+          String column = colIdent.getText().toLowerCase();
+          if (tableAlias.equalsIgnoreCase(alias)) {
+            colsInExpr.add(column);
+          }
+        }
+      }
+    });
+    return colsInExpr;
+  }
+
   public static boolean isAggregateAST(ASTNode node) {
     int exprTokenType = node.getType();
     if (exprTokenType == HiveParser.TOK_FUNCTION || exprTokenType == HiveParser.TOK_FUNCTIONDI

http://git-wip-us.apache.org/repos/asf/lens/blob/dba885ca/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
index b861bb6..7dceee1 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
@@ -65,7 +65,8 @@ class JoinResolver implements ContextRewriter {
 
     if (!joinResolverDisabled && (!cubeql.getNonChainedDimensions().isEmpty() && cubeql.hasCubeInQuery())
       || ((cubeql.getNonChainedDimensions().size() > 1) && !cubeql.hasCubeInQuery())) {
-      log.warn("Disabling auto join resolver as there are direct dimensions queried");
+      log.warn("Disabling auto join resolver as there are direct dimensions queried {}",
+        cubeql.getNonChainedDimensions());
       joinResolverDisabled = true;
     }
     if (joinResolverDisabled) {
@@ -156,12 +157,16 @@ class JoinResolver implements ContextRewriter {
       CubeQueryConfUtil.DEFAULT_ENABLE_FLATTENING_FOR_BRIDGETABLES);
     String bridgeTableFieldAggr = cubeql.getConf().get(CubeQueryConfUtil.BRIDGE_TABLE_FIELD_AGGREGATOR,
       CubeQueryConfUtil.DEFAULT_BRIDGE_TABLE_FIELD_AGGREGATOR);
+    String bridgeTableFieldArrayFilter = cubeql.getConf().get(CubeQueryConfUtil.BRIDGE_TABLE_FIELD_ARRAY_FILTER,
+      CubeQueryConfUtil.DEFAULT_BRIDGE_TABLE_FIELD_ARRAY_FILTER);
+    boolean doFlatteningEarly = cubeql.getConf().getBoolean(CubeQueryConfUtil.DO_FLATTENING_OF_BRIDGE_TABLE_EARLY,
+      CubeQueryConfUtil.DEFAULT_DO_FLATTENING_OF_BRIDGE_TABLE_EARLY);
     Set<Dimension> requiredDimensions = Sets.newHashSet(cubeql.getDimensions());
     requiredDimensions.removeAll(cubeql.getOptionalDimensions());
     AutoJoinContext joinCtx =
       new AutoJoinContext(multipleJoinPaths, requiredDimensions,
         tableJoinTypeMap, target, cubeql.getConf().get(CubeQueryConfUtil.JOIN_TYPE_KEY), true, flattenBridgeTables,
-        bridgeTableFieldAggr);
+        bridgeTableFieldAggr, bridgeTableFieldArrayFilter, doFlatteningEarly);
     cubeql.setAutoJoinCtx(joinCtx);
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/dba885ca/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
index 9c18b7e..4d6ce9e 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
@@ -230,7 +230,4 @@ class MultiFactHQLContext extends SimpleHQLContext {
     return null;
   }
 
-  private static boolean isPrimitiveBooleanExpression(ASTNode ast) {
-    return HQLParser.FILTER_OPERATORS.contains(ast.getType());
-  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/dba885ca/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java
index 31680ca..f064dcb 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java
@@ -18,14 +18,14 @@
  */
 package org.apache.lens.cube.parse;
 
-
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 
-
-interface QueryAST {
+public interface QueryAST {
 
   String getSelectTree();
 
+  String getFromString();
+
   String getWhereTree();
 
   String getHavingTree();

http://git-wip-us.apache.org/repos/asf/lens/blob/dba885ca/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
index 63cb388..9ee94d3 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
@@ -192,14 +192,6 @@ public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
     ast.getSelectAST().addChild(selectExprAST);
   }
 
-  private ASTNode getDotAST(String tableAlias, String fieldAlias) {
-    ASTNode child = new ASTNode(new CommonToken(DOT, "."));
-    child.addChild(new ASTNode(new CommonToken(TOK_TABLE_OR_COL, "TOK_TABLE_OR_COL")));
-    child.getChild(0).addChild(new ASTNode(new CommonToken(Identifier, tableAlias)));
-    child.addChild(new ASTNode(new CommonToken(Identifier, fieldAlias)));
-    return child;
-  }
-
   private static ArrayList<HQLContextInterface> getUnionContexts(CandidateFact fact, Map<Dimension, CandidateDim>
     dimsToQuery, CubeQueryContext query, QueryAST ast)
     throws LensException {

http://git-wip-us.apache.org/repos/asf/lens/blob/dba885ca/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactSingleStorageHQLContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactSingleStorageHQLContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactSingleStorageHQLContext.java
index b1a3b3f..dbc84ed 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactSingleStorageHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactSingleStorageHQLContext.java
@@ -33,7 +33,6 @@ import org.apache.lens.server.api.error.LensException;
 class SingleFactSingleStorageHQLContext extends DimOnlyHQLContext {
 
   private final CandidateFact fact;
-  private final Set<Dimension> queriedDimSet;
   private String storageAlias;
 
   SingleFactSingleStorageHQLContext(CandidateFact fact, Map<Dimension, CandidateDim> dimsToQuery,
@@ -47,7 +46,6 @@ class SingleFactSingleStorageHQLContext extends DimOnlyHQLContext {
     throws LensException {
     super(dimsToQuery, dimsQueried, query, ast);
     this.fact = fact;
-    this.queriedDimSet = dimsQueried;
   }
 
   SingleFactSingleStorageHQLContext(CandidateFact fact, String storageAlias, Map<Dimension, CandidateDim> dimsToQuery,
@@ -72,14 +70,4 @@ class SingleFactSingleStorageHQLContext extends DimOnlyHQLContext {
       }
     }
   }
-
-  @Override
-  protected CandidateFact getQueriedFact() {
-    return fact;
-  }
-
-  @Override
-  public Set<Dimension> getQueriedDimSet() {
-    return queriedDimSet;
-  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/dba885ca/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
index e14a898..8b24f70 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
@@ -70,12 +70,15 @@ public class AutoJoinContext {
   JoinClause minCostClause;
   private final boolean flattenBridgeTables;
   private final String bridgeTableFieldAggr;
+  private final String bridgeTableFieldArrayFilter;
+  private final boolean doFlatteningEarly;
 
   public AutoJoinContext(Map<Aliased<Dimension>, List<JoinPath>> allPaths,
                          Set<Dimension> requiredDimensions,
                          Map<AbstractCubeTable, JoinType> tableJoinTypeMap,
                          AbstractCubeTable autoJoinTarget, String joinTypeCfg, boolean joinsResolved,
-                         boolean flattenBridgeTables, String bridgeTableFieldAggr) {
+                         boolean flattenBridgeTables, String bridgeTableFieldAggr, String bridgeTableFieldArrayFilter,
+                         boolean doFlatteningEarly) {
     this.allPaths = allPaths;
     this.requiredDimensions = requiredDimensions;
     initJoinPathColumns();
@@ -85,6 +88,8 @@ public class AutoJoinContext {
     this.joinsResolved = joinsResolved;
     this.flattenBridgeTables = flattenBridgeTables;
     this.bridgeTableFieldAggr = bridgeTableFieldAggr;
+    this.bridgeTableFieldArrayFilter = bridgeTableFieldArrayFilter;
+    this.doFlatteningEarly = doFlatteningEarly;
     log.debug("All join paths:{}", allPaths);
     log.debug("Join path from columns:{}", joinPathFromColumns);
     log.debug("Join path to columns:{}", joinPathToColumns);
@@ -165,22 +170,23 @@ public class AutoJoinContext {
   }
 
   public String getFromString(String fromTable, CandidateFact fact, Set<Dimension> qdims,
-    Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext cubeql) throws LensException {
+    Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext cubeql, QueryAST ast) throws LensException {
     String fromString = fromTable;
     log.info("All paths dump:{} Queried dims:{}", cubeql.getAutoJoinCtx().getAllPaths(), qdims);
     if (qdims == null || qdims.isEmpty()) {
       return fromString;
     }
     // Compute the merged join clause string for the min cost joinClause
-    String clause = getMergedJoinClause(cubeql, cubeql.getAutoJoinCtx().getJoinClause(fact), dimsToQuery);
+    String clause = getMergedJoinClause(cubeql, fact, ast,
+      cubeql.getAutoJoinCtx().getJoinClause(fact), dimsToQuery);
 
     fromString += clause;
     return fromString;
   }
 
   // Some refactoring needed to account for multiple join paths
-  public String getMergedJoinClause(CubeQueryContext cubeql, JoinClause joinClause,
-                                    Map<Dimension, CandidateDim> dimsToQuery) {
+  public String getMergedJoinClause(CubeQueryContext cubeql, CandidateFact fact, QueryAST ast, JoinClause joinClause,
+                                    Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
     Set<String> clauses = new LinkedHashSet<>();
     String joinTypeStr = "";
     JoinType joinType = JoinType.INNER;
@@ -192,12 +198,8 @@ public class AutoJoinContext {
 
     Iterator<JoinTree> iter = joinClause.getJoinTree().dft();
     boolean hasBridgeTable = false;
-    boolean initedBridgeClauses = false;
-    StringBuilder bridgeSelectClause = new StringBuilder();
-    StringBuilder bridgeFromClause = new StringBuilder();
-    StringBuilder bridgeFilterClause = new StringBuilder();
-    StringBuilder bridgeJoinClause = new StringBuilder();
-    StringBuilder bridgeGroupbyClause = new StringBuilder();
+    BridgeTableJoinContext bridgeTableJoinContext = new BridgeTableJoinContext(cubeql, fact, ast, bridgeTableFieldAggr,
+      bridgeTableFieldArrayFilter, doFlatteningEarly);
 
     while (iter.hasNext()) {
       JoinTree cur = iter.next();
@@ -252,90 +254,18 @@ public class AutoJoinContext {
 
       // if a bridge table is present in the path
       if (hasBridgeTable) {
-        // if any relation has bridge table, the clause becomes the following :
-        // join (" select " + joinkey + " aggr over fields from bridge table + from bridgeTable + [where user/storage
-        // filters] + groupby joinkey) on joincond"
-        // Or
-        // " join (select " + joinkey + " aggr over fields from table reached through bridge table + from bridge table
-        // join <next tables> on join condition + [and user/storage filters] + groupby joinkey) on joincond
-        if (!initedBridgeClauses) {
-          // we just found a bridge table in the path we need to initialize the clauses for subquery required for
-          // aggregating fields of bridge table
-          // initiliaze select clause with join key
-          bridgeSelectClause.append(" (select ").append(toAlias).append(".").append(rel.getToColumn()).append(" as ")
-          .append(rel.getToColumn());
-          // group by join key
-          bridgeGroupbyClause.append(" group by ").append(toAlias).append(".").append(rel.getToColumn());
-          // from clause with bridge table
-          bridgeFromClause.append(" from ").append(dimsToQuery.get(rel.getToTable()).getStorageString(toAlias));
-          // we need to initialize filter clause with user filter clause or storage filter if applicable
-          if (StringUtils.isNotBlank(userFilter)) {
-            bridgeFilterClause.append(userFilter);
-          }
-          if (StringUtils.isNotBlank(storageFilter)) {
-            if (StringUtils.isNotBlank(bridgeFilterClause.toString())) {
-              bridgeFilterClause.append(" and ");
-            }
-            bridgeFilterClause.append(storageFilter);
-          }
-          // initialize final join clause
-          bridgeJoinClause.append(" on ").append(fromAlias).append(".")
-            .append(rel.getFromColumn()).append(" = ").append("%s")
-            .append(".").append(rel.getToColumn());
-          initedBridgeClauses = true;
-        } else {
-          // if bridge clauses are already inited, this is a next table getting joined with bridge table
-          // we will append a simple join clause
-          bridgeFromClause.append(" join ");
-          bridgeFromClause.append(dimsToQuery.get(rel.getToTable()).getStorageString(toAlias));
-          bridgeFromClause.append(" on ").append(fromAlias).append(".")
-            .append(rel.getFromColumn()).append(" = ").append(toAlias)
-            .append(".").append(rel.getToColumn());
-
-          if (StringUtils.isNotBlank(userFilter)) {
-            bridgeFromClause.append(" and ").append(userFilter);
-          }
-          if (StringUtils.isNotBlank(storageFilter)) {
-            bridgeFromClause.append(" and ").append(storageFilter);
-          }
-        }
+        bridgeTableJoinContext.updateBridgeClause(rel, fromAlias, toAlias, dimsToQuery.get(rel.getToTable()),
+          userFilter, storageFilter);
         if (cubeql.getTblAliasToColumns().get(toAlias) != null
           && !cubeql.getTblAliasToColumns().get(toAlias).isEmpty()) {
           // there are fields selected from this table after seeing bridge table in path
           // we should make subQuery for this selection
-          clause.append(joinTypeStr).append(" join ");
-          clause.append(bridgeSelectClause.toString());
-          for (String col : cubeql.getTblAliasToColumns().get(toAlias)) {
-            clause.append(",").append(bridgeTableFieldAggr).append("(").append(toAlias)
-              .append(".").append(col)
-              .append(")")
-              .append(" as ").append(col);
-          }
-          String bridgeFrom = bridgeFromClause.toString();
-          clause.append(bridgeFrom);
-          String bridgeFilter = bridgeFilterClause.toString();
-          if (StringUtils.isNotBlank(bridgeFilter)) {
-            if (bridgeFrom.contains(" join ")) {
-              clause.append(" and ");
-            } else {
-              clause.append(" where");
-            }
-            clause.append(bridgeFilter);
-          }
-          clause.append(bridgeGroupbyClause.toString());
-          clause.append(") ").append(toAlias);
-          clause.append(String.format(bridgeJoinClause.toString(), toAlias));
-          clauses.add(clause.toString());
+          clauses.add(bridgeTableJoinContext.generateJoinClause(joinTypeStr, toAlias));
         }
         if (cur.getSubtrees().isEmpty()) {
           // clear bridge flags and builders, as there are no more clauses in this tree.
           hasBridgeTable = false;
-          initedBridgeClauses = false;
-          bridgeSelectClause.setLength(0);
-          bridgeFromClause.setLength(0);
-          bridgeFilterClause.setLength(0);
-          bridgeJoinClause.setLength(0);
-          bridgeGroupbyClause.setLength(0);
+          bridgeTableJoinContext.resetContext();
         }
       } else {
         // Simple join clause is :

http://git-wip-us.apache.org/repos/asf/lens/blob/dba885ca/lens-cube/src/main/java/org/apache/lens/cube/parse/join/BridgeTableJoinContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/BridgeTableJoinContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/BridgeTableJoinContext.java
new file mode 100644
index 0000000..cf74634
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/BridgeTableJoinContext.java
@@ -0,0 +1,321 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse.join;
+
+import static org.apache.lens.cube.parse.HQLParser.*;
+
+import java.util.*;
+
+import org.apache.lens.cube.metadata.join.TableRelationship;
+import org.apache.lens.cube.parse.*;
+import org.apache.lens.server.api.error.LensException;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+
+import lombok.Data;
+import lombok.extern.slf4j.Slf4j;
+
+/**
+ * Join context related to Bridge tables
+ */
+@Slf4j
+public class BridgeTableJoinContext {
+  private final String bridgeTableFieldAggr;
+  private final String arrayFilter;
+  private final CubeQueryContext cubeql;
+  private final CandidateFact fact;
+  private final QueryAST queryAST;
+  private final boolean doFlatteningEarly;
+  private boolean initedBridgeClauses = false;
+  private final StringBuilder bridgeSelectClause = new StringBuilder();
+  private final StringBuilder bridgeFromClause = new StringBuilder();
+  private final StringBuilder bridgeFilterClause = new StringBuilder();
+  private final StringBuilder bridgeJoinClause = new StringBuilder();
+  private final StringBuilder bridgeGroupbyClause = new StringBuilder();
+
+  public BridgeTableJoinContext(CubeQueryContext cubeql, CandidateFact fact, QueryAST queryAST,
+    String bridgeTableFieldAggr, String arrayFilter, boolean doFlatteningEarly) {
+    this.cubeql = cubeql;
+    this.queryAST = queryAST;
+    this.fact = fact;
+    this.bridgeTableFieldAggr = bridgeTableFieldAggr;
+    this.arrayFilter = arrayFilter;
+    this.doFlatteningEarly = doFlatteningEarly;
+  }
+
+  public void resetContext() {
+    initedBridgeClauses = false;
+    bridgeSelectClause.setLength(0);
+    bridgeFromClause.setLength(0);
+    bridgeFilterClause.setLength(0);
+    bridgeJoinClause.setLength(0);
+    bridgeGroupbyClause.setLength(0);
+  }
+
+  public void initBridgeClauses(TableRelationship rel, String fromAlias, String toAlias, CandidateDim toTable,
+    String userFilter,
+    String storageFilter) {
+    // we just found a bridge table in the path we need to initialize the clauses for subquery required for
+    // aggregating fields of bridge table
+    // initialize select clause with join key
+    bridgeSelectClause.append(" (select ").append(toAlias).append(".").append(rel.getToColumn()).append(" as ")
+      .append(rel.getToColumn());
+    // group by join key
+    bridgeGroupbyClause.append(" group by ").append(toAlias).append(".").append(rel.getToColumn());
+    // from clause with bridge table
+    bridgeFromClause.append(" from ").append(toTable.getStorageString(toAlias));
+    // we need to initialize filter clause with user filter clause or storage filter if applicable
+    if (StringUtils.isNotBlank(userFilter)) {
+      bridgeFilterClause.append(userFilter);
+    }
+    if (StringUtils.isNotBlank(storageFilter)) {
+      if (StringUtils.isNotBlank(bridgeFilterClause.toString())) {
+        bridgeFilterClause.append(" and ");
+      }
+      bridgeFilterClause.append(storageFilter);
+    }
+    // initialize final join clause
+    bridgeJoinClause.append(" on ").append(fromAlias).append(".")
+      .append(rel.getFromColumn()).append(" = ").append("%s")
+      .append(".").append(rel.getToColumn());
+    initedBridgeClauses = true;
+  }
+
+  // if any relation has bridge table, the clause becomes the following :
+  // join (" select " + joinkey + " aggr over fields from bridge table + from bridgeTable + [where user/storage
+  // filters] + groupby joinkey) on joincond"
+  // Or
+  // " join (select " + joinkey + " aggr over fields from table reached through bridge table + from bridge table
+  // join <next tables> on join condition + [and user/storage filters] + groupby joinkey) on joincond
+  public void updateBridgeClause(TableRelationship rel, String fromAlias, String toAlias, CandidateDim toTable,
+    String userFilter, String storageFilter) {
+    if (!initedBridgeClauses) {
+      initBridgeClauses(rel, fromAlias, toAlias, toTable, userFilter, storageFilter);
+    } else {
+      addAnotherJoinClause(rel, fromAlias, toAlias, toTable, userFilter, storageFilter);
+    }
+  }
+
+  public void addAnotherJoinClause(TableRelationship rel, String fromAlias, String toAlias, CandidateDim toTable,
+    String userFilter, String storageFilter) {
+    // if bridge clauses are already inited, this is a next table getting joined with bridge table
+    // we will append a simple join clause
+    bridgeFromClause.append(" join ");
+    bridgeFromClause.append(toTable.getStorageString(toAlias));
+    bridgeFromClause.append(" on ").append(fromAlias).append(".")
+      .append(rel.getFromColumn()).append(" = ").append(toAlias)
+      .append(".").append(rel.getToColumn());
+
+    if (StringUtils.isNotBlank(userFilter)) {
+      bridgeFromClause.append(" and ").append(userFilter);
+    }
+    if (StringUtils.isNotBlank(storageFilter)) {
+      bridgeFromClause.append(" and ").append(storageFilter);
+    }
+  }
+
+  public String generateJoinClause(String joinTypeStr, String toAlias) throws LensException {
+    StringBuilder clause = new StringBuilder(joinTypeStr);
+    clause.append(" join ");
+    clause.append(bridgeSelectClause.toString());
+    // iterate over all select expressions and add them to the bridge-table select clause if doFlatteningEarly is disabled
+    if (!doFlatteningEarly) {
+      BridgeTableSelectCtx selectCtx = new BridgeTableSelectCtx(bridgeTableFieldAggr, arrayFilter, toAlias);
+      selectCtx.processSelectAST(queryAST.getSelectAST());
+      selectCtx.processWhereClauses(fact);
+      selectCtx.processGroupbyAST(queryAST.getGroupByAST());
+      selectCtx.processOrderbyAST(queryAST.getOrderByAST());
+      clause.append(",").append(StringUtils.join(selectCtx.getSelectedBridgeExprs(), ","));
+    } else {
+      for (String col : cubeql.getTblAliasToColumns().get(toAlias)) {
+        clause.append(",").append(bridgeTableFieldAggr).append("(").append(toAlias)
+          .append(".").append(col)
+          .append(")")
+          .append(" as ").append(col);
+      }
+    }
+    String bridgeFrom = bridgeFromClause.toString();
+    clause.append(bridgeFrom);
+    String bridgeFilter = bridgeFilterClause.toString();
+    if (StringUtils.isNotBlank(bridgeFilter)) {
+      if (bridgeFrom.contains(" join ")) {
+        clause.append(" and ");
+      } else {
+        clause.append(" where ");
+      }
+      clause.append(bridgeFilter);
+    }
+    clause.append(bridgeGroupbyClause.toString());
+    clause.append(") ").append(toAlias);
+    clause.append(String.format(bridgeJoinClause.toString(), toAlias));
+    return clause.toString();
+  }
+
+  @Data
+  static class BridgeTableSelectCtx {
+    private final HashMap<HashableASTNode, ASTNode> exprToDotAST = new HashMap<>();
+    private final List<String> selectedBridgeExprs = new ArrayList<>();
+    private final AliasDecider aliasDecider = new DefaultAliasDecider("balias");
+    private final String bridgeTableFieldAggr;
+    private final String arrayFilter;
+    private final String tableAlias;
+
+    List<String> processSelectAST(ASTNode selectAST)
+      throws LensException {
+      // iterate over children
+      for (int i = 0; i < selectAST.getChildCount(); i++) {
+        ASTNode selectExprNode = (ASTNode) selectAST.getChild(i);
+        ASTNode child = (ASTNode) selectExprNode.getChild(0);
+        if (hasBridgeCol(child, tableAlias)) {
+          selectExprNode.setChild(0, getDotASTForExprAST(child));
+        }
+      }
+      return selectedBridgeExprs;
+    }
+
+    private ASTNode getDotASTForExprAST(ASTNode child) {
+      HashableASTNode hashAST = new HashableASTNode(child);
+      if (!exprToDotAST.containsKey(hashAST)) {
+        // add selected expression to get selected from bridge table, with a generated alias
+        String colAlias = aliasDecider.decideAlias(child);
+        selectedBridgeExprs.add(bridgeTableFieldAggr + "(" + HQLParser.getString(child) + ") as " + colAlias);
+
+        // replace bridge expression with tableAlias.colAlias.
+        ASTNode dot = HQLParser.getDotAST(tableAlias, colAlias);
+        exprToDotAST.put(hashAST, dot);
+      }
+      return exprToDotAST.get(hashAST);
+    }
+
+    // process groupby
+    void processGroupbyAST(ASTNode ast)
+      throws LensException {
+      if (ast == null) {
+        return;
+      }
+      // iterate over children
+      for (int i = 0; i < ast.getChildCount(); i++) {
+        ASTNode exprNode = (ASTNode) ast.getChild(i);
+        if (hasBridgeCol(exprNode, tableAlias)) {
+          ast.setChild(i, getDotASTForExprAST(exprNode));
+        }
+      }
+    }
+
+    // process orderby
+    void processOrderbyAST(ASTNode ast)
+      throws LensException {
+      if (ast == null) {
+        return;
+      }
+      // iterate over children
+      for (int i = 0; i < ast.getChildCount(); i++) {
+        ASTNode exprNode = (ASTNode) ast.getChild(i);
+        ASTNode child = (ASTNode) exprNode.getChild(0);
+        if (hasBridgeCol(child, tableAlias)) {
+          exprNode.setChild(0, getDotASTForExprAST(child));
+        }
+      }
+    }
+
+    void processWhereClauses(CandidateFact fact) throws LensException {
+
+      for (Map.Entry<String, ASTNode> whereEntry : fact.getStorgeWhereClauseMap().entrySet()) {
+        ASTNode whereAST = whereEntry.getValue();
+        processWhereAST(whereAST, null, 0);
+      }
+    }
+
+    void processWhereAST(ASTNode ast, ASTNode parent, int childPos)
+      throws LensException {
+      if (ast == null) {
+        return;
+      }
+      ASTNode child;
+      int replaceIndex = -1;
+      if (isPrimitiveBooleanExpression(ast)) {
+        replaceIndex = 0;
+      } else if (isPrimitiveBooleanFunction(ast)) {
+        replaceIndex = 1;
+      }
+      if (replaceIndex != -1) {
+        child = (ASTNode) ast.getChild(replaceIndex);
+        if (hasBridgeCol(child, tableAlias)) {
+          ast.setChild(replaceIndex, getDotASTForExprAST(child));
+          parent.setChild(childPos, replaceDirectFiltersWithArrayFilter(ast, arrayFilter));
+        }
+      }
+      // recurse down
+      for (int i = 0; i < ast.getChildCount(); i++) {
+        processWhereAST((ASTNode) ast.getChild(i), ast, i);
+      }
+    }
+  }
+  /**
+   * Update =, != and IN clause filters to arrayFilter. arrayFilter will have signature arrayFilter(col, value)
+   *
+   * @param ast AST for simple filter
+   * @param arrayFilter arrayFilter function
+   * @return ASTNode with converted filter
+   *
+   * @throws LensException
+   */
+  static ASTNode replaceDirectFiltersWithArrayFilter(ASTNode ast, String arrayFilter)
+    throws LensException {
+    StringBuilder filterBuilder = new StringBuilder();
+    if ((ast.getType() == HiveParser.EQUAL || ast.getType() == HiveParser.NOTEQUAL)) {
+      String colStr = getString((ASTNode) ast.getChild(0));
+      if (ast.getType() == HiveParser.NOTEQUAL) {
+        filterBuilder.append(" NOT ");
+      }
+      filterBuilder.append(arrayFilter);
+      filterBuilder.append("(");
+      filterBuilder.append(colStr).append(",");
+      filterBuilder.append(getString((ASTNode)ast.getChild(1)));
+      filterBuilder.append(")");
+    } else if (ast.getType() == HiveParser.TOK_FUNCTION) {
+      // This is IN clause as function
+      String colStr = getString((ASTNode) ast.getChild(1));
+      filterBuilder.append("(");
+      for (int i = 2; i < ast.getChildCount(); i++) {
+        filterBuilder.append(arrayFilter);
+        filterBuilder.append("(");
+        filterBuilder.append(colStr).append(",");
+        filterBuilder.append(ast.getChild(i).getText());
+        filterBuilder.append(")");
+        if (i + 1 != ast.getChildCount()) {
+          filterBuilder.append(" OR ");
+        }
+      }
+      filterBuilder.append(")");
+    }
+    String finalFilter = filterBuilder.toString();
+    if (StringUtils.isNotBlank(finalFilter)) {
+      return HQLParser.parseExpr(finalFilter);
+    }
+    return ast;
+  }
+
+  static boolean hasBridgeCol(ASTNode astNode, String tableAlias) throws LensException {
+    Set<String> bridgeCols = HQLParser.getColsInExpr(tableAlias, astNode);
+    return !bridgeCols.isEmpty();
+  }
+}
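
For reference, the join clause that generateJoinClause builds for a bridge table has roughly the following shape (a sketch under the default settings, with hypothetical storage table, alias and column names; the actual string is assembled from the select/from/filter/groupby builders above):

  join (select usersports.user_id as user_id,
               collect_set(substr(usersports.sport_name, 3)) as balias0
        from user_sports_storage usersports
        group by usersports.user_id) usersports
    on basecube.user_id = usersports.user_id

With lens.cube.query.flatten.bridge.tables.early left at its default of false, the queried expression (substr here) is pushed inside the aggregator, and the outer query refers to usersports.balias0 instead of the raw bridge field.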

http://git-wip-us.apache.org/repos/asf/lens/blob/dba885ca/lens-cube/src/main/resources/olap-query-conf.xml
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/resources/olap-query-conf.xml b/lens-cube/src/main/resources/olap-query-conf.xml
index 1436cd1..6e9933e 100644
--- a/lens-cube/src/main/resources/olap-query-conf.xml
+++ b/lens-cube/src/main/resources/olap-query-conf.xml
@@ -211,4 +211,22 @@
       be passed for each driver, as the function could be different for each driver.
     </description>
   </property>
+  <property>
+    <name>lens.cube.query.bridge.table.field.array.filter</name>
+    <value>array_contains</value>
+    <description>The array filter function to be used for filters on fields from bridge tables. It is used
+      only when flattening is enabled and early flattening is disabled. The value can be passed per query.
+      In typical deployments it would be passed per driver, as the function could be different for each
+      driver. The function passed here should have a signature similar to array_contains, i.e.
+      array_contains(array, value) returns true if the array contains the value.
+    </description>
+  </property>
+  <property>
+    <name>lens.cube.query.flatten.bridge.tables.early</name>
+    <value>false</value>
+    <description>Flag specifying whether selected fields should be flattened before applying any expression on top
+      of them. For example, if the expression is substr(bridge_field) and the property is set to true, substr is
+      applied on the aggregated field; if set to false, the aggregate is applied on top of the expression, i.e. substr.
+    </description>
+  </property>
 </configuration>
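
To illustrate the interaction of the two properties, for an expression such as substr(sports, 3) defined over a bridge-table field, the generated aggregation differs roughly as follows (illustrative names, collect_set being the default aggregator):

  -- lens.cube.query.flatten.bridge.tables.early = false (default):
  -- the aggregate wraps the expression inside the bridge-table subquery
  collect_set(substr(usersports.name, 3)) as balias0
  -- lens.cube.query.flatten.bridge.tables.early = true:
  -- the raw field is aggregated in the subquery and the expression is applied
  -- on the aggregated (array) column in the outer query
  collect_set(usersports.name) as name      -- in the bridge subquery
  substr(usersports.name, 3)                -- in the outer query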

http://git-wip-us.apache.org/repos/asf/lens/blob/dba885ca/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 42decc6..379e873 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -1069,6 +1069,16 @@ public class CubeTestSetup {
         });
       }
     });
+    joinChains.add(new JoinChain("userchain", "user-chain", "user chain") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference("basecube", "userid"));
+            add(new TableReference("userdim", "id"));
+          }
+        });
+      }
+    });
     joinChains.add(new JoinChain("userSports", "user-sports", "user sports") {
       {
         addPath(new ArrayList<TableReference>() {
@@ -1131,7 +1141,14 @@ public class CubeTestSetup {
     Set<ExprColumn> baseExprs = new HashSet<>(exprs);
     baseExprs.add(new ExprColumn(new FieldSchema("substrsprorts", "String", "substr of sports"), "substr sports",
       "substr(sports, 10)"));
-
+    baseExprs.add(new ExprColumn(new FieldSchema("xsports_abbr", "array<string>", ""),
+      "xuser sports", "substr(xsports, 3)"));
+    baseExprs.add(new ExprColumn(new FieldSchema("ysports_abbr", "array<string>", ""),
+      "yuser sports", "substr(ysports, 3)"));
+    baseExprs.add(new ExprColumn(new FieldSchema("sports_abbr", "array<string>", ""),
+      "user sports", "substr(sports, 3)"));
+    baseExprs.add(new ExprColumn(new FieldSchema("sportids_abbr", "array<string>", ""),
+      "user sports", "case when sportids == 1 then 'CKT' when sportids == 2 then 'FTB' else 'NON' end"));
     client.createCube(BASE_CUBE_NAME, cubeMeasures2, cubeDimensions2, baseExprs, joinChains, cubeProperties);
 
     Map<String, String> derivedProperties = new HashMap<>();