Posted to commits@drill.apache.org by vi...@apache.org on 2018/03/20 20:58:06 UTC

[1/5] drill git commit: DRILL-6241: Saffron properties config has excessive permissions

Repository: drill
Updated Branches:
  refs/heads/master dd4a46a6c -> b4c599e33


DRILL-6241: Saffron properties config has excessive permissions

Changed saffron.properties permissions to 0640.

closes #1167


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/4e3e0ffd
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/4e3e0ffd
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/4e3e0ffd

Branch: refs/heads/master
Commit: 4e3e0ffd0e9fa72f5a8865188c4cfb79b7f2cb95
Parents: dd4a46a
Author: Vladimir Tkach <vo...@gmail.com>
Authored: Wed Mar 14 13:21:43 2018 +0200
Committer: Vitalii Diravka <vi...@gmail.com>
Committed: Tue Mar 20 22:29:02 2018 +0200

----------------------------------------------------------------------
 distribution/src/assemble/bin.xml | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/4e3e0ffd/distribution/src/assemble/bin.xml
----------------------------------------------------------------------
diff --git a/distribution/src/assemble/bin.xml b/distribution/src/assemble/bin.xml
index 82c4d90..fff07b3 100644
--- a/distribution/src/assemble/bin.xml
+++ b/distribution/src/assemble/bin.xml
@@ -411,6 +411,7 @@
     <file>
       <source>src/resources/saffron.properties</source>
       <outputDirectory>conf</outputDirectory>
+      <fileMode>0640</fileMode>
     </file>
     <file>
       <source>src/resources/drill-on-yarn-example.conf</source>
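
The added <fileMode>0640</fileMode> tells the Maven assembly plugin to install the file as rw-r----- : read/write for the owner, read-only for the group, and no access for others, so the properties file is no longer world-readable. A minimal standalone sketch (not part of this commit; the conf path below is an assumption) that checks an installed copy against that mode with java.nio:

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.Set;

public class CheckSaffronPermissions {
  public static void main(String[] args) throws Exception {
    // 0640 == rw-r----- : owner read/write, group read, others none
    Set<PosixFilePermission> expected = PosixFilePermissions.fromString("rw-r-----");
    // hypothetical install location; pass the real path as the first argument
    Path conf = Paths.get(args.length > 0 ? args[0] : "conf/saffron.properties");
    Set<PosixFilePermission> actual = Files.getPosixFilePermissions(conf);
    System.out.println(actual.equals(expected)
        ? "saffron.properties is 0640 as expected"
        : "unexpected permissions: " + PosixFilePermissions.toString(actual));
  }
}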


[5/5] drill git commit: DRILL-6145: Implement Hive MapR-DB JSON handler

Posted by vi...@apache.org.
DRILL-6145: Implement Hive MapR-DB JSON handler

closes #1158


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/b4c599e3
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/b4c599e3
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/b4c599e3

Branch: refs/heads/master
Commit: b4c599e33606f3e2fef132dbd38ee69b516e681e
Parents: ddb73a5
Author: Vitalii Diravka <vi...@gmail.com>
Authored: Fri Mar 9 18:02:42 2018 +0200
Committer: Vitalii Diravka <vi...@gmail.com>
Committed: Tue Mar 20 22:40:32 2018 +0200

----------------------------------------------------------------------
 contrib/storage-hive/core/pom.xml               | 10 ++++++
 .../drill/exec/store/hive/HiveUtilities.java    |  4 +++
 distribution/pom.xml                            |  8 +++++
 pom.xml                                         | 38 +++++++++++++++++---
 4 files changed, 56 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/b4c599e3/contrib/storage-hive/core/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/pom.xml b/contrib/storage-hive/core/pom.xml
index 24c3cf3..351ed88 100644
--- a/contrib/storage-hive/core/pom.xml
+++ b/contrib/storage-hive/core/pom.xml
@@ -172,6 +172,16 @@
           <groupId>com.tdunning</groupId>
           <artifactId>json</artifactId>
         </dependency>
+        <dependency>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-maprdb-json-handler</artifactId>
+          <scope>runtime</scope>
+        </dependency>
+        <dependency>
+          <groupId>com.mapr.db</groupId>
+          <artifactId>maprdb-mapreduce</artifactId>
+          <scope>runtime</scope>
+        </dependency>
       </dependencies>
     </profile>
   </profiles>

http://git-wip-us.apache.org/repos/asf/drill/blob/b4c599e3/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
index 05b7e89..5ff5506 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java
@@ -64,6 +64,7 @@ import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.apache.hadoop.hive.ql.io.IOConstants;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
@@ -393,6 +394,9 @@ public class HiveUtilities {
             "InputFormat class explicitly specified nor StorageHandler class");
       }
       final HiveStorageHandler storageHandler = HiveUtils.getStorageHandler(job, storageHandlerClass);
+      TableDesc tableDesc = new TableDesc();
+      tableDesc.setProperties(MetaStoreUtils.getTableMetadata(table));
+      storageHandler.configureInputJobProperties(tableDesc, table.getParameters());
       return (Class<? extends InputFormat<?, ?>>) storageHandler.getInputFormatClass();
     } else {
       return (Class<? extends InputFormat<?, ?>>) Class.forName(inputFormatName) ;

http://git-wip-us.apache.org/repos/asf/drill/blob/b4c599e3/distribution/pom.xml
----------------------------------------------------------------------
diff --git a/distribution/pom.xml b/distribution/pom.xml
index 7416b9c..7f481b3 100644
--- a/distribution/pom.xml
+++ b/distribution/pom.xml
@@ -324,6 +324,14 @@
           <groupId>org.apache.hbase</groupId>
           <artifactId>hbase-protocol</artifactId>
         </dependency>
+        <dependency>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-maprdb-json-handler</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>com.mapr.db</groupId>
+          <artifactId>maprdb-mapreduce</artifactId>
+        </dependency>
       </dependencies>
       <build>
       </build>

http://git-wip-us.apache.org/repos/asf/drill/blob/b4c599e3/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 504d4f3..d2a2262 100644
--- a/pom.xml
+++ b/pom.xml
@@ -2265,6 +2265,40 @@
             <version>4.5</version>
           </dependency>
           <dependency>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-maprdb-json-handler</artifactId>
+            <version>${hive.version}</version>
+            <exclusions>
+              <exclusion>
+                <groupId>org.apache.hive</groupId>
+                <artifactId>hive-exec</artifactId>
+              </exclusion>
+            </exclusions>
+          </dependency>
+          <dependency>
+            <groupId>com.mapr.db</groupId>
+            <artifactId>maprdb-mapreduce</artifactId>
+            <version>${mapr.release.version}</version>
+            <exclusions>
+              <exclusion>
+                <groupId>log4j</groupId>
+                <artifactId>log4j</artifactId>
+              </exclusion>
+              <exclusion>
+                <groupId>org.slf4j</groupId>
+                <artifactId>slf4j-log4j12</artifactId>
+              </exclusion>
+              <exclusion>
+                <groupId>commons-logging</groupId>
+                <artifactId>commons-logging</artifactId>
+              </exclusion>
+              <exclusion>
+                <groupId>javax.servlet</groupId>
+                <artifactId>servlet-api</artifactId>
+              </exclusion>
+            </exclusions>
+          </dependency>
+          <dependency>
             <groupId>xerces</groupId>
             <artifactId>xercesImpl</artifactId>
             <version>2.11.0</version>
@@ -2369,10 +2403,6 @@
                 <groupId>org.slf4j</groupId>
               </exclusion>
               <exclusion>
-                <artifactId>slf4j-log4j12</artifactId>
-                <groupId>org.slf4j</groupId>
-              </exclusion>
-              <exclusion>
                 <artifactId>log4j</artifactId>
                 <groupId>log4j</groupId>
               </exclusion>


[2/5] drill git commit: DRILL-6250: Sqlline start command with password appears in sqlline.log

Posted by vi...@apache.org.
DRILL-6250: Sqlline start command with password appears in sqlline.log

closes #1174


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/edc982a9
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/edc982a9
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/edc982a9

Branch: refs/heads/master
Commit: edc982a90f3551d1e30ec210f18d87ec4c084b70
Parents: 4e3e0ff
Author: Vladimir Tkach <vo...@gmail.com>
Authored: Sat Mar 17 17:27:10 2018 +0200
Committer: Vitalii Diravka <vi...@gmail.com>
Committed: Tue Mar 20 22:33:34 2018 +0200

----------------------------------------------------------------------
 .../java/org/apache/drill/common/config/DrillConfig.java  | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/edc982a9/common/src/main/java/org/apache/drill/common/config/DrillConfig.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/config/DrillConfig.java b/common/src/main/java/org/apache/drill/common/config/DrillConfig.java
index 73c899d..0d5c881 100644
--- a/common/src/main/java/org/apache/drill/common/config/DrillConfig.java
+++ b/common/src/main/java/org/apache/drill/common/config/DrillConfig.java
@@ -52,8 +52,10 @@ public class DrillConfig extends NestedConfig {
   public DrillConfig(Config config) {
     super(config);
     logger.debug("Setting up DrillConfig object.");
-    logger.trace("Given Config object is:\n{}",
-                 config.root().render(ConfigRenderOptions.defaults()));
+    // we need to exclude the sun.java.command config node when logging, because
+    // it contains the user password along with other parameters
+    logger.trace("Given Config object is:\n{}", config.withoutPath("password").withoutPath("sun.java.command")
+                 .root().render(ConfigRenderOptions.defaults()));
     RuntimeMXBean bean = ManagementFactory.getRuntimeMXBean();
     this.startupArguments = ImmutableList.copyOf(bean.getInputArguments());
     logger.debug("DrillConfig object initialized.");
@@ -239,7 +241,9 @@ public class DrillConfig extends NestedConfig {
     if (overriderProps != null) {
       logString.append("Overridden Properties:\n");
       for(Entry<Object, Object> entry : overriderProps.entrySet()){
-        logString.append("\t-").append(entry.getKey()).append(" = ").append(entry.getValue()).append("\n");
+        if (!entry.getKey().equals("password")) {
+          logString.append("\t-").append(entry.getKey()).append(" = ").append(entry.getValue()).append("\n");
+        }
       }
       logString.append("\n");
       effectiveConfig =


[3/5] drill git commit: DRILL-6275: Fixed direct memory reporting in sys.memory.

Posted by vi...@apache.org.
DRILL-6275: Fixed direct memory reporting in sys.memory.

closes #1176


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/cdc21ce7
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/cdc21ce7
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/cdc21ce7

Branch: refs/heads/master
Commit: cdc21ce72416f6b426ce7cab7614766af37dca2f
Parents: edc982a
Author: Timothy Farkas <ti...@apache.org>
Authored: Mon Mar 19 13:16:37 2018 -0700
Committer: Vitalii Diravka <vi...@gmail.com>
Committed: Tue Mar 20 22:38:16 2018 +0200

----------------------------------------------------------------------
 .../org/apache/drill/exec/ops/ExecutorFragmentContext.java     | 6 ++++++
 .../java/org/apache/drill/exec/ops/FragmentContextImpl.java    | 5 +++++
 .../java/org/apache/drill/exec/store/sys/MemoryIterator.java   | 4 ++--
 .../drill/exec/physical/unit/PhysicalOpUnitTestBase.java       | 6 ++++++
 4 files changed, 19 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/cdc21ce7/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ExecutorFragmentContext.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ExecutorFragmentContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ExecutorFragmentContext.java
index 82bb886..8031a15 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ExecutorFragmentContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ExecutorFragmentContext.java
@@ -18,6 +18,7 @@
 package org.apache.drill.exec.ops;
 
 import org.apache.drill.exec.coord.ClusterCoordinator;
+import org.apache.drill.exec.memory.BufferAllocator;
 import org.apache.drill.exec.physical.impl.OperatorCreatorRegistry;
 import org.apache.drill.exec.planner.PhysicalPlanReader;
 import org.apache.drill.exec.proto.CoordinationProtos;
@@ -31,6 +32,11 @@ import java.util.Map;
 import java.util.Set;
 
 public interface ExecutorFragmentContext extends RootFragmentContext {
+  /**
+   * Returns the root allocator for the Drillbit.
+   * @return The root allocator for the Drillbit.
+   */
+  BufferAllocator getRootAllocator();
 
   PhysicalPlanReader getPlanReader();
 

http://git-wip-us.apache.org/repos/asf/drill/blob/cdc21ce7/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContextImpl.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContextImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContextImpl.java
index b3c84bc..c9b2070 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContextImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContextImpl.java
@@ -336,6 +336,11 @@ public class FragmentContextImpl extends BaseFragmentContext implements Executor
   }
 
   @Override
+  public BufferAllocator getRootAllocator() {
+    return context.getAllocator();
+  }
+
+  @Override
   public BufferAllocator getNewChildAllocator(final String operatorName,
       final int operatorId,
       final long initialReservation,

http://git-wip-us.apache.org/repos/asf/drill/blob/cdc21ce7/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/MemoryIterator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/MemoryIterator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/MemoryIterator.java
index 8d437a6..1140685 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/MemoryIterator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/MemoryIterator.java
@@ -61,8 +61,8 @@ public class MemoryIterator implements Iterator<Object> {
     BufferPoolMXBean directBean = getDirectBean();
     memoryInfo.jvm_direct_current = directBean.getMemoryUsed();
 
-
-    memoryInfo.direct_current = context.getAllocator().getAllocatedMemory();
+    // We need the memory used by the root allocator for the Drillbit
+    memoryInfo.direct_current = context.getRootAllocator().getAllocatedMemory();
     memoryInfo.direct_max = DrillConfig.getMaxDirectMemory();
     return memoryInfo;
   }

http://git-wip-us.apache.org/repos/asf/drill/blob/cdc21ce7/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/PhysicalOpUnitTestBase.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/PhysicalOpUnitTestBase.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/PhysicalOpUnitTestBase.java
index b5dbcf8..93a7e54 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/PhysicalOpUnitTestBase.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/PhysicalOpUnitTestBase.java
@@ -26,6 +26,7 @@ import org.antlr.runtime.CommonTokenStream;
 import org.antlr.runtime.RecognitionException;
 import org.apache.calcite.rel.RelFieldCollation;
 import org.apache.drill.exec.coord.ClusterCoordinator;
+import org.apache.drill.exec.memory.BufferAllocator;
 import org.apache.drill.exec.ops.AccountingDataTunnel;
 import org.apache.drill.exec.ops.AccountingUserConnection;
 import org.apache.drill.exec.ops.ExecutorFragmentContext;
@@ -345,6 +346,11 @@ public class PhysicalOpUnitTestBase extends ExecTest {
     }
 
     @Override
+    public BufferAllocator getRootAllocator() {
+      return null;
+    }
+
+    @Override
     public PhysicalPlanReader getPlanReader() {
       throw new UnsupportedOperationException();
     }
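
The underlying issue: a fragment's allocator accounts only for its own allocations, while the Drillbit's root allocator aggregates every child beneath it, which is what sys.memory's direct_current is meant to report. A standalone sketch of the difference, assuming Drill's allocator API as of this commit:

import io.netty.buffer.DrillBuf;

import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.memory.RootAllocatorFactory;

public class RootAllocatorAccounting {
  public static void main(String[] args) {
    try (BufferAllocator root = RootAllocatorFactory.newRoot(DrillConfig.create());
         BufferAllocator fragment = root.newChildAllocator("fragment", 0, Long.MAX_VALUE)) {
      DrillBuf buf = fragment.buffer(4096);
      // the fragment allocator sees only its own allocation ...
      System.out.println("fragment: " + fragment.getAllocatedMemory());
      // ... while the root sees all memory allocated anywhere beneath it
      System.out.println("root: " + root.getAllocatedMemory());
      buf.release();
    }
  }
}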


[4/5] drill git commit: DRILL-6199: Add support for filter push down and partition pruning with several nested star sub-queries

Posted by vi...@apache.org.
DRILL-6199: Add support for filter push down and partition pruning with several nested star sub-queries

closes #1152


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/ddb73a50
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/ddb73a50
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/ddb73a50

Branch: refs/heads/master
Commit: ddb73a506c6cbb6492d6ff91ca174dbff1b8a045
Parents: cdc21ce
Author: Arina Ielchiieva <ar...@gmail.com>
Authored: Sun Mar 4 22:12:06 2018 +0200
Committer: Vitalii Diravka <vi...@gmail.com>
Committed: Tue Mar 20 22:40:00 2018 +0200

----------------------------------------------------------------------
 .../apache/drill/exec/planner/PlannerPhase.java |  33 +-
 .../DrillFilterItemStarReWriterRule.java        | 301 +++++++++++++------
 .../TestPushDownAndPruningWithItemStar.java     | 114 ++++++-
 3 files changed, 343 insertions(+), 105 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/ddb73a50/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PlannerPhase.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PlannerPhase.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PlannerPhase.java
index f46a7ee..f71c281 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PlannerPhase.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PlannerPhase.java
@@ -29,6 +29,7 @@ import org.apache.calcite.tools.RuleSets;
 import org.apache.drill.exec.ops.OptimizerRulesContext;
 import org.apache.drill.exec.planner.logical.DrillAggregateRule;
 import org.apache.drill.exec.planner.logical.DrillFilterAggregateTransposeRule;
+import org.apache.drill.exec.planner.logical.DrillFilterItemStarReWriterRule;
 import org.apache.drill.exec.planner.logical.DrillFilterJoinRules;
 import org.apache.drill.exec.planner.logical.DrillFilterRule;
 import org.apache.drill.exec.planner.logical.DrillJoinRel;
@@ -41,7 +42,6 @@ import org.apache.drill.exec.planner.logical.DrillPushLimitToScanRule;
 import org.apache.drill.exec.planner.logical.DrillPushProjectIntoScanRule;
 import org.apache.drill.exec.planner.logical.DrillPushProjectPastFilterRule;
 import org.apache.drill.exec.planner.logical.DrillPushProjectPastJoinRule;
-import org.apache.drill.exec.planner.logical.DrillFilterItemStarReWriterRule;
 import org.apache.drill.exec.planner.logical.DrillReduceAggregatesRule;
 import org.apache.drill.exec.planner.logical.DrillReduceExpressionsRule;
 import org.apache.drill.exec.planner.logical.DrillRelFactories;
@@ -80,6 +80,7 @@ import org.apache.drill.exec.store.parquet.ParquetPushDownFilter;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
+import java.util.Set;
 
 /**
  * Returns RuleSet for concrete planner phase.
@@ -227,7 +228,7 @@ public enum PlannerPhase {
 
     // This list is used to store rules that can be turned on and off
     // by user-facing planning options
-    final Builder<RelOptRule> userConfigurableRules = ImmutableSet.<RelOptRule>builder();
+    final Builder<RelOptRule> userConfigurableRules = ImmutableSet.builder();
 
     if (ps.isConstantFoldingEnabled()) {
       // TODO - DRILL-2218
@@ -337,8 +338,8 @@ public enum PlannerPhase {
    */
   static RuleSet getPruneScanRules(OptimizerRulesContext optimizerRulesContext) {
     final ImmutableSet<RelOptRule> pruneRules = ImmutableSet.<RelOptRule>builder()
+        .addAll(getItemStarRules())
         .add(
-            DrillFilterItemStarReWriterRule.INSTANCE,
             PruneScanRule.getDirFilterOnProject(optimizerRulesContext),
             PruneScanRule.getDirFilterOnScan(optimizerRulesContext),
             ParquetPruneScanRule.getFilterOnProjectParquet(optimizerRulesContext),
@@ -373,14 +374,15 @@ public enum PlannerPhase {
   }
 
   /**
-   *  Get an immutable list of directory-based partition pruing rules that will be used in Calcite logical planning.
-   * @param optimizerRulesContext
-   * @return
+   *  Get an immutable list of directory-based partition pruning rules that will be used in Calcite logical planning.
+   *
+   * @param optimizerRulesContext rules context
+   * @return directory-based partition pruning rules
    */
   static RuleSet getDirPruneScanRules(OptimizerRulesContext optimizerRulesContext) {
-    final ImmutableSet<RelOptRule> pruneRules = ImmutableSet.<RelOptRule>builder()
+    final Set<RelOptRule> pruneRules = ImmutableSet.<RelOptRule>builder()
+        .addAll(getItemStarRules())
         .add(
-            DrillFilterItemStarReWriterRule.INSTANCE,
             PruneScanRule.getDirFilterOnProject(optimizerRulesContext),
             PruneScanRule.getDirFilterOnScan(optimizerRulesContext)
         )
@@ -402,7 +404,7 @@ public enum PlannerPhase {
       ProjectPrule.INSTANCE
     ));
 
-  static final RuleSet getPhysicalRules(OptimizerRulesContext optimizerRulesContext) {
+  static RuleSet getPhysicalRules(OptimizerRulesContext optimizerRulesContext) {
     final List<RelOptRule> ruleList = new ArrayList<>();
     final PlannerSettings ps = optimizerRulesContext.getPlannerSettings();
 
@@ -472,4 +474,17 @@ public enum PlannerPhase {
     return RuleSets.ofList(relOptRuleSetBuilder.build());
   }
 
+  /**
+   * @return collection of rules to re-write item star operator for filter push down and partition pruning
+   */
+  private static ImmutableSet<RelOptRule> getItemStarRules() {
+    return ImmutableSet.<RelOptRule>builder()
+       .add(
+             DrillFilterItemStarReWriterRule.PROJECT_ON_SCAN,
+             DrillFilterItemStarReWriterRule.FILTER_ON_SCAN,
+             DrillFilterItemStarReWriterRule.FILTER_PROJECT_SCAN
+       ).build();
+  }
+
+
 }
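
For reference, the net effect of the rule-set changes above: the single DrillFilterItemStarReWriterRule.INSTANCE is replaced by three narrower rules, grouped in getItemStarRules() and spliced into each pruning rule set via addAll(). A minimal sketch of that composition pattern (the helper name is made up):

import com.google.common.collect.ImmutableSet;

import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.tools.RuleSet;
import org.apache.calcite.tools.RuleSets;

final class RuleSetComposition {
  // splice a shared group of rules into a phase-specific rule set
  static RuleSet compose(ImmutableSet<RelOptRule> shared, RelOptRule... phaseSpecific) {
    return RuleSets.ofList(ImmutableSet.<RelOptRule>builder()
        .addAll(shared)
        .add(phaseSpecific)
        .build());
  }
}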

http://git-wip-us.apache.org/repos/asf/drill/blob/ddb73a50/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterItemStarReWriterRule.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterItemStarReWriterRule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterItemStarReWriterRule.java
index 84ae674..27f8c49 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterItemStarReWriterRule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterItemStarReWriterRule.java
@@ -17,21 +17,10 @@
  */
 package org.apache.drill.exec.planner.logical;
 
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import org.apache.calcite.adapter.enumerable.EnumerableTableScan;
 import org.apache.calcite.plan.RelOptRule;
 import org.apache.calcite.plan.RelOptRuleCall;
 import org.apache.calcite.plan.RelOptRuleOperand;
-import org.apache.calcite.plan.RelOptTable;
-import org.apache.calcite.prepare.RelOptTableImpl;
-import org.apache.calcite.rel.RelNode;
-import org.apache.calcite.rel.core.CorrelationId;
-import org.apache.calcite.rel.core.Filter;
-import org.apache.calcite.rel.core.Project;
-import org.apache.calcite.rel.core.TableScan;
-import org.apache.calcite.rel.logical.LogicalFilter;
-import org.apache.calcite.rel.logical.LogicalProject;
+import org.apache.calcite.rel.rules.ProjectRemoveRule;
 import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rel.type.RelDataTypeFactory;
 import org.apache.calcite.rel.type.RelDataTypeField;
@@ -39,10 +28,11 @@ import org.apache.calcite.rex.RexCall;
 import org.apache.calcite.rex.RexInputRef;
 import org.apache.calcite.rex.RexNode;
 import org.apache.calcite.rex.RexVisitorImpl;
-import org.apache.calcite.schema.Table;
+import org.apache.drill.common.expression.PathSegment;
+import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.exec.planner.types.RelDataTypeDrillImpl;
 import org.apache.drill.exec.planner.types.RelDataTypeHolder;
-import org.apache.drill.exec.util.Utilities;
+import org.apache.drill.exec.store.parquet.ParquetGroupScan;
 
 import java.util.ArrayList;
 import java.util.Collection;
@@ -54,83 +44,189 @@ import static org.apache.drill.exec.planner.logical.FieldsReWriterUtil.DesiredFi
 import static org.apache.drill.exec.planner.logical.FieldsReWriterUtil.FieldsReWriter;
 
 /**
- * Rule will transform filter -> project -> scan call with item star fields in filter
- * into project -> filter -> project -> scan where item star fields are pushed into scan
- * and replaced with actual field references.
+ * Rule will transform item star fields in filter and replace them with actual field references.
  *
  * This will help partition pruning and push down rules to detect fields that can be pruned or pushed down.
  * Item star operator appears when a sub-select or CTE with star is used as the source.
  */
-public class DrillFilterItemStarReWriterRule extends RelOptRule {
+public class DrillFilterItemStarReWriterRule {
 
-  public static final DrillFilterItemStarReWriterRule INSTANCE = new DrillFilterItemStarReWriterRule(
-      RelOptHelper.some(Filter.class, RelOptHelper.some(Project.class, RelOptHelper.any( TableScan.class))),
-      "DrillFilterItemStarReWriterRule");
+  public static final ProjectOnScan PROJECT_ON_SCAN = new ProjectOnScan(
+          RelOptHelper.some(DrillProjectRel.class, RelOptHelper.any(DrillScanRel.class)),
+          "DrillFilterItemStarReWriterRule.ProjectOnScan");
 
-  private DrillFilterItemStarReWriterRule(RelOptRuleOperand operand, String id) {
-    super(operand, id);
-  }
+  public static final FilterOnScan FILTER_ON_SCAN = new FilterOnScan(
+      RelOptHelper.some(DrillFilterRel.class, RelOptHelper.any(DrillScanRel.class)),
+      "DrillFilterItemStarReWriterRule.FilterOnScan");
 
-  @Override
-  public void onMatch(RelOptRuleCall call) {
-    Filter filterRel = call.rel(0);
-    Project projectRel = call.rel(1);
-    TableScan scanRel = call.rel(2);
+  public static final FilterProjectScan FILTER_PROJECT_SCAN = new FilterProjectScan(
+      RelOptHelper.some(DrillFilterRel.class, RelOptHelper.some(DrillProjectRel.class, RelOptHelper.any(DrillScanRel.class))),
+      "DrillFilterItemStarReWriterRule.FilterProjectScan");
 
-    ItemStarFieldsVisitor itemStarFieldsVisitor = new ItemStarFieldsVisitor(filterRel.getRowType().getFieldNames());
-    filterRel.getCondition().accept(itemStarFieldsVisitor);
 
-    // there are no item fields, no need to proceed further
-    if (!itemStarFieldsVisitor.hasItemStarFields()) {
-      return;
+  private static class ProjectOnScan extends RelOptRule {
+
+    ProjectOnScan(RelOptRuleOperand operand, String id) {
+      super(operand, id);
     }
 
-    Map<String, DesiredField> itemStarFields = itemStarFieldsVisitor.getItemStarFields();
+    @Override
+    public boolean matches(RelOptRuleCall call) {
+      DrillScanRel scan = call.rel(1);
+      return scan.getGroupScan() instanceof ParquetGroupScan && super.matches(call);
+    }
+
+    @Override
+    public void onMatch(RelOptRuleCall call) {
+      DrillProjectRel projectRel = call.rel(0);
+      DrillScanRel scanRel = call.rel(1);
+
+      ItemStarFieldsVisitor itemStarFieldsVisitor = new ItemStarFieldsVisitor(scanRel.getRowType().getFieldNames());
+      List<RexNode> projects = projectRel.getProjects();
+      for (RexNode project : projects) {
+        project.accept(itemStarFieldsVisitor);
+      }
 
-    // create new scan
-    RelNode newScan = constructNewScan(scanRel, itemStarFields.keySet());
+      // if there are no item fields, no need to proceed further
+      if (itemStarFieldsVisitor.hasNoItemStarFields()) {
+        return;
+      }
 
-    // combine original and new projects
-    List<RexNode> newProjects = new ArrayList<>(projectRel.getProjects());
+      Map<String, DesiredField> itemStarFields = itemStarFieldsVisitor.getItemStarFields();
 
-    // prepare node mapper to replace item star calls with new input field references
-    Map<RexNode, Integer> fieldMapper = new HashMap<>();
+      DrillScanRel newScan = createNewScan(scanRel, itemStarFields);
 
-    // since scan might have already some fields, new field reference index should start from the last used in scan
-    // NB: field reference index starts from 0 thus original field count can be taken as starting index
-    int index = scanRel.getRowType().getFieldCount();
+      // re-write projects
+      Map<RexNode, Integer> fieldMapper = createFieldMapper(itemStarFields.values(), scanRel.getRowType().getFieldCount());
+      FieldsReWriter fieldsReWriter = new FieldsReWriter(fieldMapper);
+      List<RexNode> newProjects = new ArrayList<>();
+      for (RexNode node : projectRel.getChildExps()) {
+        newProjects.add(node.accept(fieldsReWriter));
+      }
 
-    for (DesiredField desiredField : itemStarFields.values()) {
-      RexInputRef inputRef = new RexInputRef(index, desiredField.getType());
-      // add references to item star fields in new project
-      newProjects.add(inputRef);
-      for (RexNode node : desiredField.getNodes()) {
-        // if field is referenced in more then one call, add each call to field mapper
-        fieldMapper.put(node, index);
+      DrillProjectRel newProject = new DrillProjectRel(
+          projectRel.getCluster(),
+          projectRel.getTraitSet(),
+          newScan,
+          newProjects,
+          projectRel.getRowType());
+
+      if (ProjectRemoveRule.isTrivial(newProject)) {
+        call.transformTo(newScan);
+      } else {
+        call.transformTo(newProject);
       }
-      // increment index for the next node reference
-      index++;
     }
 
-    // create new project row type
-    RelDataType newProjectRowType = getNewRowType(
-        projectRel.getCluster().getTypeFactory(),
-        projectRel.getRowType().getFieldList(),
-        itemStarFields.keySet());
+  }
+
+  private static class FilterOnScan extends RelOptRule {
+
+    FilterOnScan(RelOptRuleOperand operand, String id) {
+      super(operand, id);
+    }
+
+    @Override
+    public boolean matches(RelOptRuleCall call) {
+      DrillScanRel scan = call.rel(1);
+      return scan.getGroupScan() instanceof ParquetGroupScan && super.matches(call);
+    }
+
+    @Override
+    public void onMatch(RelOptRuleCall call) {
+      DrillFilterRel filterRel = call.rel(0);
+      DrillScanRel scanRel = call.rel(1);
+      transformFilterCall(filterRel, null, scanRel, call);
+    }
+  }
+
+  private static class FilterProjectScan extends RelOptRule {
+
+    FilterProjectScan(RelOptRuleOperand operand, String id) {
+      super(operand, id);
+    }
+
+    @Override
+    public boolean matches(RelOptRuleCall call) {
+      DrillScanRel scan = call.rel(2);
+      return scan.getGroupScan() instanceof ParquetGroupScan && super.matches(call);
+    }
+
+    @Override
+    public void onMatch(RelOptRuleCall call) {
+      DrillFilterRel filterRel = call.rel(0);
+      DrillProjectRel projectRel = call.rel(1);
+      DrillScanRel scanRel = call.rel(2);
+      transformFilterCall(filterRel, projectRel, scanRel, call);
+    }
+  }
+
+
+  /**
+   * Removes item star call from filter expression and propagates changes into project (if present) and scan.
+   *
+   * @param filterRel original filter expression
+   * @param projectRel original project expression
+   * @param scanRel original scan expression
+   * @param call original rule call
+   */
+  private static void transformFilterCall(DrillFilterRel filterRel, DrillProjectRel projectRel, DrillScanRel scanRel, RelOptRuleCall call) {
+    List<String> fieldNames = projectRel == null ? scanRel.getRowType().getFieldNames() : projectRel.getRowType().getFieldNames();
+    ItemStarFieldsVisitor itemStarFieldsVisitor = new ItemStarFieldsVisitor(fieldNames);
+    filterRel.getCondition().accept(itemStarFieldsVisitor);
+
+    // if there are no item fields, no need to proceed further
+    if (itemStarFieldsVisitor.hasNoItemStarFields()) {
+      return;
+    }
+
+    Map<String, DesiredField> itemStarFields = itemStarFieldsVisitor.getItemStarFields();
+
+    DrillScanRel newScan = createNewScan(scanRel, itemStarFields);
+
+    // create new project if one was present in the call
+    DrillProjectRel newProject = null;
+    if (projectRel != null) {
 
-    // create new project
-    RelNode newProject = new LogicalProject(projectRel.getCluster(), projectRel.getTraitSet(), newScan, newProjects, newProjectRowType);
+      // add new projects to the already existing in original project
+      int projectIndex = scanRel.getRowType().getFieldCount();
+      List<RexNode> newProjects = new ArrayList<>(projectRel.getProjects());
+      for (DesiredField desiredField : itemStarFields.values()) {
+        newProjects.add(new RexInputRef(projectIndex, desiredField.getType()));
+        projectIndex++;
+      }
+
+      RelDataType newProjectRowType = createNewRowType(
+          projectRel.getCluster().getTypeFactory(),
+          projectRel.getRowType().getFieldList(),
+          itemStarFields.keySet());
+
+      newProject = new DrillProjectRel(
+          projectRel.getCluster(),
+          projectRel.getTraitSet(),
+          newScan,
+          newProjects,
+          newProjectRowType);
+    }
 
     // transform filter condition
+    Map<RexNode, Integer> fieldMapper = createFieldMapper(itemStarFields.values(), scanRel.getRowType().getFieldCount());
     FieldsReWriter fieldsReWriter = new FieldsReWriter(fieldMapper);
     RexNode newCondition = filterRel.getCondition().accept(fieldsReWriter);
 
     // create new filter
-    RelNode newFilter = new LogicalFilter(filterRel.getCluster(), filterRel.getTraitSet(), newProject, newCondition, ImmutableSet.<CorrelationId>of());
+    DrillFilterRel newFilter = DrillFilterRel.create(newProject != null ? newProject : newScan, newCondition);
 
     // wrap with project to have the same row type as before
-    Project wrapper = projectRel.copy(projectRel.getTraitSet(), newFilter, projectRel.getProjects(), projectRel.getRowType());
+    List<RexNode> newProjects = new ArrayList<>();
+    RelDataType rowType = filterRel.getRowType();
+    List<RelDataTypeField> fieldList = rowType.getFieldList();
+    for (RelDataTypeField field : fieldList) {
+      RexInputRef inputRef = new RexInputRef(field.getIndex(), field.getType());
+      newProjects.add(inputRef);
+    }
 
+    DrillProjectRel wrapper = new DrillProjectRel(filterRel.getCluster(), filterRel.getTraitSet(), newFilter, newProjects, filterRel.getRowType());
     call.transformTo(wrapper);
   }
 
@@ -142,9 +238,9 @@ public class DrillFilterItemStarReWriterRule extends RelOptRule {
    * @param newFields new fields
    * @return new row type with original and new fields
    */
-  private RelDataType getNewRowType(RelDataTypeFactory typeFactory,
-                                    List<RelDataTypeField> originalFields,
-                                    Collection<String> newFields) {
+  private static RelDataType createNewRowType(RelDataTypeFactory typeFactory,
+                                              List<RelDataTypeField> originalFields,
+                                              Collection<String> newFields) {
     RelDataTypeHolder relDataTypeHolder = new RelDataTypeHolder();
 
     // add original fields
@@ -161,30 +257,55 @@ public class DrillFilterItemStarReWriterRule extends RelOptRule {
   }
 
   /**
-   * Constructs new scan based on the original scan.
-   * Preserves all original fields and add new fields.
+   * Creates new scan with fields from original scan and fields used in item star operator.
    *
-   * @param scanRel original scan
-   * @param newFields new fields
-   * @return new scan with original and new fields
+   * @param scanRel original scan expression
+   * @param itemStarFields item star fields
+   * @return new scan expression
+   */
+  private static DrillScanRel createNewScan(DrillScanRel scanRel, Map<String, DesiredField> itemStarFields) {
+    RelDataType newScanRowType = createNewRowType(
+            scanRel.getCluster().getTypeFactory(),
+            scanRel.getRowType().getFieldList(),
+            itemStarFields.keySet());
+
+    List<SchemaPath> columns = new ArrayList<>(scanRel.getColumns());
+    for (DesiredField desiredField : itemStarFields.values()) {
+      String name = desiredField.getName();
+      PathSegment.NameSegment nameSegment = new PathSegment.NameSegment(name);
+      columns.add(new SchemaPath(nameSegment));
+    }
+
+    return new DrillScanRel(
+            scanRel.getCluster(),
+            scanRel.getTraitSet().plus(DrillRel.DRILL_LOGICAL),
+            scanRel.getTable(),
+            newScanRowType,
+            columns);
+  }
+
+  /**
+   * Creates node mapper to replace item star calls with new input field references.
+   * Starting index should be calculated from the last used input expression (i.e. scan expression).
+   * NB: field reference indices start from 0, thus the original field count can be used as the starting index
+   *
+   * @param desiredFields list of desired fields
+   * @param startingIndex starting index
+   * @return field mapper
    */
-  private RelNode constructNewScan(TableScan scanRel, Collection<String> newFields) {
-    // create new scan row type
-    RelDataType newScanRowType = getNewRowType(
-        scanRel.getCluster().getTypeFactory(),
-        scanRel.getRowType().getFieldList(),
-        newFields);
-
-    // create new scan
-    RelOptTable table = scanRel.getTable();
-    Class elementType = EnumerableTableScan.deduceElementType(table.unwrap(Table.class));
-
-    DrillTable unwrap = Utilities.getDrillTable(table);
-    DrillTranslatableTable newTable = new DrillTranslatableTable(
-        new DynamicDrillTable(unwrap.getPlugin(), unwrap.getStorageEngineName(), unwrap.getUserName(), unwrap.getSelection()));
-    RelOptTableImpl newOptTableImpl = RelOptTableImpl.create(table.getRelOptSchema(), newScanRowType, newTable, ImmutableList.<String>of());
-
-    return new EnumerableTableScan(scanRel.getCluster(), scanRel.getTraitSet(), newOptTableImpl, elementType);
+  private static Map<RexNode, Integer> createFieldMapper(Collection<DesiredField> desiredFields, int startingIndex) {
+    Map<RexNode, Integer> fieldMapper = new HashMap<>();
+
+    int index = startingIndex;
+    for (DesiredField desiredField : desiredFields) {
+      for (RexNode node : desiredField.getNodes()) {
+        // if field is referenced in more than one call, add each call to field mapper
+        fieldMapper.put(node, index);
+      }
+      // increment index for the next node reference
+      index++;
+    }
+    return fieldMapper;
   }
 
   /**
@@ -192,7 +313,7 @@ public class DrillFilterItemStarReWriterRule extends RelOptRule {
    * For the fields with the same name, stores original calls in a list, does not duplicate fields.
    * Holds state, should not be re-used.
    */
-  private class ItemStarFieldsVisitor extends RexVisitorImpl<RexNode> {
+  private static class ItemStarFieldsVisitor extends RexVisitorImpl<RexNode> {
 
     private final Map<String, DesiredField> itemStarFields = new HashMap<>();
     private final List<String> fieldNames;
@@ -202,8 +323,8 @@ public class DrillFilterItemStarReWriterRule extends RelOptRule {
       this.fieldNames = fieldNames;
     }
 
-    boolean hasItemStarFields() {
-      return !itemStarFields.isEmpty();
+    boolean hasNoItemStarFields() {
+      return itemStarFields.isEmpty();
     }
 
     Map<String, DesiredField> getItemStarFields() {
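
What the visitor is looking for: when a query selects from a nested star sub-query, e.g. select * from (select * from t) where o_custkey < 5, the filter condition references columns through Calcite's ITEM operator as ITEM($0, 'o_custkey') rather than as plain input refs. A simplified standalone sketch of detecting such calls (Drill's ItemStarFieldsVisitor additionally verifies that operand 0 really is a star field):

import java.util.ArrayList;
import java.util.List;

import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexVisitorImpl;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;

class ItemCallCollector extends RexVisitorImpl<Void> {
  final List<String> itemFieldNames = new ArrayList<>();

  ItemCallCollector() {
    super(true); // deep: visit nested operands as well
  }

  @Override
  public Void visitCall(RexCall call) {
    // ITEM($input, 'name'): operand 0 is the (star) input ref,
    // operand 1 is the requested field name as a literal
    if (call.getOperator() == SqlStdOperatorTable.ITEM
        && call.getOperands().size() == 2
        && call.getOperands().get(1) instanceof RexLiteral) {
      itemFieldNames.add(RexLiteral.stringValue(call.getOperands().get(1)));
    }
    return super.visitCall(call);
  }
}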

http://git-wip-us.apache.org/repos/asf/drill/blob/ddb73a50/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestPushDownAndPruningWithItemStar.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestPushDownAndPruningWithItemStar.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestPushDownAndPruningWithItemStar.java
index 24b9212..6ac08ee 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestPushDownAndPruningWithItemStar.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestPushDownAndPruningWithItemStar.java
@@ -44,7 +44,7 @@ public class TestPushDownAndPruningWithItemStar extends PlanTestBase {
   }
 
   @Test
-  public void testPushProjectIntoScan() throws Exception {
+  public void testPushProjectIntoScanWithGroupByClause() throws Exception {
     String query = String.format("select o_orderdate, count(*) from (select * from `%s`.`%s`) group by o_orderdate", DFS_TMP_SCHEMA, TABLE_NAME);
 
     String[] expectedPlan = {"numFiles=3, numRowGroups=3, usedMetadataFile=false, columns=\\[`o_orderdate`\\]"};
@@ -79,7 +79,7 @@ public class TestPushDownAndPruningWithItemStar extends PlanTestBase {
   public void testPushProjectIntoScanWithExpressionInFilter() throws Exception {
     String query = String.format("select o_orderdate from (select * from `%s`.`%s`) where o_custkey + o_orderkey < 5", DFS_TMP_SCHEMA, TABLE_NAME);
 
-    String[] expectedPlan = {"numFiles=3, numRowGroups=3, usedMetadataFile=false, columns=\\[`o_custkey`, `o_orderkey`, `o_orderdate`\\]"};
+    String[] expectedPlan = {"numFiles=3, numRowGroups=3, usedMetadataFile=false, columns=\\[`o_orderdate`, `o_custkey`, `o_orderkey`\\]"};
     String[] excludedPlan = {};
 
     PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPlan);
@@ -116,7 +116,7 @@ public class TestPushDownAndPruningWithItemStar extends PlanTestBase {
       "where t.user_info.cust_id > 28 and t.user_info.device = 'IOS5' and t.marketing_info.camp_id > 5 and t.marketing_info.keywords[2] is not null";
 
     String[] expectedPlan = {"numFiles=1, numRowGroups=1, usedMetadataFile=false, " +
-      "columns=\\[`user_info`.`cust_id`, `user_info`.`device`, `marketing_info`.`camp_id`, `marketing_info`.`keywords`\\[2\\], `trans_id`\\]"};
+      "columns=\\[`trans_id`, `user_info`.`cust_id`, `user_info`.`device`, `marketing_info`.`camp_id`, `marketing_info`.`keywords`\\[2\\]\\]"};
     String[] excludedPlan = {};
 
     PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPlan);
@@ -131,10 +131,43 @@ public class TestPushDownAndPruningWithItemStar extends PlanTestBase {
   }
 
   @Test
+  public void testProjectIntoScanWithNestedStarSubQuery() throws Exception {
+    String query = String.format("select *, o_orderdate from (select * from `%s`.`%s`)", DFS_TMP_SCHEMA, TABLE_NAME);
+
+    String[] expectedPlan = {"numFiles=3, numRowGroups=3, usedMetadataFile=false, columns=\\[`\\*\\*`, `o_orderdate`\\]"};
+    String[] excludedPlan = {};
+
+    PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPlan);
+
+    testBuilder()
+        .sqlQuery(query)
+        .unOrdered()
+        .sqlBaselineQuery("select *, o_orderdate from `%s`.`%s`", DFS_TMP_SCHEMA, TABLE_NAME)
+        .build();
+  }
+
+  @Test
+  public void testProjectIntoScanWithSeveralNestedStarSubQueries() throws Exception {
+    String subQuery = String.format("select * from `%s`.`%s`", DFS_TMP_SCHEMA, TABLE_NAME);
+    String query = String.format("select o_custkey + o_orderkey from (select * from (select * from (%s)))", subQuery);
+
+    String[] expectedPlan = {"numFiles=3, numRowGroups=3, usedMetadataFile=false, columns=\\[`o_custkey`, `o_orderkey`\\]"};
+    String[] excludedPlan = {};
+
+    PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPlan);
+
+    testBuilder()
+        .sqlQuery(query)
+        .unOrdered()
+        .sqlBaselineQuery("select o_custkey + o_orderkey from `%s`.`%s`", DFS_TMP_SCHEMA, TABLE_NAME)
+        .build();
+  }
+
+  @Test
   public void testDirectoryPruning() throws Exception {
     String query = String.format("select * from (select * from `%s`.`%s`) where dir0 = 't1'", DFS_TMP_SCHEMA, TABLE_NAME);
 
-    String[] expectedPlan = {"numFiles=1, numRowGroups=1, usedMetadataFile=false, columns=\\[`\\*\\*`\\]"};
+    String[] expectedPlan = {"numFiles=1, numRowGroups=1, usedMetadataFile=false, columns=\\[`\\*\\*`, `dir0`\\]"};
     String[] excludedPlan = {};
 
     PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPlan);
@@ -147,10 +180,45 @@ public class TestPushDownAndPruningWithItemStar extends PlanTestBase {
   }
 
   @Test
+  public void testDirectoryPruningWithNestedStarSubQuery() throws Exception {
+    String subQuery = String.format("select * from `%s`.`%s`", DFS_TMP_SCHEMA, TABLE_NAME);
+    String query = String.format("select * from (select * from (select * from (%s))) where dir0 = 't1'", subQuery);
+
+    String[] expectedPlan = {"numFiles=1, numRowGroups=1, usedMetadataFile=false, columns=\\[`\\*\\*`, `dir0`\\]"};
+    String[] excludedPlan = {};
+
+    PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPlan);
+
+    testBuilder()
+        .sqlQuery(query)
+        .unOrdered()
+        .sqlBaselineQuery("select * from `%s`.`%s` where dir0 = 't1'", DFS_TMP_SCHEMA, TABLE_NAME)
+        .build();
+  }
+
+  @Test
+  public void testDirectoryPruningWithNestedStarSubQueryAndAdditionalColumns() throws Exception {
+    String subQuery = String.format("select * from `%s`.`%s`", DFS_TMP_SCHEMA, TABLE_NAME);
+    String query = String.format("select * from (select * from (select *, `o_orderdate` from (%s))) where dir0 = 't1'", subQuery);
+
+    String[] expectedPlan = {"numFiles=1, numRowGroups=1, usedMetadataFile=false, columns=\\[`\\*\\*`, `o_orderdate`, `dir0`\\]"};
+    String[] excludedPlan = {};
+
+    PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPlan);
+
+    testBuilder()
+        .sqlQuery(query)
+        .unOrdered()
+        .sqlBaselineQuery("select * from `%s`.`%s` where dir0 = 't1'", DFS_TMP_SCHEMA, TABLE_NAME)
+        .build();
+  }
+
+
+  @Test
   public void testFilterPushDownSingleCondition() throws Exception {
     String query = String.format("select * from (select * from `%s`.`%s`) where o_orderdate = date '1992-01-01'", DFS_TMP_SCHEMA, TABLE_NAME);
 
-    String[] expectedPlan = {"numFiles=1, numRowGroups=1, usedMetadataFile=false, columns=\\[`\\*\\*`\\]"};
+    String[] expectedPlan = {"numFiles=1, numRowGroups=1, usedMetadataFile=false, columns=\\[`\\*\\*`, `o_orderdate`\\]"};
     String[] excludedPlan = {};
 
     PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPlan);
@@ -167,7 +235,7 @@ public class TestPushDownAndPruningWithItemStar extends PlanTestBase {
     String query = String.format("select * from (select * from `%s`.`%s`) where o_orderdate = date '1992-01-01' or o_orderdate = date '1992-01-09'",
         DFS_TMP_SCHEMA, TABLE_NAME);
 
-    String[] expectedPlan = {"numFiles=2, numRowGroups=2, usedMetadataFile=false, columns=\\[`\\*\\*`\\]"};
+    String[] expectedPlan = {"numFiles=2, numRowGroups=2, usedMetadataFile=false, columns=\\[`\\*\\*`, `o_orderdate`\\]"};
     String[] excludedPlan = {};
 
     PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPlan);
@@ -180,4 +248,38 @@ public class TestPushDownAndPruningWithItemStar extends PlanTestBase {
         .build();
   }
 
+  @Test
+  public void testFilterPushDownWithSeveralNestedStarSubQueries() throws Exception {
+    String subQuery = String.format("select * from `%s`.`%s`", DFS_TMP_SCHEMA, TABLE_NAME);
+    String query = String.format("select * from (select * from (select * from (%s))) where o_orderdate = date '1992-01-01'", subQuery);
+
+    String[] expectedPlan = {"numFiles=1, numRowGroups=1, usedMetadataFile=false, columns=\\[`\\*\\*`, `o_orderdate`\\]"};
+    String[] excludedPlan = {};
+
+    PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPlan);
+
+    testBuilder()
+        .sqlQuery(query)
+        .unOrdered()
+        .sqlBaselineQuery("select * from `%s`.`%s` where o_orderdate = date '1992-01-01'", DFS_TMP_SCHEMA, TABLE_NAME)
+        .build();
+  }
+
+  @Test
+  public void testFilterPushDownWithSeveralNestedStarSubQueriesWithAdditionalColumns() throws Exception {
+    String subQuery = String.format("select * from `%s`.`%s`", DFS_TMP_SCHEMA, TABLE_NAME);
+    String query = String.format("select * from (select * from (select *, o_custkey from (%s))) where o_orderdate = date '1992-01-01'", subQuery);
+
+    String[] expectedPlan = {"numFiles=1, numRowGroups=1, usedMetadataFile=false, columns=\\[`\\*\\*`, `o_custkey`, `o_orderdate`\\]"};
+    String[] excludedPlan = {};
+
+    PlanTestBase.testPlanMatchingPatterns(query, expectedPlan, excludedPlan);
+
+    testBuilder()
+        .sqlQuery(query)
+        .unOrdered()
+        .sqlBaselineQuery("select *, o_custkey from `%s`.`%s` where o_orderdate = date '1992-01-01'", DFS_TMP_SCHEMA, TABLE_NAME)
+        .build();
+  }
+
 }