Posted to dev@drill.apache.org by GitBox <gi...@apache.org> on 2018/10/08 12:44:15 UTC

[GitHub] asfgit closed pull request #1492: DRILL-6773: The renamed schema with aliases is not shown for queries on empty directories

URL: https://github.com/apache/drill/pull/1492

This is a PR merged from a forked repository. Because GitHub hides the
original diff of a fork-based pull request once it is merged, the diff is
reproduced below for the sake of provenance.
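
In brief, the change threads ProjectPrel's existing outputProj flag through
ProjectAllowDupPrel and sets it to true on the rename (alias) projects that
StarColumnConverter inserts under Screen and Writer, so that
ProjectRecordBatch#handleNullInput() no longer bails out for them and can
emit the renamed schema even when the input (for example an empty directory)
carries no data; SchemalessScan additionally stops accepting project
pushdown, which keeps that rename project in the plan. The sketch below
illustrates only the handleNullInput() idea; the types and helper names are
simplified stand-ins, not Drill's actual classes:

import java.util.LinkedHashMap;
import java.util.Map;

public class OutputProjectSketch {

  enum IterOutcome { NONE, OK_NEW_SCHEMA }

  // Stand-in for the Project operator configuration carrying the outputProj flag.
  static final class ProjectConfig {
    final LinkedHashMap<String, String> aliasToExpr; // output alias -> source expression
    final boolean outputProj;

    ProjectConfig(LinkedHashMap<String, String> aliasToExpr, boolean outputProj) {
      this.aliasToExpr = aliasToExpr;
      this.outputProj = outputProj;
    }
  }

  // Hypothetical stand-in for ProjectRecordBatch#handleNullInput(): decide what to
  // emit when the upstream scan (e.g. an empty directory) delivers no schema at all.
  static IterOutcome handleNullInput(ProjectConfig config, Map<String, String> emittedSchema) {
    if (!config.outputProj) {
      // Not the output project: fall back to the default behaviour, modelled here
      // as simply reporting "no data, no schema".
      return IterOutcome.NONE;
    }
    // Output project: emit an empty batch whose schema carries the aliases, using
    // a permissive nullable INT for columns the empty source never defined.
    for (String alias : config.aliasToExpr.keySet()) {
      emittedSchema.put(alias, "OPTIONAL INT");
    }
    return IterOutcome.OK_NEW_SCHEMA;
  }

  public static void main(String[] args) {
    LinkedHashMap<String, String> exprs = new LinkedHashMap<>();
    exprs.put("WeekId", "CAST(dir0 AS INT)");
    exprs.put("ProductName", "Product");

    Map<String, String> schema = new LinkedHashMap<>();
    IterOutcome outcome = handleNullInput(new ProjectConfig(exprs, true), schema);
    System.out.println(outcome + " " + schema);
    // prints: OK_NEW_SCHEMA {WeekId=OPTIONAL INT, ProductName=OPTIONAL INT}
  }
}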

diff --git a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/MapRDBPushFilterIntoScan.java b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/MapRDBPushFilterIntoScan.java
index 0690531b3d6..c233a6bcf07 100644
--- a/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/MapRDBPushFilterIntoScan.java
+++ b/contrib/format-maprdb/src/main/java/org/apache/drill/exec/store/mapr/db/MapRDBPushFilterIntoScan.java
@@ -141,7 +141,7 @@ protected void doPushFilterIntoJsonGroupScan(RelOptRuleCall call,
     final JsonTableGroupScan newGroupsScan = groupScan.clone(newScanSpec);
     newGroupsScan.setFilterPushedDown(true);
 
-    final ScanPrel newScanPrel = ScanPrel.create(scan, filter.getTraitSet(), newGroupsScan, scan.getRowType());
+    final ScanPrel newScanPrel = new ScanPrel(scan, filter.getTraitSet(), newGroupsScan, scan.getRowType());
 
     // Depending on whether is a project in the middle, assign either scan or copy of project to childRel.
     final RelNode childRel = project == null ? newScanPrel : project.copy(project.getTraitSet(), ImmutableList.of((RelNode)newScanPrel));
@@ -187,7 +187,7 @@ protected void doPushFilterIntoBinaryGroupScan(final RelOptRuleCall call,
                                                                         groupScan.getTableStats());
     newGroupsScan.setFilterPushedDown(true);
 
-    final ScanPrel newScanPrel = ScanPrel.create(scan, filter.getTraitSet(), newGroupsScan, scan.getRowType());
+    final ScanPrel newScanPrel = new ScanPrel(scan, filter.getTraitSet(), newGroupsScan, scan.getRowType());
 
     // Depending on whether is a project in the middle, assign either scan or copy of project to childRel.
     final RelNode childRel = project == null ? newScanPrel : project.copy(project.getTraitSet(), ImmutableList.of((RelNode)newScanPrel));;
diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBasePushFilterIntoScan.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBasePushFilterIntoScan.java
index 692268fe2e5..736b36b80f7 100644
--- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBasePushFilterIntoScan.java
+++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBasePushFilterIntoScan.java
@@ -122,7 +122,7 @@ protected void doPushFilterToScan(final RelOptRuleCall call, final FilterPrel fi
         newScanSpec, groupScan.getColumns());
     newGroupsScan.setFilterPushedDown(true);
 
-    final ScanPrel newScanPrel = ScanPrel.create(scan, filter.getTraitSet(), newGroupsScan, scan.getRowType());
+    final ScanPrel newScanPrel = new ScanPrel(scan, filter.getTraitSet(), newGroupsScan, scan.getRowType());
 
     // Depending on whether is a project in the middle, assign either scan or copy of project to childRel.
     final RelNode childRel = project == null ? newScanPrel : project.copy(project.getTraitSet(), ImmutableList.of(newScanPrel));
diff --git a/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaPushDownFilterIntoScan.java b/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaPushDownFilterIntoScan.java
index 0e04f0f31aa..14abadff963 100644
--- a/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaPushDownFilterIntoScan.java
+++ b/contrib/storage-kafka/src/main/java/org/apache/drill/exec/store/kafka/KafkaPushDownFilterIntoScan.java
@@ -66,7 +66,7 @@ public void onMatch(RelOptRuleCall call) {
 
     logger.info("Partitions ScanSpec after pushdown: " + newScanSpec);
     GroupScan newGroupScan = groupScan.cloneWithNewSpec(newScanSpec);
-    final ScanPrel newScanPrel = ScanPrel.create(scan, filter.getTraitSet(), newGroupScan, scan.getRowType());
+    final ScanPrel newScanPrel = new ScanPrel(scan, filter.getTraitSet(), newGroupScan, scan.getRowType());
     call.transformTo(filter.copy(filter.getTraitSet(), ImmutableList.of(newScanPrel)));
   }
 
diff --git a/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoPushDownFilterForScan.java b/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoPushDownFilterForScan.java
index 540af071ee5..afe32f225a0 100644
--- a/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoPushDownFilterForScan.java
+++ b/contrib/storage-mongo/src/main/java/org/apache/drill/exec/store/mongo/MongoPushDownFilterForScan.java
@@ -77,7 +77,7 @@ public void onMatch(RelOptRuleCall call) {
     }
     newGroupsScan.setFilterPushedDown(true);
 
-    final ScanPrel newScanPrel = ScanPrel.create(scan, filter.getTraitSet(),
+    final ScanPrel newScanPrel = new ScanPrel(scan, filter.getTraitSet(),
         newGroupsScan, scan.getRowType());
     if (mongoFilterBuilder.isAllExpressionsConverted()) {
       /*
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/SchemalessScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/SchemalessScan.java
index 1db83f579e0..d227b04aa50 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/SchemalessScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/SchemalessScan.java
@@ -92,6 +92,12 @@ public ScanStats getScanStats() {
     return ScanStats.ZERO_RECORD_TABLE;
   }
 
+
+  @Override
+  public boolean canPushdownProjects(List<SchemaPath> columns) {
+    return false;
+  }
+
   @Override
   public boolean supportsPartitionFilterPushdown() {
     return false;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
index 8ea15d3cb89..a051d994156 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
@@ -876,7 +876,7 @@ else if (exprHasPrefix && refHasPrefix) {
    */
   @Override
   protected IterOutcome handleNullInput() {
-    if (! popConfig.isOutputProj()) {
+    if (!popConfig.isOutputProj()) {
       return super.handleNullInput();
     }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ConvertCountToDirectScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ConvertCountToDirectScan.java
index 5ef97b99cd0..f8cda8c55d5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ConvertCountToDirectScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ConvertCountToDirectScan.java
@@ -128,7 +128,7 @@ public void onMatch(RelOptRuleCall call) {
     final ScanStats scanStats = new ScanStats(ScanStats.GroupScanProperty.EXACT_ROW_COUNT, 1, 1, scanRowType.getFieldCount());
     final GroupScan directScan = new MetadataDirectGroupScan(reader, oldGrpScan.getFiles(), scanStats);
 
-    final ScanPrel newScan = ScanPrel.create(scan,
+    final ScanPrel newScan = new ScanPrel(scan,
         scan.getTraitSet().plus(Prel.DRILL_PHYSICAL).plus(DrillDistributionTrait.SINGLETON), directScan,
         scanRowType);
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ProjectAllowDupPrel.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ProjectAllowDupPrel.java
index 55e6eaadc8a..07936f3b482 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ProjectAllowDupPrel.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ProjectAllowDupPrel.java
@@ -39,13 +39,18 @@
 public class ProjectAllowDupPrel extends ProjectPrel {
 
   public ProjectAllowDupPrel(RelOptCluster cluster, RelTraitSet traits, RelNode child, List<RexNode> exps,
-      RelDataType rowType) {
-    super(cluster, traits, child, exps, rowType);
+                             RelDataType rowType) {
+    this(cluster, traits, child, exps, rowType, false);
+  }
+
+  public ProjectAllowDupPrel(RelOptCluster cluster, RelTraitSet traits, RelNode child, List<RexNode> exps,
+      RelDataType rowType, boolean outputProj) {
+    super(cluster, traits, child, exps, rowType, outputProj);
   }
 
   @Override
   public ProjectAllowDupPrel copy(RelTraitSet traitSet, RelNode input, List<RexNode> exps, RelDataType rowType) {
-    return new ProjectAllowDupPrel(getCluster(), traitSet, input, exps, rowType);
+    return new ProjectAllowDupPrel(getCluster(), traitSet, input, exps, rowType, outputProj);
   }
 
   @Override
@@ -54,7 +59,8 @@ public PhysicalOperator getPhysicalOperator(PhysicalPlanCreator creator) throws
 
     PhysicalOperator childPOP = child.getPhysicalOperator(creator);
 
-    Project p = new Project(this.getProjectExpressions(new DrillParseContext(PrelUtil.getSettings(getCluster()))),  childPOP);
+    Project p = new Project(this.getProjectExpressions(new DrillParseContext(PrelUtil.getSettings(getCluster()))),
+        childPOP, outputProj);
     return creator.addMetadata(this, p);
   }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ProjectPrel.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ProjectPrel.java
index 4f06b647cf3..395e7783659 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ProjectPrel.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ProjectPrel.java
@@ -48,7 +48,7 @@
 public class ProjectPrel extends DrillProjectRelBase implements Prel{
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ProjectPrel.class);
 
-  private final boolean outputProj;
+  protected final boolean outputProj;
 
   public ProjectPrel(RelOptCluster cluster, RelTraitSet traits, RelNode child, List<RexNode> exps,
       RelDataType rowType) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ScanPrel.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ScanPrel.java
index a2655b36e9b..ff901c9ba69 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ScanPrel.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ScanPrel.java
@@ -48,8 +48,11 @@
   protected final GroupScan groupScan;
   private final RelDataType rowType;
 
-  public ScanPrel(RelOptCluster cluster, RelTraitSet traits,
-      GroupScan groupScan, RelDataType rowType) {
+  public ScanPrel(RelNode old, RelTraitSet traitSets, GroupScan scan, RelDataType rowType) {
+    this(old.getCluster(), traitSets, scan, rowType);
+  }
+
+  public ScanPrel(RelOptCluster cluster, RelTraitSet traits, GroupScan groupScan, RelDataType rowType) {
     super(cluster, traits);
     this.groupScan = getCopy(groupScan);
     this.rowType = rowType;
@@ -86,11 +89,6 @@ public GroupScan getGroupScan() {
     return groupScan;
   }
 
-  public static ScanPrel create(RelNode old, RelTraitSet traitSets,
-      GroupScan scan, RelDataType rowType) {
-    return new ScanPrel(old.getCluster(), traitSets, getCopy(scan), rowType);
-  }
-
   @Override
   public RelWriter explainTerms(RelWriter pw) {
     return super.explainTerms(pw).item("groupscan", groupScan.getDigest());
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ScanPrule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ScanPrule.java
index 4f66e90aaf7..57c4517aac6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ScanPrule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/ScanPrule.java
@@ -44,7 +44,7 @@ public void onMatch(RelOptRuleCall call) {
 
     final RelTraitSet traits = scan.getTraitSet().plus(Prel.DRILL_PHYSICAL).plus(partition);
 
-    final DrillScanPrel newScan = ScanPrel.create(scan, traits, groupScan, scan.getRowType());
+    final DrillScanPrel newScan = new ScanPrel(scan, traits, groupScan, scan.getRowType());
 
     call.transformTo(newScan);
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/StarColumnConverter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/StarColumnConverter.java
index ac5ec822b11..91eeffee6ce 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/StarColumnConverter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/StarColumnConverter.java
@@ -76,12 +76,12 @@ public Prel visitScreen(ScreenPrel prel, Void value) throws RuntimeException {
     Prel child = ((Prel) prel.getInput(0)).accept(this, null);
 
     if (prefixedForStar) {
-      if (!prefixedForWriter) {
+      if (prefixedForWriter) {
+        // Prefix is added under CTAS Writer. We need create a new Screen with the converted child.
+        return prel.copy(prel.getTraitSet(), Collections.singletonList(child));
+      } else {
         // Prefix is added for SELECT only, not for CTAS writer.
         return insertProjUnderScreenOrWriter(prel, prel.getInput().getRowType(), child);
-      } else {
-        // Prefix is added under CTAS Writer. We need create a new Screen with the converted child.
-        return prel.copy(prel.getTraitSet(), Collections.<RelNode>singletonList(child));
       }
     } else {
       // No prefix is
@@ -118,13 +118,23 @@ private Prel insertProjUnderScreenOrWriter(Prel prel, RelDataType origRowType, P
     RelDataType newRowType = RexUtil.createStructType(child.getCluster().getTypeFactory(),
         exprs, origRowType.getFieldNames(), null);
 
-    int fieldCount = prel.getRowType().isStruct()? prel.getRowType().getFieldCount():1;
+    int fieldCount = prel.getRowType().isStruct() ? prel.getRowType().getFieldCount() : 1;
 
     // Insert PUS/PUW : remove the prefix and keep the original field name.
-    if (fieldCount > 1) { // // no point in allowing duplicates if we only have one column
-      proj = new ProjectAllowDupPrel(child.getCluster(), child.getTraitSet(), child, exprs, newRowType);
+    if (fieldCount > 1) { // no point in allowing duplicates if we only have one column
+      proj = new ProjectAllowDupPrel(child.getCluster(),
+          child.getTraitSet(),
+          child,
+          exprs,
+          newRowType,
+          true); //outputProj = true : will allow to build the schema for PUS Project, see ProjectRecordBatch#handleNullInput()
     } else {
-      proj = new ProjectPrel(child.getCluster(), child.getTraitSet(), child, exprs, newRowType);
+      proj = new ProjectPrel(child.getCluster(),
+          child.getTraitSet(),
+          child,
+          exprs,
+          newRowType,
+          true); //outputProj = true : will allow to build the schema for PUS Project, see ProjectRecordBatch#handleNullInput()
     }
 
     children.add(proj);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DefaultSqlHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DefaultSqlHandler.java
index bad161756b5..c75311f4ff1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DefaultSqlHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DefaultSqlHandler.java
@@ -506,6 +506,7 @@ protected Prel convertToPrel(RelNode drel, RelDataType validatedRowType) throws
      * The rest of projects will remove the duplicate column when we generate POP in json format.
      */
     phyRelNode = StarColumnConverter.insertRenameProject(phyRelNode);
+    log("Physical RelNode after Top and Rename Project inserting: ", phyRelNode, logger, null);
 
     /*
      * 2.)
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaPushFilterIntoRecordGenerator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaPushFilterIntoRecordGenerator.java
index 3b62daeaa6a..dae5c284646 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaPushFilterIntoRecordGenerator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaPushFilterIntoRecordGenerator.java
@@ -99,7 +99,7 @@ protected void doMatch(RelOptRuleCall call, ScanPrel scan, ProjectPrel project,
     final InfoSchemaGroupScan newGroupsScan = new InfoSchemaGroupScan(groupScan.getTable(), infoSchemaFilter);
     newGroupsScan.setFilterPushedDown(true);
 
-    RelNode input = ScanPrel.create(scan, filter.getTraitSet(), newGroupsScan, scan.getRowType());
+    RelNode input = new ScanPrel(scan, filter.getTraitSet(), newGroupsScan, scan.getRowType());
     if (project != null) {
       input = project.copy(project.getTraitSet(), input, project.getProjects(), filter.getRowType());
     }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetPushDownFilter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetPushDownFilter.java
index 2057ac56277..e58d306cdd1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetPushDownFilter.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetPushDownFilter.java
@@ -167,7 +167,7 @@ protected void doOnMatch(RelOptRuleCall call, FilterPrel filter, ProjectPrel pro
       return;
     }
 
-    RelNode newScan = ScanPrel.create(scan, scan.getTraitSet(), newGroupScan, scan.getRowType());
+    RelNode newScan = new ScanPrel(scan, scan.getTraitSet(), newGroupScan, scan.getRowType());
 
     if (project != null) {
       newScan = project.copy(project.getTraitSet(), ImmutableList.of(newScan));
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestUnionAll.java b/exec/java-exec/src/test/java/org/apache/drill/TestUnionAll.java
index d1d5654a01b..91859397285 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestUnionAll.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestUnionAll.java
@@ -1248,7 +1248,9 @@ public void testUnionAllLeftEmptyDir() throws Exception {
 
   @Test
   public void testUnionAllBothEmptyDirs() throws Exception {
-    final BatchSchema expectedSchema = new SchemaBuilder().build();
+    final BatchSchema expectedSchema = new SchemaBuilder()
+        .addNullable("key", TypeProtos.MinorType.INT)
+        .build();
 
     testBuilder()
         .sqlQuery("SELECT key FROM dfs.tmp.`%1$s` UNION ALL SELECT key FROM dfs.tmp.`%1$s`", EMPTY_DIR_NAME)
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestUnionDistinct.java b/exec/java-exec/src/test/java/org/apache/drill/TestUnionDistinct.java
index db43512758c..59108179c9b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestUnionDistinct.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestUnionDistinct.java
@@ -826,7 +826,9 @@ public void testUnionLeftEmptyDir() throws Exception {
 
   @Test
   public void testUnionBothEmptyDirs() throws Exception {
-    final BatchSchema expectedSchema = new SchemaBuilder().build();
+    final BatchSchema expectedSchema = new SchemaBuilder()
+        .addNullable("key", TypeProtos.MinorType.INT)
+        .build();
 
     testBuilder()
         .sqlQuery("SELECT key FROM dfs.tmp.`%1$s` UNION SELECT key FROM dfs.tmp.`%1$s`", EMPTY_DIR_NAME)
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java b/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java
index 1512059a59b..2b0eb42f24c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/TestEmptyInputSql.java
@@ -222,12 +222,9 @@ public void testEmptyDirectory() throws Exception {
 
   @Test
   public void testEmptyDirectoryAndFieldInQuery() throws Exception {
-    final List<Pair<SchemaPath, TypeProtos.MajorType>> expectedSchema = Lists.newArrayList();
-    final TypeProtos.MajorType majorType = TypeProtos.MajorType.newBuilder()
-        .setMinorType(TypeProtos.MinorType.INT) // field "key" is absent in schemaless table
-        .setMode(TypeProtos.DataMode.OPTIONAL)
+    final BatchSchema expectedSchema = new SchemaBuilder()
+        .addNullable("key", TypeProtos.MinorType.INT)
         .build();
-    expectedSchema.add(Pair.of(SchemaPath.getSimplePath("key"), majorType));
 
     testBuilder()
         .sqlQuery("select key from dfs.tmp.`%s`", EMPTY_DIR_NAME)
@@ -236,6 +233,36 @@ public void testEmptyDirectoryAndFieldInQuery() throws Exception {
         .run();
   }
 
+  @Test
+  public void testRenameProjectEmptyDirectory() throws Exception {
+    final BatchSchema expectedSchema = new SchemaBuilder()
+        .addNullable("WeekId", TypeProtos.MinorType.INT)
+        .addNullable("ProductName", TypeProtos.MinorType.INT)
+        .build();
+
+    testBuilder()
+        .sqlQuery("select WeekId, Product as ProductName from (select CAST(`dir0` as INT) AS WeekId, " +
+            "Product from dfs.tmp.`%s`)", EMPTY_DIR_NAME)
+        .schemaBaseLine(expectedSchema)
+        .build()
+        .run();
+  }
+
+  @Test
+  public void testRenameProjectEmptyJson() throws Exception {
+    final BatchSchema expectedSchema = new SchemaBuilder()
+        .addNullable("WeekId", TypeProtos.MinorType.INT)
+        .addNullable("ProductName", TypeProtos.MinorType.INT)
+        .build();
+
+    testBuilder()
+        .sqlQuery("select WeekId, Product as ProductName from (select CAST(`dir0` as INT) AS WeekId, " +
+            "Product from cp.`%s`)", SINGLE_EMPTY_JSON)
+        .schemaBaseLine(expectedSchema)
+        .build()
+        .run();
+  }
+
   @Test
   public void testEmptyDirectoryPlanSerDe() throws Exception {
     String query = String.format("select * from dfs.tmp.`%s`", EMPTY_DIR_NAME);
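
A recurring mechanical change in the diff above is that the static factory
ScanPrel.create(scan, traits, groupScan, rowType) is removed in favour of a
constructor overload, so callers now write
new ScanPrel(scan, filter.getTraitSet(), newGroupScan, scan.getRowType()).
The following sketch shows that delegating-constructor pattern with
simplified stand-in types rather than Calcite's or Drill's real ones:

import java.util.Objects;

public class ScanPrelConstructorSketch {

  // Stand-ins for Calcite's RelOptCluster and RelNode.
  static class Cluster { }

  static class RelNode {
    private final Cluster cluster;

    RelNode(Cluster cluster) { this.cluster = Objects.requireNonNull(cluster); }

    Cluster getCluster() { return cluster; }
  }

  static final class ScanPrel extends RelNode {
    private final String groupScanDigest;

    // Convenience overload replacing the removed ScanPrel.create(RelNode, ...):
    // take the cluster from an existing node and delegate to the primary constructor.
    ScanPrel(RelNode old, String groupScanDigest) {
      this(old.getCluster(), groupScanDigest);
    }

    // Primary constructor, unchanged in spirit.
    ScanPrel(Cluster cluster, String groupScanDigest) {
      super(cluster);
      this.groupScanDigest = groupScanDigest;
    }

    @Override
    public String toString() {
      return "ScanPrel(groupscan=" + groupScanDigest + ")";
    }
  }

  public static void main(String[] args) {
    RelNode scan = new RelNode(new Cluster());
    // Callers previously wrote ScanPrel.create(scan, ...); now:
    ScanPrel newScanPrel = new ScanPrel(scan, "SchemalessScan[...]");
    System.out.println(newScanPrel);
  }
}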


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services