Posted to commits@hive.apache.org by ha...@apache.org on 2013/07/26 17:50:58 UTC

svn commit: r1507357 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java test/queries/clientpositive/dynamic_partition_skip_default.q test/results/clientpositive/dynamic_partition_skip_default.q.out

Author: hashutosh
Date: Fri Jul 26 15:50:58 2013
New Revision: 1507357

URL: http://svn.apache.org/r1507357
Log:
HIVE-4878 : With dynamic partitioning, some queries would scan the default partition even when the query does not use it. (Laljo John Pullokkaran via Ashutosh Chauhan)
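In essence, the patch threads the session HiveConf into pruneBySequentialScan so the pruner knows the configured default-partition name and can skip such partitions when the predicate evaluates to unknown. Below is a minimal standalone sketch of that decision, for orientation only: retainOnUnknown is a hypothetical helper mirroring the new branch, and the hard-coded constant stands in for conf.getVar(HiveConf.ConfVars.DEFAULTPARTITIONNAME).

import java.util.Arrays;
import java.util.List;

public class DefaultPartitionSkipSketch {
  // Stand-in for conf.getVar(HiveConf.ConfVars.DEFAULTPARTITIONNAME);
  // the real pruner reads this from the session configuration.
  static final String DEFAULT_PARTITION_NAME = "__HIVE_DEFAULT_PARTITION__";

  // Mirrors the new branch in pruneBySequentialScan: when the partition
  // predicate evaluates to NULL (unknown), retain the partition only if
  // none of its values is the default-partition marker.
  static boolean retainOnUnknown(List<String> partitionValues) {
    return !partitionValues.contains(DEFAULT_PARTITION_NAME);
  }

  public static void main(String[] args) {
    // partcol1=1/partcol2=1: still retained when the result is unknown.
    System.out.println(retainOnUnknown(Arrays.asList("1", "1")));   // true
    // partcol1=1/partcol2=__HIVE_DEFAULT_PARTITION__: now skipped.
    System.out.println(retainOnUnknown(
        Arrays.asList("1", DEFAULT_PARTITION_NAME)));               // false
  }
}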

Added:
    hive/trunk/ql/src/test/queries/clientpositive/dynamic_partition_skip_default.q
    hive/trunk/ql/src/test/results/clientpositive/dynamic_partition_skip_default.q.out
Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java?rev=1507357&r1=1507356&r2=1507357&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java Fri Jul 26 15:50:58 2013
@@ -217,7 +217,7 @@ public class PartitionPruner implements 
               LOG.info(ErrorMsg.INVALID_JDO_FILTER_EXPRESSION.getMsg("by condition '"
                   + message + "'"));
               pruneBySequentialScan(tab, true_parts, unkn_parts, denied_parts,
-                  prunerExpr, rowObjectInspector);
+                  prunerExpr, rowObjectInspector, conf);
             }
           }
         }
@@ -300,10 +300,11 @@ public class PartitionPruner implements 
    * @param denied_parts pruned out partitions.
    * @param prunerExpr the SQL predicate that involves partition columns.
    * @param rowObjectInspector object inspector used by the evaluator
+   * @param conf Hive configuration object; must not be null.
    * @throws Exception
    */
   static private void pruneBySequentialScan(Table tab, Set<Partition> true_parts, Set<Partition> unkn_parts,
-      Set<Partition> denied_parts, ExprNodeDesc prunerExpr, StructObjectInspector rowObjectInspector)
+      Set<Partition> denied_parts, ExprNodeDesc prunerExpr, StructObjectInspector rowObjectInspector, HiveConf conf)
       throws Exception {
 
     List<String> trueNames = null;
@@ -320,6 +321,7 @@ public class PartitionPruner implements 
     List<String> partCols = new ArrayList<String>(pCols.size());
     List<String> values = new ArrayList<String>(pCols.size());
     Object[] objectWithPart = new Object[2];
+    String defaultPartitionName = conf.getVar(HiveConf.ConfVars.DEFAULTPARTITIONNAME);
 
     for (FieldSchema pCol : pCols) {
       partCols.add(pCol.getName());
@@ -344,11 +346,17 @@ public class PartitionPruner implements 
       Boolean r = (Boolean) PartExprEvalUtils.evaluateExprOnPart(handle, objectWithPart);
 
       if (r == null) {
-        if (unknNames == null) {
-          unknNames = new LinkedList<String>();
+        // Reject a default partition when we cannot determine whether it should be included.
+        // Note that the predicate contains only the partition-column parts of the original predicate.
+        if (values.contains(defaultPartitionName)) {
+          LOG.debug("skipping default/bad partition: " + partName);
+        } else {
+          if (unknNames == null) {
+            unknNames = new LinkedList<String>();
+          }
+          unknNames.add(partName);
+          LOG.debug("retained unknown partition: " + partName);
         }
-        unknNames.add(partName);
-        LOG.debug("retained unknown partition: " + partName);
       } else if (Boolean.TRUE.equals(r)) {
         if (trueNames == null) {
           trueNames = new LinkedList<String>();

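For context on why the evaluation returns null for these partitions in the first place: a default partition stores the literal string __HIVE_DEFAULT_PARTITION__ in a column typed, say, int, so the value cannot be converted and the comparison follows SQL three-valued logic. A rough sketch of that effect in plain Java (an illustration of the semantics, not Hive's actual expression evaluator):

public class ThreeValuedLogicSketch {
  // "Cast" a partition value string to Integer roughly the way Hive would;
  // the default-partition marker is not parseable, so the result is null.
  static Integer toInt(String partitionValue) {
    try {
      return Integer.valueOf(partitionValue);
    } catch (NumberFormatException e) {
      return null; // e.g. "__HIVE_DEFAULT_PARTITION__"
    }
  }

  // partcol = literal under SQL three-valued logic: a null operand yields
  // a null (unknown) result rather than false.
  static Boolean equalsLiteral(String partitionValue, int literal) {
    Integer v = toInt(partitionValue);
    return (v == null) ? null : Boolean.valueOf(v.intValue() == literal);
  }

  public static void main(String[] args) {
    System.out.println(equalsLiteral("1", 1));                          // true
    System.out.println(equalsLiteral("2", 1));                          // false
    System.out.println(equalsLiteral("__HIVE_DEFAULT_PARTITION__", 1)); // null, i.e. the r == null branch
  }
}
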
Added: hive/trunk/ql/src/test/queries/clientpositive/dynamic_partition_skip_default.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/dynamic_partition_skip_default.q?rev=1507357&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/dynamic_partition_skip_default.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/dynamic_partition_skip_default.q Fri Jul 26 15:50:58 2013
@@ -0,0 +1,19 @@
+create table dynamic_part_table(intcol int) partitioned by (partcol1 int, partcol2 int);
+
+set hive.exec.dynamic.partition.mode=nonstrict;
+
+insert into table dynamic_part_table partition(partcol1, partcol2) select 1, 1, 1 from src where key=150;
+
+insert into table dynamic_part_table partition(partcol1, partcol2) select 1, NULL, 1 from src where key=150;
+
+insert into table dynamic_part_table partition(partcol1, partcol2) select 1, 1, NULL from src where key=150;
+
+insert into table dynamic_part_table partition(partcol1, partcol2) select 1, NULL, NULL from src where key=150;
+
+explain extended select intcol from dynamic_part_table where partcol1=1 and partcol2=1;
+
+set hive.exec.dynamic.partition.mode=strict;
+
+explain extended select intcol from dynamic_part_table where partcol1=1 and partcol2=1;
+
+explain extended select intcol from dynamic_part_table where (partcol1=1 and partcol2=1) or (partcol1=1 and partcol2='__HIVE_DEFAULT_PARTITION__');

Added: hive/trunk/ql/src/test/results/clientpositive/dynamic_partition_skip_default.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/dynamic_partition_skip_default.q.out?rev=1507357&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/dynamic_partition_skip_default.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/dynamic_partition_skip_default.q.out Fri Jul 26 15:50:58 2013
@@ -0,0 +1,406 @@
+PREHOOK: query: create table dynamic_part_table(intcol int) partitioned by (partcol1 int, partcol2 int)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table dynamic_part_table(intcol int) partitioned by (partcol1 int, partcol2 int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dynamic_part_table
+PREHOOK: query: insert into table dynamic_part_table partition(partcol1, partcol2) select 1, 1, 1 from src where key=150
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dynamic_part_table
+POSTHOOK: query: insert into table dynamic_part_table partition(partcol1, partcol2) select 1, 1, 1 from src where key=150
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dynamic_part_table@partcol1=1/partcol2=1
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=1,partcol2=1).intcol SIMPLE []
+PREHOOK: query: insert into table dynamic_part_table partition(partcol1, partcol2) select 1, NULL, 1 from src where key=150
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dynamic_part_table
+POSTHOOK: query: insert into table dynamic_part_table partition(partcol1, partcol2) select 1, NULL, 1 from src where key=150
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dynamic_part_table@partcol1=__HIVE_DEFAULT_PARTITION__/partcol2=1
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=1,partcol2=1).intcol SIMPLE []
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=__HIVE_DEFAULT_PARTITION__,partcol2=1).intcol SIMPLE []
+PREHOOK: query: insert into table dynamic_part_table partition(partcol1, partcol2) select 1, 1, NULL from src where key=150
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dynamic_part_table
+POSTHOOK: query: insert into table dynamic_part_table partition(partcol1, partcol2) select 1, 1, NULL from src where key=150
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dynamic_part_table@partcol1=1/partcol2=__HIVE_DEFAULT_PARTITION__
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=1,partcol2=1).intcol SIMPLE []
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=1,partcol2=__HIVE_DEFAULT_PARTITION__).intcol SIMPLE []
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=__HIVE_DEFAULT_PARTITION__,partcol2=1).intcol SIMPLE []
+PREHOOK: query: insert into table dynamic_part_table partition(partcol1, partcol2) select 1, NULL, NULL from src where key=150
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dynamic_part_table
+POSTHOOK: query: insert into table dynamic_part_table partition(partcol1, partcol2) select 1, NULL, NULL from src where key=150
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dynamic_part_table@partcol1=__HIVE_DEFAULT_PARTITION__/partcol2=__HIVE_DEFAULT_PARTITION__
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=1,partcol2=1).intcol SIMPLE []
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=1,partcol2=__HIVE_DEFAULT_PARTITION__).intcol SIMPLE []
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=__HIVE_DEFAULT_PARTITION__,partcol2=1).intcol SIMPLE []
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=__HIVE_DEFAULT_PARTITION__,partcol2=__HIVE_DEFAULT_PARTITION__).intcol SIMPLE []
+PREHOOK: query: explain extended select intcol from dynamic_part_table where partcol1=1 and partcol2=1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select intcol from dynamic_part_table where partcol1=1 and partcol2=1
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=1,partcol2=1).intcol SIMPLE []
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=1,partcol2=__HIVE_DEFAULT_PARTITION__).intcol SIMPLE []
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=__HIVE_DEFAULT_PARTITION__,partcol2=1).intcol SIMPLE []
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=__HIVE_DEFAULT_PARTITION__,partcol2=__HIVE_DEFAULT_PARTITION__).intcol SIMPLE []
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME dynamic_part_table))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL intcol))) (TOK_WHERE (and (= (TOK_TABLE_OR_COL partcol1) 1) (= (TOK_TABLE_OR_COL partcol2) 1)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        dynamic_part_table 
+          TableScan
+            alias: dynamic_part_table
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: intcol
+                    type: int
+              outputColumnNames: _col0
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      columns _col0
+                      columns.types int
+                      escape.delim \
+                      hive.serialization.extend.nesting.levels true
+                      serialization.format 1
+                TotalFiles: 1
+                GatherStats: false
+                MultiFileSpray: false
+      Needs Tagging: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: partcol2=1
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              partcol1 1
+              partcol2 1
+            properties:
+              bucket_count -1
+              columns intcol
+              columns.types int
+#### A masked pattern was here ####
+              name default.dynamic_part_table
+              numFiles 1
+              numRows 1
+              partition_columns partcol1/partcol2
+              rawDataSize 1
+              serialization.ddl struct dynamic_part_table { i32 intcol}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 2
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns intcol
+                columns.types int
+#### A masked pattern was here ####
+                name default.dynamic_part_table
+                numFiles 4
+                numPartitions 4
+                numRows 4
+                partition_columns partcol1/partcol2
+                rawDataSize 4
+                serialization.ddl struct dynamic_part_table { i32 intcol}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 8
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dynamic_part_table
+            name: default.dynamic_part_table
+      Truncated Path -> Alias:
+        /dynamic_part_table/partcol1=1/partcol2=1 [dynamic_part_table]
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: explain extended select intcol from dynamic_part_table where partcol1=1 and partcol2=1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select intcol from dynamic_part_table where partcol1=1 and partcol2=1
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=1,partcol2=1).intcol SIMPLE []
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=1,partcol2=__HIVE_DEFAULT_PARTITION__).intcol SIMPLE []
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=__HIVE_DEFAULT_PARTITION__,partcol2=1).intcol SIMPLE []
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=__HIVE_DEFAULT_PARTITION__,partcol2=__HIVE_DEFAULT_PARTITION__).intcol SIMPLE []
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME dynamic_part_table))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL intcol))) (TOK_WHERE (and (= (TOK_TABLE_OR_COL partcol1) 1) (= (TOK_TABLE_OR_COL partcol2) 1)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        dynamic_part_table 
+          TableScan
+            alias: dynamic_part_table
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: intcol
+                    type: int
+              outputColumnNames: _col0
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      columns _col0
+                      columns.types int
+                      escape.delim \
+                      hive.serialization.extend.nesting.levels true
+                      serialization.format 1
+                TotalFiles: 1
+                GatherStats: false
+                MultiFileSpray: false
+      Needs Tagging: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: partcol2=1
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              partcol1 1
+              partcol2 1
+            properties:
+              bucket_count -1
+              columns intcol
+              columns.types int
+#### A masked pattern was here ####
+              name default.dynamic_part_table
+              numFiles 1
+              numRows 1
+              partition_columns partcol1/partcol2
+              rawDataSize 1
+              serialization.ddl struct dynamic_part_table { i32 intcol}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 2
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns intcol
+                columns.types int
+#### A masked pattern was here ####
+                name default.dynamic_part_table
+                numFiles 4
+                numPartitions 4
+                numRows 4
+                partition_columns partcol1/partcol2
+                rawDataSize 4
+                serialization.ddl struct dynamic_part_table { i32 intcol}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 8
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dynamic_part_table
+            name: default.dynamic_part_table
+      Truncated Path -> Alias:
+        /dynamic_part_table/partcol1=1/partcol2=1 [dynamic_part_table]
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: explain extended select intcol from dynamic_part_table where (partcol1=1 and partcol2=1) or (partcol1=1 and partcol2='__HIVE_DEFAULT_PARTITION__')
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select intcol from dynamic_part_table where (partcol1=1 and partcol2=1) or (partcol1=1 and partcol2='__HIVE_DEFAULT_PARTITION__')
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=1,partcol2=1).intcol SIMPLE []
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=1,partcol2=__HIVE_DEFAULT_PARTITION__).intcol SIMPLE []
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=__HIVE_DEFAULT_PARTITION__,partcol2=1).intcol SIMPLE []
+POSTHOOK: Lineage: dynamic_part_table PARTITION(partcol1=__HIVE_DEFAULT_PARTITION__,partcol2=__HIVE_DEFAULT_PARTITION__).intcol SIMPLE []
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME dynamic_part_table))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL intcol))) (TOK_WHERE (or (and (= (TOK_TABLE_OR_COL partcol1) 1) (= (TOK_TABLE_OR_COL partcol2) 1)) (and (= (TOK_TABLE_OR_COL partcol1) 1) (= (TOK_TABLE_OR_COL partcol2) '__HIVE_DEFAULT_PARTITION__'))))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        dynamic_part_table 
+          TableScan
+            alias: dynamic_part_table
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: intcol
+                    type: int
+              outputColumnNames: _col0
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      columns _col0
+                      columns.types int
+                      escape.delim \
+                      hive.serialization.extend.nesting.levels true
+                      serialization.format 1
+                TotalFiles: 1
+                GatherStats: false
+                MultiFileSpray: false
+      Needs Tagging: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: partcol2=1
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              partcol1 1
+              partcol2 1
+            properties:
+              bucket_count -1
+              columns intcol
+              columns.types int
+#### A masked pattern was here ####
+              name default.dynamic_part_table
+              numFiles 1
+              numRows 1
+              partition_columns partcol1/partcol2
+              rawDataSize 1
+              serialization.ddl struct dynamic_part_table { i32 intcol}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 2
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns intcol
+                columns.types int
+#### A masked pattern was here ####
+                name default.dynamic_part_table
+                numFiles 4
+                numPartitions 4
+                numRows 4
+                partition_columns partcol1/partcol2
+                rawDataSize 4
+                serialization.ddl struct dynamic_part_table { i32 intcol}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 8
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dynamic_part_table
+            name: default.dynamic_part_table
+#### A masked pattern was here ####
+          Partition
+            base file name: partcol2=__HIVE_DEFAULT_PARTITION__
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              partcol1 1
+              partcol2 __HIVE_DEFAULT_PARTITION__
+            properties:
+              bucket_count -1
+              columns intcol
+              columns.types int
+#### A masked pattern was here ####
+              name default.dynamic_part_table
+              numFiles 1
+              numRows 1
+              partition_columns partcol1/partcol2
+              rawDataSize 1
+              serialization.ddl struct dynamic_part_table { i32 intcol}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 2
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns intcol
+                columns.types int
+#### A masked pattern was here ####
+                name default.dynamic_part_table
+                numFiles 4
+                numPartitions 4
+                numRows 4
+                partition_columns partcol1/partcol2
+                rawDataSize 4
+                serialization.ddl struct dynamic_part_table { i32 intcol}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 8
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.dynamic_part_table
+            name: default.dynamic_part_table
+      Truncated Path -> Alias:
+        /dynamic_part_table/partcol1=1/partcol2=1 [dynamic_part_table]
+        /dynamic_part_table/partcol1=1/partcol2=__HIVE_DEFAULT_PARTITION__ [dynamic_part_table]
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+