Posted to commits@hive.apache.org by he...@apache.org on 2011/10/31 18:30:22 UTC

svn commit: r1195577 [2/2] - in /hive/trunk: common/src/java/org/apache/hadoop/hive/conf/ jdbc/src/java/org/apache/hadoop/hive/jdbc/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/io/ ql/src/java/org/apache/hadoop/hiv...
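
The golden output below exercises Hive's metadata-only query optimization: when a query reads only the partition columns of a partitioned table, the planner replaces each selected partition's real input with a fake path served by org.apache.hadoop.hive.ql.io.OneNullRowInputFormat and org.apache.hadoop.hive.serde2.NullStructSerDe, so the aggregate is answered from metastore partition values instead of a file scan. A minimal sketch of the pattern the test drives is below; the SET line is an assumption for illustration (the controlling property lives in the HiveConf change in this commit but is not visible in this excerpt):

    -- Assumed property name; enables metadata-only planning for the session.
    SET hive.optimize.metadataonly=true;

    CREATE TABLE test1 (a INT, b DOUBLE) PARTITIONED BY (ds STRING);
    ALTER TABLE test1 ADD PARTITION (ds='1');

    -- Reads only the partition column ds: the plan below lists
    -- fake-path-metadata-only-query inputs, i.e. no data files are scanned.
    SELECT max(ds) FROM test1;     -- returns 1

    -- count(ds) counts rows rather than partitions, so the real (empty)
    -- partition directory is still scanned and the result is 0, as the
    -- output below shows.
    SELECT count(ds) FROM test1;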

Added: hive/trunk/ql/src/test/results/clientpositive/metadataonly1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/metadataonly1.q.out?rev=1195577&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/metadataonly1.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/metadataonly1.q.out Mon Oct 31 17:30:21 2011
@@ -0,0 +1,1518 @@
+PREHOOK: query: CREATE TABLE TEST1(A INT, B DOUBLE) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE TEST1(A INT, B DOUBLE) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@TEST1
+PREHOOK: query: explain extended select max(ds) from TEST1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select max(ds) from TEST1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME TEST1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION max (TOK_TABLE_OR_COL ds))))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        test1 
+          TableScan
+            alias: test1
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: ds
+                    type: string
+              outputColumnNames: ds
+              Group By Operator
+                aggregations:
+                      expr: max(ds)
+                bucketGroup: false
+                mode: hash
+                outputColumnNames: _col0
+                Reduce Output Operator
+                  sort order: 
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: string
+      Needs Tagging: false
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: max(VALUE._col0)
+          bucketGroup: false
+          mode: mergepartial
+          outputColumnNames: _col0
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+            outputColumnNames: _col0
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              directory: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-12_585_1868075955473519664/-ext-10001
+              NumFilesPerFileSink: 1
+              Stats Publishing Key Prefix: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-12_585_1868075955473519664/-ext-10001/
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    columns _col0
+                    columns.types string
+                    escape.delim \
+                    serialization.format 1
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: select max(ds) from TEST1
+PREHOOK: type: QUERY
+PREHOOK: Output: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-12_860_603571457300760090/-mr-10000
+POSTHOOK: query: select max(ds) from TEST1
+POSTHOOK: type: QUERY
+POSTHOOK: Output: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-12_860_603571457300760090/-mr-10000
+
+PREHOOK: query: alter table TEST1 add partition (ds='1')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@test1
+POSTHOOK: query: alter table TEST1 add partition (ds='1')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@test1
+POSTHOOK: Output: default@test1@ds=1
+PREHOOK: query: explain extended select max(ds) from TEST1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select max(ds) from TEST1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME TEST1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION max (TOK_TABLE_OR_COL ds))))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        test1 
+          TableScan
+            alias: test1
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: ds
+                    type: string
+              outputColumnNames: ds
+              Group By Operator
+                aggregations:
+                      expr: max(ds)
+                bucketGroup: false
+                mode: hash
+                outputColumnNames: _col0
+                Reduce Output Operator
+                  sort order: 
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: string
+      Needs Tagging: false
+      Path -> Alias:
+        fake-path-metadata-only-query-default.test1{ds=1} [test1]
+      Path -> Partition:
+        fake-path-metadata-only-query-default.test1{ds=1} 
+          Partition
+            base file name: ds=1
+            input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 1
+            properties:
+              bucket_count -1
+              columns a,b
+              columns.types int:double
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1/ds=1
+              name default.test1
+              partition_columns ds
+              serialization.ddl struct test1 { i32 a, double b}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transient_lastDdlTime 1319740998
+            serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns a,b
+                columns.types int:double
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1
+                name default.test1
+                partition_columns ds
+                serialization.ddl struct test1 { i32 a, double b}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                transient_lastDdlTime 1319740992
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.test1
+            name: default.test1
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: max(VALUE._col0)
+          bucketGroup: false
+          mode: mergepartial
+          outputColumnNames: _col0
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+            outputColumnNames: _col0
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              directory: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-18_858_3586583087895818520/-ext-10001
+              NumFilesPerFileSink: 1
+              Stats Publishing Key Prefix: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-18_858_3586583087895818520/-ext-10001/
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    columns _col0
+                    columns.types string
+                    escape.delim \
+                    serialization.format 1
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: select max(ds) from TEST1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1@ds=1
+PREHOOK: Output: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-18_974_2551947253137562059/-mr-10000
+POSTHOOK: query: select max(ds) from TEST1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1@ds=1
+POSTHOOK: Output: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-18_974_2551947253137562059/-mr-10000
+1
+PREHOOK: query: explain extended select count(distinct ds) from TEST1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select count(distinct ds) from TEST1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME TEST1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL ds))))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        test1 
+          TableScan
+            alias: test1
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: ds
+                    type: string
+              outputColumnNames: ds
+              Group By Operator
+                aggregations:
+                      expr: count(DISTINCT ds)
+                bucketGroup: false
+                keys:
+                      expr: ds
+                      type: string
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Reduce Output Operator
+                  key expressions:
+                        expr: _col0
+                        type: string
+                  sort order: +
+                  tag: -1
+                  value expressions:
+                        expr: _col1
+                        type: bigint
+      Needs Tagging: false
+      Path -> Alias:
+        fake-path-metadata-only-query-default.test1{ds=1} [test1]
+      Path -> Partition:
+        fake-path-metadata-only-query-default.test1{ds=1} 
+          Partition
+            base file name: ds=1
+            input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 1
+            properties:
+              bucket_count -1
+              columns a,b
+              columns.types int:double
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1/ds=1
+              name default.test1
+              partition_columns ds
+              serialization.ddl struct test1 { i32 a, double b}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transient_lastDdlTime 1319740998
+            serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns a,b
+                columns.types int:double
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1
+                name default.test1
+                partition_columns ds
+                serialization.ddl struct test1 { i32 a, double b}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                transient_lastDdlTime 1319740992
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.test1
+            name: default.test1
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: count(DISTINCT KEY._col0:0._col0)
+          bucketGroup: false
+          mode: mergepartial
+          outputColumnNames: _col0
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: bigint
+            outputColumnNames: _col0
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              directory: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-24_614_8262716029839319533/-ext-10001
+              NumFilesPerFileSink: 1
+              Stats Publishing Key Prefix: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-24_614_8262716029839319533/-ext-10001/
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    columns _col0
+                    columns.types bigint
+                    escape.delim \
+                    serialization.format 1
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: select count(distinct ds) from TEST1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1@ds=1
+PREHOOK: Output: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-24_727_7465805034346288209/-mr-10000
+POSTHOOK: query: select count(distinct ds) from TEST1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1@ds=1
+POSTHOOK: Output: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-24_727_7465805034346288209/-mr-10000
+1
+PREHOOK: query: explain extended select count(ds) from TEST1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select count(ds) from TEST1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME TEST1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL ds))))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        test1 
+          TableScan
+            alias: test1
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: ds
+                    type: string
+              outputColumnNames: ds
+              Group By Operator
+                aggregations:
+                      expr: count(ds)
+                bucketGroup: false
+                mode: hash
+                outputColumnNames: _col0
+                Reduce Output Operator
+                  sort order: 
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: bigint
+      Needs Tagging: false
+      Path -> Alias:
+        pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1/ds=1 [test1]
+      Path -> Partition:
+        pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1/ds=1 
+          Partition
+            base file name: ds=1
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 1
+            properties:
+              bucket_count -1
+              columns a,b
+              columns.types int:double
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1/ds=1
+              name default.test1
+              partition_columns ds
+              serialization.ddl struct test1 { i32 a, double b}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transient_lastDdlTime 1319740998
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns a,b
+                columns.types int:double
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1
+                name default.test1
+                partition_columns ds
+                serialization.ddl struct test1 { i32 a, double b}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                transient_lastDdlTime 1319740992
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.test1
+            name: default.test1
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: count(VALUE._col0)
+          bucketGroup: false
+          mode: mergepartial
+          outputColumnNames: _col0
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: bigint
+            outputColumnNames: _col0
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              directory: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-30_281_2767795707655569681/-ext-10001
+              NumFilesPerFileSink: 1
+              Stats Publishing Key Prefix: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-30_281_2767795707655569681/-ext-10001/
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    columns _col0
+                    columns.types bigint
+                    escape.delim \
+                    serialization.format 1
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: select count(ds) from TEST1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1@ds=1
+PREHOOK: Output: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-30_392_2741280504992120668/-mr-10000
+POSTHOOK: query: select count(ds) from TEST1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1@ds=1
+POSTHOOK: Output: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-30_392_2741280504992120668/-mr-10000
+0
+PREHOOK: query: alter table TEST1 add partition (ds='2')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@test1
+POSTHOOK: query: alter table TEST1 add partition (ds='2')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@test1
+POSTHOOK: Output: default@test1@ds=2
+PREHOOK: query: explain extended 
+select count(*) from TEST1 a2 join (select max(ds) m from TEST1) b on a2.ds=b.m
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended 
+select count(*) from TEST1 a2 join (select max(ds) m from TEST1) b on a2.ds=b.m
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME TEST1) a2) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME TEST1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION max (TOK_TABLE_OR_COL ds)) m)))) b) (= (. (TOK_TABLE_OR_COL a2) ds) (. (TOK_TABLE_OR_COL b) m)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTIONSTAR count)))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-3 depends on stages: Stage-2
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        b:test1 
+          TableScan
+            alias: test1
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: ds
+                    type: string
+              outputColumnNames: ds
+              Group By Operator
+                aggregations:
+                      expr: max(ds)
+                bucketGroup: false
+                mode: hash
+                outputColumnNames: _col0
+                Reduce Output Operator
+                  sort order: 
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: string
+      Needs Tagging: false
+      Path -> Alias:
+        fake-path-metadata-only-query-default.test1{ds=1} [b:test1]
+        fake-path-metadata-only-query-default.test1{ds=2} [b:test1]
+      Path -> Partition:
+        fake-path-metadata-only-query-default.test1{ds=1} 
+          Partition
+            base file name: ds=1
+            input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 1
+            properties:
+              bucket_count -1
+              columns a,b
+              columns.types int:double
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1/ds=1
+              name default.test1
+              partition_columns ds
+              serialization.ddl struct test1 { i32 a, double b}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transient_lastDdlTime 1319740998
+            serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns a,b
+                columns.types int:double
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1
+                name default.test1
+                partition_columns ds
+                serialization.ddl struct test1 { i32 a, double b}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                transient_lastDdlTime 1319740992
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.test1
+            name: default.test1
+        fake-path-metadata-only-query-default.test1{ds=2} 
+          Partition
+            base file name: ds=2
+            input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2
+            properties:
+              bucket_count -1
+              columns a,b
+              columns.types int:double
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1/ds=2
+              name default.test1
+              partition_columns ds
+              serialization.ddl struct test1 { i32 a, double b}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transient_lastDdlTime 1319741016
+            serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns a,b
+                columns.types int:double
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1
+                name default.test1
+                partition_columns ds
+                serialization.ddl struct test1 { i32 a, double b}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                transient_lastDdlTime 1319740992
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.test1
+            name: default.test1
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: max(VALUE._col0)
+          bucketGroup: false
+          mode: mergepartial
+          outputColumnNames: _col0
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+            outputColumnNames: _col0
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              directory: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-36_629_6256982309049932082/-mr-10002
+              NumFilesPerFileSink: 1
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  properties:
+                    columns _col0
+                    columns.types string
+                    escape.delim \
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+
+  Stage: Stage-2
+    Map Reduce
+      Alias -> Map Operator Tree:
+        $INTNAME 
+            Reduce Output Operator
+              key expressions:
+                    expr: _col0
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: _col0
+                    type: string
+              tag: 1
+        a2 
+          TableScan
+            alias: a2
+            GatherStats: false
+            Reduce Output Operator
+              key expressions:
+                    expr: ds
+                    type: string
+              sort order: +
+              Map-reduce partition columns:
+                    expr: ds
+                    type: string
+              tag: 0
+      Needs Tagging: true
+      Path -> Alias:
+        file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-36_629_6256982309049932082/-mr-10002 [$INTNAME]
+        pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1/ds=1 [a2]
+        pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1/ds=2 [a2]
+      Path -> Partition:
+        file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-36_629_6256982309049932082/-mr-10002 
+          Partition
+            base file name: -mr-10002
+            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+            properties:
+              columns _col0
+              columns.types string
+              escape.delim \
+          
+              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+              properties:
+                columns _col0
+                columns.types string
+                escape.delim \
+        pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1/ds=1 
+          Partition
+            base file name: ds=1
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 1
+            properties:
+              bucket_count -1
+              columns a,b
+              columns.types int:double
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1/ds=1
+              name default.test1
+              partition_columns ds
+              serialization.ddl struct test1 { i32 a, double b}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transient_lastDdlTime 1319740998
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns a,b
+                columns.types int:double
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1
+                name default.test1
+                partition_columns ds
+                serialization.ddl struct test1 { i32 a, double b}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                transient_lastDdlTime 1319740992
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.test1
+            name: default.test1
+        pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1/ds=2 
+          Partition
+            base file name: ds=2
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2
+            properties:
+              bucket_count -1
+              columns a,b
+              columns.types int:double
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1/ds=2
+              name default.test1
+              partition_columns ds
+              serialization.ddl struct test1 { i32 a, double b}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transient_lastDdlTime 1319741016
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns a,b
+                columns.types int:double
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1
+                name default.test1
+                partition_columns ds
+                serialization.ddl struct test1 { i32 a, double b}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                transient_lastDdlTime 1319740992
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.test1
+            name: default.test1
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          condition expressions:
+            0 
+            1 
+          handleSkewJoin: false
+          Select Operator
+            Group By Operator
+              aggregations:
+                    expr: count()
+              bucketGroup: false
+              mode: hash
+              outputColumnNames: _col0
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+                directory: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-36_629_6256982309049932082/-mr-10003
+                NumFilesPerFileSink: 1
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    properties:
+                      columns _col0
+                      columns.types bigint
+                      escape.delim \
+                TotalFiles: 1
+                GatherStats: false
+                MultiFileSpray: false
+
+  Stage: Stage-3
+    Map Reduce
+      Alias -> Map Operator Tree:
+        file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-36_629_6256982309049932082/-mr-10003 
+            Reduce Output Operator
+              sort order: 
+              tag: -1
+              value expressions:
+                    expr: _col0
+                    type: bigint
+      Needs Tagging: false
+      Path -> Alias:
+        file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-36_629_6256982309049932082/-mr-10003 [file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-36_629_6256982309049932082/-mr-10003]
+      Path -> Partition:
+        file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-36_629_6256982309049932082/-mr-10003 
+          Partition
+            base file name: -mr-10003
+            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+            properties:
+              columns _col0
+              columns.types bigint
+              escape.delim \
+          
+              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+              properties:
+                columns _col0
+                columns.types bigint
+                escape.delim \
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: count(VALUE._col0)
+          bucketGroup: false
+          mode: mergepartial
+          outputColumnNames: _col0
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: bigint
+            outputColumnNames: _col0
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              directory: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-36_629_6256982309049932082/-ext-10001
+              NumFilesPerFileSink: 1
+              Stats Publishing Key Prefix: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-36_629_6256982309049932082/-ext-10001/
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    columns _col0
+                    columns.types bigint
+                    escape.delim \
+                    serialization.format 1
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: select count(*) from TEST1 a2 join (select max(ds) m from TEST1) b on a2.ds=b.m
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1@ds=1
+PREHOOK: Input: default@test1@ds=2
+PREHOOK: Output: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-36_904_6796791888883698844/-mr-10000
+POSTHOOK: query: select count(*) from TEST1 a2 join (select max(ds) m from TEST1) b on a2.ds=b.m
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1@ds=1
+POSTHOOK: Input: default@test1@ds=2
+POSTHOOK: Output: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-36_904_6796791888883698844/-mr-10000
+0
+PREHOOK: query: CREATE TABLE TEST2(A INT, B DOUBLE) partitioned by (ds string, hr string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE TEST2(A INT, B DOUBLE) partitioned by (ds string, hr string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@TEST2
+PREHOOK: query: alter table TEST2 add partition (ds='1', hr='1')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@test2
+POSTHOOK: query: alter table TEST2 add partition (ds='1', hr='1')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@test2
+POSTHOOK: Output: default@test2@ds=1/hr=1
+PREHOOK: query: alter table TEST2 add partition (ds='1', hr='2')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@test2
+POSTHOOK: query: alter table TEST2 add partition (ds='1', hr='2')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@test2
+POSTHOOK: Output: default@test2@ds=1/hr=2
+PREHOOK: query: alter table TEST2 add partition (ds='1', hr='3')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@test2
+POSTHOOK: query: alter table TEST2 add partition (ds='1', hr='3')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@test2
+POSTHOOK: Output: default@test2@ds=1/hr=3
+PREHOOK: query: explain extended select ds, count(distinct hr) from TEST2 group by ds
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select ds, count(distinct hr) from TEST2 group by ds
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME TEST2))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL ds)) (TOK_SELEXPR (TOK_FUNCTIONDI count (TOK_TABLE_OR_COL hr)))) (TOK_GROUPBY (TOK_TABLE_OR_COL ds))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        test2 
+          TableScan
+            alias: test2
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: ds
+                    type: string
+                    expr: hr
+                    type: string
+              outputColumnNames: ds, hr
+              Group By Operator
+                aggregations:
+                      expr: count(DISTINCT hr)
+                bucketGroup: false
+                keys:
+                      expr: ds
+                      type: string
+                      expr: hr
+                      type: string
+                mode: hash
+                outputColumnNames: _col0, _col1, _col2
+                Reduce Output Operator
+                  key expressions:
+                        expr: _col0
+                        type: string
+                        expr: _col1
+                        type: string
+                  sort order: ++
+                  Map-reduce partition columns:
+                        expr: _col0
+                        type: string
+                  tag: -1
+                  value expressions:
+                        expr: _col2
+                        type: bigint
+      Needs Tagging: false
+      Path -> Alias:
+        fake-path-metadata-only-query-default.test2{ds=1, hr=1} [test2]
+        fake-path-metadata-only-query-default.test2{ds=1, hr=2} [test2]
+        fake-path-metadata-only-query-default.test2{ds=1, hr=3} [test2]
+      Path -> Partition:
+        fake-path-metadata-only-query-default.test2{ds=1, hr=1} 
+          Partition
+            base file name: hr=1
+            input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 1
+              hr 1
+            properties:
+              bucket_count -1
+              columns a,b
+              columns.types int:double
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test2/ds=1/hr=1
+              name default.test2
+              partition_columns ds/hr
+              serialization.ddl struct test2 { i32 a, double b}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transient_lastDdlTime 1319741034
+            serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns a,b
+                columns.types int:double
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test2
+                name default.test2
+                partition_columns ds/hr
+                serialization.ddl struct test2 { i32 a, double b}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                transient_lastDdlTime 1319741034
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.test2
+            name: default.test2
+        fake-path-metadata-only-query-default.test2{ds=1, hr=2} 
+          Partition
+            base file name: hr=2
+            input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 1
+              hr 2
+            properties:
+              bucket_count -1
+              columns a,b
+              columns.types int:double
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test2/ds=1/hr=2
+              name default.test2
+              partition_columns ds/hr
+              serialization.ddl struct test2 { i32 a, double b}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transient_lastDdlTime 1319741034
+            serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns a,b
+                columns.types int:double
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test2
+                name default.test2
+                partition_columns ds/hr
+                serialization.ddl struct test2 { i32 a, double b}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                transient_lastDdlTime 1319741034
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.test2
+            name: default.test2
+        fake-path-metadata-only-query-default.test2{ds=1, hr=3} 
+          Partition
+            base file name: hr=3
+            input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 1
+              hr 3
+            properties:
+              bucket_count -1
+              columns a,b
+              columns.types int:double
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test2/ds=1/hr=3
+              name default.test2
+              partition_columns ds/hr
+              serialization.ddl struct test2 { i32 a, double b}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transient_lastDdlTime 1319741034
+            serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns a,b
+                columns.types int:double
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test2
+                name default.test2
+                partition_columns ds/hr
+                serialization.ddl struct test2 { i32 a, double b}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                transient_lastDdlTime 1319741034
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.test2
+            name: default.test2
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: count(DISTINCT KEY._col1:0._col0)
+          bucketGroup: false
+          keys:
+                expr: KEY._col0
+                type: string
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: _col1
+                  type: bigint
+            outputColumnNames: _col0, _col1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              directory: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-54_867_6911418128650417746/-ext-10001
+              NumFilesPerFileSink: 1
+              Stats Publishing Key Prefix: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-54_867_6911418128650417746/-ext-10001/
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    columns _col0,_col1
+                    columns.types string:bigint
+                    escape.delim \
+                    serialization.format 1
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: select ds, count(distinct hr) from TEST2 group by ds
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test2@ds=1/hr=1
+PREHOOK: Input: default@test2@ds=1/hr=2
+PREHOOK: Input: default@test2@ds=1/hr=3
+PREHOOK: Output: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-55_001_7910447671567792543/-mr-10000
+POSTHOOK: query: select ds, count(distinct hr) from TEST2 group by ds
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test2@ds=1/hr=1
+POSTHOOK: Input: default@test2@ds=1/hr=2
+POSTHOOK: Input: default@test2@ds=1/hr=3
+POSTHOOK: Output: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-43-55_001_7910447671567792543/-mr-10000
+1	3
+PREHOOK: query: explain extended select ds, count(hr) from TEST2 group by ds
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select ds, count(hr) from TEST2 group by ds
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME TEST2))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL ds)) (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL hr)))) (TOK_GROUPBY (TOK_TABLE_OR_COL ds))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        test2 
+          TableScan
+            alias: test2
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: ds
+                    type: string
+                    expr: hr
+                    type: string
+              outputColumnNames: ds, hr
+              Group By Operator
+                aggregations:
+                      expr: count(hr)
+                bucketGroup: false
+                keys:
+                      expr: ds
+                      type: string
+                mode: hash
+                outputColumnNames: _col0, _col1
+                Reduce Output Operator
+                  key expressions:
+                        expr: _col0
+                        type: string
+                  sort order: +
+                  Map-reduce partition columns:
+                        expr: _col0
+                        type: string
+                  tag: -1
+                  value expressions:
+                        expr: _col1
+                        type: bigint
+      Needs Tagging: false
+      Path -> Alias:
+        pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test2/ds=1/hr=1 [test2]
+        pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test2/ds=1/hr=2 [test2]
+        pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test2/ds=1/hr=3 [test2]
+      Path -> Partition:
+        pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test2/ds=1/hr=1 
+          Partition
+            base file name: hr=1
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 1
+              hr 1
+            properties:
+              bucket_count -1
+              columns a,b
+              columns.types int:double
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test2/ds=1/hr=1
+              name default.test2
+              partition_columns ds/hr
+              serialization.ddl struct test2 { i32 a, double b}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transient_lastDdlTime 1319741034
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns a,b
+                columns.types int:double
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test2
+                name default.test2
+                partition_columns ds/hr
+                serialization.ddl struct test2 { i32 a, double b}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                transient_lastDdlTime 1319741034
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.test2
+            name: default.test2
+        pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test2/ds=1/hr=2 
+          Partition
+            base file name: hr=2
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 1
+              hr 2
+            properties:
+              bucket_count -1
+              columns a,b
+              columns.types int:double
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test2/ds=1/hr=2
+              name default.test2
+              partition_columns ds/hr
+              serialization.ddl struct test2 { i32 a, double b}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transient_lastDdlTime 1319741034
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns a,b
+                columns.types int:double
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test2
+                name default.test2
+                partition_columns ds/hr
+                serialization.ddl struct test2 { i32 a, double b}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                transient_lastDdlTime 1319741034
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.test2
+            name: default.test2
+        pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test2/ds=1/hr=3 
+          Partition
+            base file name: hr=3
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 1
+              hr 3
+            properties:
+              bucket_count -1
+              columns a,b
+              columns.types int:double
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test2/ds=1/hr=3
+              name default.test2
+              partition_columns ds/hr
+              serialization.ddl struct test2 { i32 a, double b}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transient_lastDdlTime 1319741034
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns a,b
+                columns.types int:double
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test2
+                name default.test2
+                partition_columns ds/hr
+                serialization.ddl struct test2 { i32 a, double b}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                transient_lastDdlTime 1319741034
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.test2
+            name: default.test2
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: count(VALUE._col0)
+          bucketGroup: false
+          keys:
+                expr: KEY._col0
+                type: string
+          mode: mergepartial
+          outputColumnNames: _col0, _col1
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+                  expr: _col1
+                  type: bigint
+            outputColumnNames: _col0, _col1
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              directory: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-44-00_613_3770359973599871660/-ext-10001
+              NumFilesPerFileSink: 1
+              Stats Publishing Key Prefix: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-44-00_613_3770359973599871660/-ext-10001/
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    columns _col0,_col1
+                    columns.types string:bigint
+                    escape.delim \
+                    serialization.format 1
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: select ds, count(hr) from TEST2 group by ds
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test2@ds=1/hr=1
+PREHOOK: Input: default@test2@ds=1/hr=2
+PREHOOK: Input: default@test2@ds=1/hr=3
+PREHOOK: Output: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-44-00_729_5714201473330188446/-mr-10000
+POSTHOOK: query: select ds, count(hr) from TEST2 group by ds
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test2@ds=1/hr=1
+POSTHOOK: Input: default@test2@ds=1/hr=2
+POSTHOOK: Input: default@test2@ds=1/hr=3
+POSTHOOK: Output: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-44-00_729_5714201473330188446/-mr-10000
+PREHOOK: query: explain extended select max(ds) from TEST1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select max(ds) from TEST1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME TEST1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION max (TOK_TABLE_OR_COL ds))))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        test1 
+          TableScan
+            alias: test1
+            GatherStats: false
+            Select Operator
+              expressions:
+                    expr: ds
+                    type: string
+              outputColumnNames: ds
+              Group By Operator
+                aggregations:
+                      expr: max(ds)
+                bucketGroup: false
+                mode: hash
+                outputColumnNames: _col0
+                Reduce Output Operator
+                  sort order: 
+                  tag: -1
+                  value expressions:
+                        expr: _col0
+                        type: string
+      Needs Tagging: false
+      Path -> Alias:
+        fake-path-metadata-only-query-default.test1{ds=1} [test1]
+        fake-path-metadata-only-query-default.test1{ds=2} [test1]
+      Path -> Partition:
+        fake-path-metadata-only-query-default.test1{ds=1} 
+          Partition
+            base file name: ds=1
+            input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 1
+            properties:
+              bucket_count -1
+              columns a,b
+              columns.types int:double
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1/ds=1
+              name default.test1
+              partition_columns ds
+              serialization.ddl struct test1 { i32 a, double b}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transient_lastDdlTime 1319740998
+            serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns a,b
+                columns.types int:double
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1
+                name default.test1
+                partition_columns ds
+                serialization.ddl struct test1 { i32 a, double b}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                transient_lastDdlTime 1319740992
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.test1
+            name: default.test1
+        fake-path-metadata-only-query-default.test1{ds=2} 
+          Partition
+            base file name: ds=2
+            input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2
+            properties:
+              bucket_count -1
+              columns a,b
+              columns.types int:double
+              file.inputformat org.apache.hadoop.mapred.TextInputFormat
+              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1/ds=2
+              name default.test1
+              partition_columns ds
+              serialization.ddl struct test1 { i32 a, double b}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              transient_lastDdlTime 1319741016
+            serde: org.apache.hadoop.hive.serde2.NullStructSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns a,b
+                columns.types int:double
+                file.inputformat org.apache.hadoop.mapred.TextInputFormat
+                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                location pfile:/Users/njain/hive/hive2/build/ql/test/data/warehouse/test1
+                name default.test1
+                partition_columns ds
+                serialization.ddl struct test1 { i32 a, double b}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                transient_lastDdlTime 1319740992
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.test1
+            name: default.test1
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations:
+                expr: max(VALUE._col0)
+          bucketGroup: false
+          mode: mergepartial
+          outputColumnNames: _col0
+          Select Operator
+            expressions:
+                  expr: _col0
+                  type: string
+            outputColumnNames: _col0
+            File Output Operator
+              compressed: false
+              GlobalTableId: 0
+              directory: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-44-06_562_9209841409592787703/-ext-10001
+              NumFilesPerFileSink: 1
+              Stats Publishing Key Prefix: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-44-06_562_9209841409592787703/-ext-10001/
+              table:
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    columns _col0
+                    columns.types string
+                    escape.delim \
+                    serialization.format 1
+              TotalFiles: 1
+              GatherStats: false
+              MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: select max(ds) from TEST1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test1@ds=1
+PREHOOK: Input: default@test1@ds=2
+PREHOOK: Output: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-44-06_683_5969170878727093036/-mr-10000
+POSTHOOK: query: select max(ds) from TEST1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test1@ds=1
+POSTHOOK: Input: default@test1@ds=2
+POSTHOOK: Output: file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-10-27_11-44-06_683_5969170878727093036/-mr-10000
+2
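
A note on the plan above: for "select max(ds) from TEST1", every partition path
is replaced by a fake-path-metadata-only-query entry served by
OneNullRowInputFormat with NullStructSerDe (added below), so the aggregate is
answered from the partition values recorded in the metastore rather than by
scanning any data files. A minimal sketch of the idea in plain Java (the class
and variable names are illustrative, not Hive APIs):

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    // Each partition contributes exactly one placeholder row, so max(ds)
    // reduces to a max over the partition values taken from the metastore.
    public class MetadataOnlyMaxSketch {
      public static void main(String[] args) {
        List<String> dsValues = Arrays.asList("1", "2"); // from partition metadata
        String maxDs = Collections.max(dsValues);        // one "row" per partition
        System.out.println(maxDs);                       // prints 2, as in the run above
      }
    }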

Added: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java?rev=1195577&view=auto
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java (added)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/NullStructSerDe.java Mon Oct 31 17:30:21 2011
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Writable;
+
+/**
+ * A placeholder SerDe for cases where neither serialization nor deserialization
+ * of actual row data is needed, e.g. for metadata-only queries.
+ */
+public class NullStructSerDe implements SerDe {
+
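+  /**
+   * A struct field with no name and no inspector; the object inspector
+   * below never actually hands out field references to it.
+   */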
+  class NullStructField implements StructField {
+    @Override
+    public String getFieldName() {
+      return null;
+    }
+
+    @Override
+    public ObjectInspector getFieldObjectInspector() {
+      return null;
+    }
+
+    @Override
+    public String getFieldComment() {
+      return "";
+    }
+  }
+
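+  // Deserialization discards the input entirely: every record maps to the same
+  // null row, which is all a metadata-only query needs downstream.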
+  @Override
+  public Object deserialize(Writable blob) throws SerDeException {
+    return null;
+  }
+
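+  // The returned inspector describes an empty struct: no field references and
+  // no field data, so consumers see each row as having no columns.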
+  @Override
+  public ObjectInspector getObjectInspector() throws SerDeException {
+    return new StructObjectInspector() {
+      @Override
+      public String getTypeName() {
+        return "null";
+      }
+      @Override
+      public Category getCategory() {
+        // This inspector describes a struct with no fields.
+        return Category.STRUCT;
+      }
+      @Override
+      public StructField getStructFieldRef(String fieldName) {
+        return null;
+      }
+      @Override
+      public List<NullStructField> getAllStructFieldRefs() {
+        return new ArrayList<NullStructField>();
+      }
+      @Override
+      public Object getStructFieldData(Object data, StructField fieldRef) {
+        return null;
+      }
+      @Override
+      public List<Object> getStructFieldsDataAsList(Object data) {
+        return new ArrayList<Object>();
+      }
+    };
+  }
+
+  @Override
+  public SerDeStats getSerDeStats() {
+    return null;
+  }
+
+  @Override
+  public void initialize(Configuration conf, Properties tbl) throws SerDeException {
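+    // Nothing to configure; this SerDe is stateless.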
+  }
+
+  @Override
+  public Class<? extends Writable> getSerializedClass() {
+    return NullWritable.class;
+  }
+
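+  // Every row serializes to the NullWritable singleton, which writes no bytes.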
+  @Override
+  public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
+    return NullWritable.get();
+  }
+
+}
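
For completeness, a small illustrative driver (not part of the patch; assumes
the Hive serde and Hadoop jars on the classpath):

    import java.util.Properties;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde2.NullStructSerDe;
    import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
    import org.apache.hadoop.io.NullWritable;

    public class NullStructSerDeDemo {
      public static void main(String[] args) throws Exception {
        NullStructSerDe serde = new NullStructSerDe();
        serde.initialize(new Configuration(), new Properties()); // no-op
        Object row = serde.deserialize(NullWritable.get());      // always null
        StructObjectInspector oi =
            (StructObjectInspector) serde.getObjectInspector();
        // The struct exposes no fields, so the row carries no column data.
        System.out.println(row == null && oi.getAllStructFieldRefs().isEmpty()); // true
      }
    }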