Posted to commits@hive.apache.org by br...@apache.org on 2014/08/26 19:04:23 UTC

svn commit: r1620660 - in /hive/branches/spark: itests/src/test/resources/ ql/src/test/results/clientpositive/spark/

Author: brock
Date: Tue Aug 26 17:04:23 2014
New Revision: 1620660

URL: http://svn.apache.org/r1620660
Log:
HIVE-7793 - Enable tests on Spark branch (3) [Spark Branch] (Chengxiang Li via Brock)

Added:
    hive/branches/spark/ql/src/test/results/clientpositive/spark/script_env_var1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/script_env_var2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/script_pipe.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/temp_table.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/transform1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/transform2.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/transform_ppr1.q.out
    hive/branches/spark/ql/src/test/results/clientpositive/spark/transform_ppr2.q.out
Modified:
    hive/branches/spark/itests/src/test/resources/testconfiguration.properties

Modified: hive/branches/spark/itests/src/test/resources/testconfiguration.properties
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/src/test/resources/testconfiguration.properties?rev=1620660&r1=1620659&r2=1620660&view=diff
==============================================================================
--- hive/branches/spark/itests/src/test/resources/testconfiguration.properties (original)
+++ hive/branches/spark/itests/src/test/resources/testconfiguration.properties Tue Aug 26 17:04:23 2014
@@ -389,8 +389,12 @@ spark.query.files=alter_merge_orc.q \
   sample6.q \
   sample7.q \
   sample9.q \
+  script_env_var1.q \
+  script_env_var2.q \
+  script_pipe.q \
   sort.q \
   spark_test.q \
+  temp_table.q \
   timestamp_1.q \
   timestamp_2.q \
   timestamp_3.q \
@@ -398,6 +402,10 @@ spark.query.files=alter_merge_orc.q \
   timestamp_lazy.q \
   timestamp_null.q \
   timestamp_udf.q \
+  transform_ppr1.q \
+  transform_ppr2.q \
+  transform1.q \
+  transform2.q \
   union.q \
   union10.q \
   union11.q \
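
For context, a .q file listed under spark.query.files is what opts that
test into the Spark query-test driver. Once listed, a single test can be
run from the itests tree with something like the following (the exact
module layout and profiles may vary by branch):

  mvn test -Dtest=TestSparkCliDriver -Dqfile=temp_table.q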

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/script_env_var1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/script_env_var1.q.out?rev=1620660&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/script_env_var1.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/script_env_var1.q.out Tue Aug 26 17:04:23 2014
@@ -0,0 +1,18 @@
+PREHOOK: query: -- Verifies that script operator ID environment variables have unique values
+-- in each instance of the script operator.
+SELECT count(1) FROM
+( SELECT TRANSFORM('echo $HIVE_SCRIPT_OPERATOR_ID') USING 'sh' AS key FROM src LIMIT 1 UNION ALL
+  SELECT TRANSFORM('echo $HIVE_SCRIPT_OPERATOR_ID') USING 'sh' AS key FROM src LIMIT 1 ) a GROUP BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- Verifies that script operator ID environment variables have unique values
+-- in each instance of the script operator.
+SELECT count(1) FROM
+( SELECT TRANSFORM('echo $HIVE_SCRIPT_OPERATOR_ID') USING 'sh' AS key FROM src LIMIT 1 UNION ALL
+  SELECT TRANSFORM('echo $HIVE_SCRIPT_OPERATOR_ID') USING 'sh' AS key FROM src LIMIT 1 ) a GROUP BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+1
+1

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/script_env_var2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/script_env_var2.q.out?rev=1620660&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/script_env_var2.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/script_env_var2.q.out Tue Aug 26 17:04:23 2014
@@ -0,0 +1,16 @@
+PREHOOK: query: -- Same test as script_env_var1, but test setting the variable name
+SELECT count(1) FROM
+( SELECT TRANSFORM('echo $MY_ID') USING 'sh' AS key FROM src LIMIT 1 UNION ALL
+  SELECT TRANSFORM('echo $MY_ID') USING 'sh' AS key FROM src LIMIT 1 ) a GROUP BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- Same test as script_env_var1, but test setting the variable name
+SELECT count(1) FROM
+( SELECT TRANSFORM('echo $MY_ID') USING 'sh' AS key FROM src LIMIT 1 UNION ALL
+  SELECT TRANSFORM('echo $MY_ID') USING 'sh' AS key FROM src LIMIT 1 ) a GROUP BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+1
+1
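
A note on the two script_env_var tests above: .q.out files do not echo
set commands, so the configuration that renames the variable in the
second test is not visible here. A minimal sketch of the intended usage,
assuming the companion script_env_var2.q sets the HiveConf property
hive.script.operator.id.env.var (whose default value is
HIVE_SCRIPT_OPERATOR_ID, the name used by the first test):

  -- rename the per-instance script operator ID variable, then read it back
  set hive.script.operator.id.env.var=MY_ID;
  SELECT TRANSFORM('echo $MY_ID') USING 'sh' AS key FROM src LIMIT 1;

Each script operator instance receives a distinct ID, which is why the
two UNION ALL branches group into separate keys, each with count 1.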

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/script_pipe.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/script_pipe.q.out?rev=1620660&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/script_pipe.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/script_pipe.q.out Tue Aug 26 17:04:23 2014
@@ -0,0 +1,129 @@
+PREHOOK: query: -- Tests exception in ScriptOperator.close() by passing to the operator a small amount of data
+EXPLAIN SELECT TRANSFORM(*) USING 'true' AS a, b, c FROM (SELECT * FROM src LIMIT 1) tmp
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Tests exception in ScriptOperator.close() by passing to the operator a small amount of data
+EXPLAIN SELECT TRANSFORM(*) USING 'true' AS a, b, c FROM (SELECT * FROM src LIMIT 1) tmp
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (GROUP)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                    Limit
+                      Number of rows: 1
+                      Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        sort order: 
+                        Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: string), _col1 (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: VALUE._col0 (type: string), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                Limit
+                  Number of rows: 1
+                  Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: string), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                    Transform Operator
+                      command: true
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: -- Tests exception in ScriptOperator.processOp() by passing extra data needed to fill pipe buffer
+EXPLAIN SELECT TRANSFORM(key, value, key, value, key, value, key, value, key, value, key, value) USING 'head -n 1' as a,b,c,d FROM src
+PREHOOK: type: QUERY
+POSTHOOK: query: -- Tests exception in ScriptOperator.processOp() by passing extra data needed to fill pipe buffer
+EXPLAIN SELECT TRANSFORM(key, value, key, value, key, value, key, value, key, value, key, value) USING 'head -n 1' as a,b,c,d FROM src
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                  Select Operator
+                    expressions: key (type: string), value (type: string), key (type: string), value (type: string), key (type: string), value (type: string), key (type: string), value (type: string), key (type: string), value (type: string), key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11
+                    Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                    Transform Operator
+                      command: head -n 1
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT TRANSFORM(*) USING 'true' AS a, b, c FROM (SELECT * FROM src LIMIT 1) tmp
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT TRANSFORM(*) USING 'true' AS a, b, c FROM (SELECT * FROM src LIMIT 1) tmp
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+PREHOOK: query: SELECT TRANSFORM(key, value, key, value, key, value, key, value, key, value, key, value) USING 'head -n 1' as a,b,c,d FROM src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT TRANSFORM(key, value, key, value, key, value, key, value, key, value, key, value) USING 'head -n 1' as a,b,c,d FROM src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+238	val_238	238	val_238
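
A note on why these queries succeed at all: 'true' exits without reading
stdin, and 'head -n 1' stops reading after one line, so the script
operator is left writing into a closed pipe. A minimal sketch, assuming
the companion script_pipe.q enables the HiveConf switch that tolerates a
script consuming only part of its input (off by default, in which case
the broken pipe fails the query):

  -- allow the script to exit before reading all of its input rows
  set hive.exec.script.allow.partial.consumption=true;
  SELECT TRANSFORM(*) USING 'true' AS a, b, c
  FROM (SELECT * FROM src LIMIT 1) tmp;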

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/temp_table.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/temp_table.q.out?rev=1620660&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/temp_table.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/temp_table.q.out Tue Aug 26 17:04:23 2014
@@ -0,0 +1,417 @@
+PREHOOK: query: EXPLAIN CREATE TEMPORARY TABLE foo AS SELECT * FROM src WHERE key % 2 = 0
+PREHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: query: EXPLAIN CREATE TEMPORARY TABLE foo AS SELECT * FROM src WHERE key % 2 = 0
+POSTHOOK: type: CREATETABLE_AS_SELECT
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-4 depends on stages: Stage-2, Stage-0
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: ((key % 2) = 0) (type: boolean)
+                    Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: default.foo
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-4
+      Create Table Operator:
+        Create Table
+          columns: key string, value string
+          input format: org.apache.hadoop.mapred.TextInputFormat
+#### A masked pattern was here ####
+          output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          name: foo
+          isTemporary: true
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+  Stage: Stage-0
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+PREHOOK: query: CREATE TEMPORARY TABLE foo AS SELECT * FROM src WHERE key % 2 = 0
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@src
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: CREATE TEMPORARY TABLE foo AS SELECT * FROM src WHERE key % 2 = 0
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@foo
+PREHOOK: query: EXPLAIN CREATE TEMPORARY TABLE bar AS SELECT * FROM src WHERE key % 2 = 1
+PREHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: query: EXPLAIN CREATE TEMPORARY TABLE bar AS SELECT * FROM src WHERE key % 2 = 1
+POSTHOOK: type: CREATETABLE_AS_SELECT
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-4 depends on stages: Stage-2, Stage-0
+  Stage-3 depends on stages: Stage-4
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: ((key % 2) = 1) (type: boolean)
+                    Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            name: default.bar
+
+  Stage: Stage-2
+    Dependency Collection
+
+  Stage: Stage-4
+      Create Table Operator:
+        Create Table
+          columns: key string, value string
+          input format: org.apache.hadoop.mapred.TextInputFormat
+#### A masked pattern was here ####
+          output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+          serde name: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          name: bar
+          isTemporary: true
+
+  Stage: Stage-3
+    Stats-Aggr Operator
+
+  Stage: Stage-0
+    Move Operator
+      files:
+          hdfs directory: true
+#### A masked pattern was here ####
+
+PREHOOK: query: CREATE TEMPORARY TABLE bar AS SELECT * FROM src WHERE key % 2 = 1
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@src
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: CREATE TEMPORARY TABLE bar AS SELECT * FROM src WHERE key % 2 = 1
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@bar
+PREHOOK: query: DESCRIBE foo
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@foo
+POSTHOOK: query: DESCRIBE foo
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@foo
+key                 	string              	                    
+value               	string              	                    
+PREHOOK: query: DESCRIBE bar
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@bar
+POSTHOOK: query: DESCRIBE bar
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@bar
+key                 	string              	                    
+value               	string              	                    
+PREHOOK: query: explain select * from foo limit 10
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select * from foo limit 10
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: 10
+      Processor Tree:
+        TableScan
+          alias: foo
+          Statistics: Num rows: 14 Data size: 2856 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: key (type: string), value (type: string)
+            outputColumnNames: _col0, _col1
+            Statistics: Num rows: 14 Data size: 2856 Basic stats: COMPLETE Column stats: NONE
+            Limit
+              Number of rows: 10
+              Statistics: Num rows: 10 Data size: 2040 Basic stats: COMPLETE Column stats: NONE
+              ListSink
+
+PREHOOK: query: select * from foo limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@foo
+#### A masked pattern was here ####
+POSTHOOK: query: select * from foo limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@foo
+#### A masked pattern was here ####
+238	val_238
+86	val_86
+278	val_278
+98	val_98
+484	val_484
+150	val_150
+224	val_224
+66	val_66
+128	val_128
+146	val_146
+PREHOOK: query: explain select * from (select * from foo union all select * from bar) u order by key limit 10
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select * from (select * from foo union all select * from bar) u order by key limit 10
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 3 <- Union 2 (GROUP SORT)
+        Union 2 <- Map 1 (NONE), Map 4 (NONE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: bar
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Select Operator
+                      expressions: _col0 (type: string), _col1 (type: string)
+                      outputColumnNames: _col0, _col1
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        value expressions: _col1 (type: string)
+        Map 4 
+            Map Operator Tree:
+                TableScan
+                  alias: foo
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Select Operator
+                      expressions: _col0 (type: string), _col1 (type: string)
+                      outputColumnNames: _col0, _col1
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        value expressions: _col1 (type: string)
+        Reducer 3 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 28 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+                Limit
+                  Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 2070 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 10 Data size: 2070 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+        Union 2 
+            Vertex: Union 2
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 10
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select * from (select * from foo union all select * from bar) u order by key limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@bar
+PREHOOK: Input: default@foo
+#### A masked pattern was here ####
+POSTHOOK: query: select * from (select * from foo union all select * from bar) u order by key limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@bar
+POSTHOOK: Input: default@foo
+#### A masked pattern was here ####
+0	val_0
+0	val_0
+0	val_0
+10	val_10
+100	val_100
+100	val_100
+103	val_103
+103	val_103
+104	val_104
+104	val_104
+PREHOOK: query: CREATE TEMPORARY TABLE baz LIKE foo
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@baz
+POSTHOOK: query: CREATE TEMPORARY TABLE baz LIKE foo
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@baz
+PREHOOK: query: INSERT OVERWRITE TABLE baz SELECT * from foo
+PREHOOK: type: QUERY
+PREHOOK: Input: default@foo
+PREHOOK: Output: default@baz
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: INSERT OVERWRITE TABLE baz SELECT * from foo
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@foo
+POSTHOOK: Output: default@baz
+POSTHOOK: Lineage: baz.key SIMPLE [(foo)foo.FieldSchema(name:key, type:string, comment:null), ]
+POSTHOOK: Lineage: baz.value SIMPLE [(foo)foo.FieldSchema(name:value, type:string, comment:null), ]
+PREHOOK: query: CREATE TEMPORARY TABLE bay (key string, value string) STORED AS orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@bay
+POSTHOOK: query: CREATE TEMPORARY TABLE bay (key string, value string) STORED AS orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@bay
+PREHOOK: query: select * from bay
+PREHOOK: type: QUERY
+PREHOOK: Input: default@bay
+#### A masked pattern was here ####
+POSTHOOK: query: select * from bay
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@bay
+#### A masked pattern was here ####
+PREHOOK: query: INSERT OVERWRITE TABLE bay SELECT * FROM src ORDER BY key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@bay
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: INSERT OVERWRITE TABLE bay SELECT * FROM src ORDER BY key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@bay
+POSTHOOK: Lineage: bay.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: bay.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: select * from bay limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@bay
+#### A masked pattern was here ####
+POSTHOOK: query: select * from bay limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@bay
+#### A masked pattern was here ####
+0	val_0
+0	val_0
+0	val_0
+10	val_10
+100	val_100
+100	val_100
+103	val_103
+103	val_103
+104	val_104
+104	val_104
+PREHOOK: query: SHOW TABLES
+PREHOOK: type: SHOWTABLES
+POSTHOOK: query: SHOW TABLES
+POSTHOOK: type: SHOWTABLES
+alltypesorc
+bar
+bay
+baz
+foo
+src
+src1
+src_json
+src_sequencefile
+src_thrift
+srcbucket
+srcbucket2
+srcpart
+PREHOOK: query: CREATE DATABASE two
+PREHOOK: type: CREATEDATABASE
+PREHOOK: Output: database:two
+POSTHOOK: query: CREATE DATABASE two
+POSTHOOK: type: CREATEDATABASE
+POSTHOOK: Output: database:two
+PREHOOK: query: USE two
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:two
+POSTHOOK: query: USE two
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:two
+PREHOOK: query: SHOW TABLES
+PREHOOK: type: SHOWTABLES
+POSTHOOK: query: SHOW TABLES
+POSTHOOK: type: SHOWTABLES
+PREHOOK: query: CREATE TEMPORARY TABLE foo AS SELECT * FROM default.foo
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@foo
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: CREATE TEMPORARY TABLE foo AS SELECT * FROM default.foo
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@foo
+POSTHOOK: Output: two@foo
+PREHOOK: query: SHOW TABLES
+PREHOOK: type: SHOWTABLES
+POSTHOOK: query: SHOW TABLES
+POSTHOOK: type: SHOWTABLES
+foo
+PREHOOK: query: use default
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:default
+POSTHOOK: query: use default
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:default
+PREHOOK: query: DROP DATABASE two CASCADE
+PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:two
+PREHOOK: Output: database:two
+PREHOOK: Output: two@foo
+POSTHOOK: query: DROP DATABASE two CASCADE
+POSTHOOK: type: DROPDATABASE
+POSTHOOK: Input: database:two
+POSTHOOK: Output: database:two
+POSTHOOK: Output: two@foo
+PREHOOK: query: DROP TABLE bay
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@bay
+PREHOOK: Output: default@bay
+POSTHOOK: query: DROP TABLE bay
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@bay
+POSTHOOK: Output: default@bay
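
Two things worth noting in the temp_table output above. The repeated
"[Error 30017]: Skipping stats aggregation" lines are non-fatal: the
POSTHOOK lines that follow each one show the statement still completed,
and the error only means table statistics were not collected. And the
tables foo, bar, baz, and bay are temporary (HIVE-7090), i.e.
session-scoped: they exist only for the duration of the test session,
so the explicit DROP statements at the end are test hygiene rather than
a requirement. A minimal sketch of the lifetime semantics (names are
illustrative):

  -- visible only within the creating session
  CREATE TEMPORARY TABLE scratch AS SELECT * FROM src WHERE key % 2 = 0;
  SELECT count(1) FROM scratch;
  -- on session exit, scratch is dropped automatically; no DROP TABLE needed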

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/transform1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/transform1.q.out?rev=1620660&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/transform1.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/transform1.q.out Tue Aug 26 17:04:23 2014
@@ -0,0 +1,135 @@
+PREHOOK: query: create table transform1_t1(a string, b string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@transform1_t1
+POSTHOOK: query: create table transform1_t1(a string, b string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@transform1_t1
+PREHOOK: query: EXPLAIN
+SELECT transform(*) USING 'cat' AS (col array<bigint>) FROM transform1_t1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT transform(*) USING 'cat' AS (col array<bigint>) FROM transform1_t1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: transform1_t1
+                  Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                  Select Operator
+                    expressions: a (type: string), b (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT transform(*) USING 'cat' AS (col array<bigint>) FROM transform1_t1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@transform1_t1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT transform(*) USING 'cat' AS (col array<bigint>) FROM transform1_t1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@transform1_t1
+#### A masked pattern was here ####
+PREHOOK: query: create table transform1_t2(col array<int>)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@transform1_t2
+POSTHOOK: query: create table transform1_t2(col array<int>)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@transform1_t2
+PREHOOK: query: insert overwrite table transform1_t2
+select array(1,2,3) from src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@transform1_t2
+[Error 30017]: Skipping stats aggregation by error org.apache.hadoop.hive.ql.metadata.HiveException: [Error 30015]: Stats aggregator of type counter cannot be connected to
+POSTHOOK: query: insert overwrite table transform1_t2
+select array(1,2,3) from src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@transform1_t2
+POSTHOOK: Lineage: transform1_t2.col EXPRESSION []
+PREHOOK: query: EXPLAIN
+SELECT transform('0\0021\0022') USING 'cat' AS (col array<int>) FROM transform1_t2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT transform('0\0021\0022') USING 'cat' AS (col array<int>) FROM transform1_t2
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: transform1_t2
+                  Statistics: Num rows: -1 Data size: 6 Basic stats: PARTIAL Column stats: COMPLETE
+                  Select Operator
+                    expressions: '012' (type: string)
+                    outputColumnNames: _col0
+                    Statistics: Num rows: -1 Data size: 6 Basic stats: PARTIAL Column stats: COMPLETE
+                    Transform Operator
+                      command: cat
+                      output info:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      Statistics: Num rows: -1 Data size: 6 Basic stats: PARTIAL Column stats: COMPLETE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: -1 Data size: 6 Basic stats: PARTIAL Column stats: COMPLETE
+                        table:
+                            input format: org.apache.hadoop.mapred.TextInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT transform('0\0021\0022') USING 'cat' AS (col array<int>) FROM transform1_t2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@transform1_t2
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT transform('0\0021\0022') USING 'cat' AS (col array<int>) FROM transform1_t2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@transform1_t2
+#### A masked pattern was here ####
+[0,1,2]
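
The final [0,1,2] result relies on serde defaults: a script's output
columns are tab-separated, and within a column typed as a collection,
LazySimpleSerDe's default item separator is \002 (Ctrl-B). A sketch of
the round trip exercised above, with an illustrative contrast:

  -- 'cat' echoes the constant back; the two \002 bytes split it into
  -- three elements, which deserialize as array<int>
  SELECT transform('0\0021\0022') USING 'cat' AS (col array<int>)
  FROM transform1_t2;
  -- read back as a plain string, the same output stays one field;
  -- typing the column as a collection is what triggers the \002 split
  SELECT transform('0\0021\0022') USING 'cat' AS (col string)
  FROM transform1_t2;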

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/transform2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/transform2.q.out?rev=1620660&view=auto
==============================================================================
--- hive/branches/spark/ql/src/test/results/clientpositive/spark/transform2.q.out (added)
+++ hive/branches/spark/ql/src/test/results/clientpositive/spark/transform2.q.out Tue Aug 26 17:04:23 2014
@@ -0,0 +1,11 @@
+PREHOOK: query: -- Transform with a function that has many parameters
+SELECT TRANSFORM(substr(key, 1, 2)) USING 'cat' FROM src LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- Transform with a function that has many parameters
+SELECT TRANSFORM(substr(key, 1, 2)) USING 'cat' FROM src LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+23	NULL
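
The 23/NULL pair is explained by TRANSFORM's default output schema: with
no AS clause after USING, Hive reads the script's output as two string
columns, key (everything before the first tab) and value (the rest).
'cat' emits only the two-character substring, so value comes back NULL.
A minimal sketch that names a single output column instead (illustrative):

  SELECT TRANSFORM(substr(key, 1, 2)) USING 'cat' AS (prefix)
  FROM src LIMIT 1;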

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/transform_ppr1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/transform_ppr1.q.out?rev=1620660&view=auto
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/transform_ppr1.q.out (added) and hive/branches/spark/ql/src/test/results/clientpositive/spark/transform_ppr1.q.out Tue Aug 26 17:04:23 2014 differ

Added: hive/branches/spark/ql/src/test/results/clientpositive/spark/transform_ppr2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/clientpositive/spark/transform_ppr2.q.out?rev=1620660&view=auto
==============================================================================
Files hive/branches/spark/ql/src/test/results/clientpositive/spark/transform_ppr2.q.out (added) and hive/branches/spark/ql/src/test/results/clientpositive/spark/transform_ppr2.q.out Tue Aug 26 17:04:23 2014 differ