Posted to commits@hive.apache.org by xu...@apache.org on 2015/08/04 15:04:00 UTC

hive git commit: HIVE-11430: Followup HIVE-10166: investigate and fix the two test failures (reviewed by Jason)

Repository: hive
Updated Branches:
  refs/heads/master 5bb2506f8 -> 290ff1f5a


HIVE-11430: Followup HIVE-10166: investigate and fix the two test failures (reviewed by Jason)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/290ff1f5
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/290ff1f5
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/290ff1f5

Branch: refs/heads/master
Commit: 290ff1f5af3d6852563833f83b0f64c611d3ccab
Parents: 5bb2506
Author: Xuefu Zhang <xz...@Cloudera.com>
Authored: Tue Aug 4 06:03:51 2015 -0700
Committer: Xuefu Zhang <xz...@Cloudera.com>
Committed: Tue Aug 4 06:03:51 2015 -0700

----------------------------------------------------------------------
 .../clientpositive/convert_enum_to_string.q.out |  9 ++--
 .../clientpositive/dynamic_rdd_cache.q.out      | 52 +++++++++++---------
 2 files changed, 34 insertions(+), 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/290ff1f5/ql/src/test/results/clientpositive/convert_enum_to_string.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/convert_enum_to_string.q.out b/ql/src/test/results/clientpositive/convert_enum_to_string.q.out
index ee33631..a46857f 100644
--- a/ql/src/test/results/clientpositive/convert_enum_to_string.q.out
+++ b/ql/src/test/results/clientpositive/convert_enum_to_string.q.out
@@ -37,16 +37,15 @@ my_binary           	struct<hb:binary,offset:int,isreadonly:boolean,bigendian:bo
 my_string_string_map	map<string,string>  	from deserializer   
 my_string_enum_map  	map<string,string>  	from deserializer   
 my_enum_string_map  	map<string,string>  	from deserializer   
-my_enum_struct_map  	map<string,struct<my_string:string,my_enum:string,optionals:struct<>>>	from deserializer   
+my_enum_struct_map  	map<string,struct<my_string:string,my_enum:string>>	from deserializer   
 my_enum_stringlist_map	map<string,array<string>>	from deserializer   
-my_enum_structlist_map	map<string,array<struct<my_string:string,my_enum:string,optionals:struct<>>>>	from deserializer   
+my_enum_structlist_map	map<string,array<struct<my_string:string,my_enum:string>>>	from deserializer   
 my_stringlist       	array<string>       	from deserializer   
-my_structlist       	array<struct<my_string:string,my_enum:string,optionals:struct<>>>	from deserializer   
+my_structlist       	array<struct<my_string:string,my_enum:string>>	from deserializer   
 my_enumlist         	array<string>       	from deserializer   
 my_stringset        	array<string>       	from deserializer   
 my_enumset          	array<string>       	from deserializer   
-my_structset        	array<struct<my_string:string,my_enum:string,optionals:struct<>>>	from deserializer   
-optionals           	struct<>            	from deserializer   
+my_structset        	array<struct<my_string:string,my_enum:string>>	from deserializer   
 b                   	string              	                    
 	 	 
 # Partition Information	 	 

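For context on the hunk above: the updated golden file drops the empty optionals:struct<> member from each Thrift-derived struct type, and drops the top-level optionals column of type struct<>, leaving only my_string and my_enum in the described struct types. A minimal HiveQL sketch of the kind of Thrift-backed table this test describes follows; the serde class names are an assumption recalled from Hive's test sources, not necessarily the literal convert_enum_to_string.q script:

    -- hedged sketch: a Thrift-deserialized table similar to the one in the test;
    -- the DESCRIBE output of its struct-typed columns is what the hunk above updates
    CREATE TABLE convert_enum_to_string
      PARTITIONED BY (b STRING)
      ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer'
      WITH SERDEPROPERTIES (
        'serialization.class'  = 'org.apache.hadoop.hive.serde2.thrift.test.MegaStruct',
        'serialization.format' = 'org.apache.thrift.protocol.TBinaryProtocol');

    DESCRIBE convert_enum_to_string;
    -- struct-typed columns now read, e.g., struct<my_string:string,my_enum:string>
    -- with no trailing optionals:struct<> member
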
http://git-wip-us.apache.org/repos/asf/hive/blob/290ff1f5/ql/src/test/results/clientpositive/dynamic_rdd_cache.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/dynamic_rdd_cache.q.out b/ql/src/test/results/clientpositive/dynamic_rdd_cache.q.out
index 69fe396..394af7e 100644
--- a/ql/src/test/results/clientpositive/dynamic_rdd_cache.q.out
+++ b/ql/src/test/results/clientpositive/dynamic_rdd_cache.q.out
@@ -1066,19 +1066,23 @@ STAGE PLANS:
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
           Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
-          Filter Operator
-            predicate: (CASE (_col5) WHEN (0) THEN (0) ELSE ((_col4 / _col5)) END > 1) (type: boolean)
+          Select Operator
+            expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: double), _col5 (type: double)
+            outputColumnNames: _col1, _col2, _col3, _col4, _col5
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
-            Select Operator
-              expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col5 (type: double), CASE (_col5) WHEN (0) THEN (null) ELSE ((_col4 / _col5)) END (type: double)
-              outputColumnNames: _col1, _col2, _col3, _col5, _col6
+            Filter Operator
+              predicate: (CASE (_col5) WHEN (0) THEN (0) ELSE ((_col4 / _col5)) END > 1) (type: boolean)
               Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
-              File Output Operator
-                compressed: true
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+              Select Operator
+                expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col5 (type: double), CASE (_col5) WHEN (0) THEN (null) ELSE ((_col4 / _col5)) END (type: double)
+                outputColumnNames: _col1, _col2, _col3, _col5, _col6
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                File Output Operator
+                  compressed: true
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
 
   Stage: Stage-5
     Map Reduce
@@ -1289,19 +1293,23 @@ STAGE PLANS:
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
           Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
-          Filter Operator
-            predicate: (CASE (_col5) WHEN (0) THEN (0) ELSE ((_col4 / _col5)) END > 1) (type: boolean)
+          Select Operator
+            expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col4 (type: double), _col5 (type: double)
+            outputColumnNames: _col1, _col2, _col3, _col4, _col5
             Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
-            Select Operator
-              expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col5 (type: double), CASE (_col5) WHEN (0) THEN (null) ELSE ((_col4 / _col5)) END (type: double)
-              outputColumnNames: _col1, _col2, _col3, _col5, _col6
+            Filter Operator
+              predicate: (CASE (_col5) WHEN (0) THEN (0) ELSE ((_col4 / _col5)) END > 1) (type: boolean)
               Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
-              File Output Operator
-                compressed: true
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+              Select Operator
+                expressions: _col1 (type: int), _col2 (type: int), _col3 (type: int), _col5 (type: double), CASE (_col5) WHEN (0) THEN (null) ELSE ((_col4 / _col5)) END (type: double)
+                outputColumnNames: _col1, _col2, _col3, _col5, _col6
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                File Output Operator
+                  compressed: true
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
 
   Stage: Stage-0
     Fetch Operator
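
For context on the dynamic_rdd_cache.q.out hunks: the expected plan now projects _col1 through _col5 with a Select Operator directly under the mergepartial Group By, and nests the Filter Operator (with the final projection and File Output Operator) beneath that Select, instead of filtering first. A minimal HiveQL sketch of the aggregate-plus-HAVING pattern that produces this CASE ... WHEN (0) ... plan shape follows; the table t(k1 INT, k2 INT, k3 INT, v DOUBLE) is hypothetical and this is not the literal dynamic_rdd_cache.q query:

    -- hedged sketch: a guarded ratio used both as the filter predicate and as a
    -- projected column, matching the two CASE expressions in the plan above
    SELECT k1, k2, k3,
           mean,
           CASE mean WHEN 0 THEN NULL ELSE stdev / mean END AS cov
    FROM (
      SELECT k1, k2, k3,
             STDDEV_SAMP(v) AS stdev,
             AVG(v) AS mean
      FROM t
      GROUP BY k1, k2, k3
    ) agg
    WHERE CASE mean WHEN 0 THEN 0 ELSE stdev / mean END > 1;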