Posted to commits@hive.apache.org by gu...@apache.org on 2013/11/05 08:01:58 UTC

svn commit: r1538880 [33/46] - in /hive/branches/tez: ./ ant/ ant/src/org/apache/hadoop/hive/ant/ beeline/ beeline/src/java/org/apache/hive/beeline/ beeline/src/main/ beeline/src/test/org/apache/hive/beeline/src/test/ cli/ common/ common/src/java/conf/...

Modified: hive/branches/tez/ql/src/test/results/clientpositive/nullscript.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/nullscript.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/nullscript.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/nullscript.q.out Tue Nov  5 07:01:32 2013
@@ -3,16 +3,16 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE nullscript(KEY STRING, VALUE STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@nullscript
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE nullscript
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE nullscript
 PREHOOK: type: LOAD
 PREHOOK: Output: default@nullscript
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE nullscript
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE nullscript
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@nullscript
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE nullscript
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE nullscript
 PREHOOK: type: LOAD
 PREHOOK: Output: default@nullscript
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nullfile.txt' INTO TABLE nullscript
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nullfile.txt' INTO TABLE nullscript
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@nullscript
 PREHOOK: query: explain

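A change repeated across most of the golden files in this revision is that the test data paths in the LOAD statements gained one extra parent level, '../data/files/...' becoming '../../data/files/...', presumably because the query tests now run from a working directory one level deeper after the build reorganization on this branch. A minimal sketch of the updated form, taken from the hunk above:

    LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE nullscript;
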
Modified: hive/branches/tez/ql/src/test/results/clientpositive/num_op_type_conv.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/num_op_type_conv.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/num_op_type_conv.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/num_op_type_conv.q.out Tue Nov  5 07:01:32 2013
@@ -12,43 +12,32 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ TOK_NULL 7)) (TOK_SELEXPR (- 1.0 TOK_NULL)) (TOK_SELEXPR (+ TOK_NULL TOK_NULL)) (TOK_SELEXPR (% (TOK_FUNCTION TOK_BIGINT 21) (TOK_FUNCTION TOK_TINYINT 5))) (TOK_SELEXPR (% (TOK_FUNCTION TOK_BIGINT 21) (TOK_FUNCTION TOK_BIGINT 21))) (TOK_SELEXPR (% 9 "3"))) (TOK_LIMIT 1)))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src 
-          TableScan
-            alias: src
-            Select Operator
-              expressions:
-                    expr: (null + 7)
-                    type: int
-                    expr: (1.0 - null)
-                    type: double
-                    expr: (null + null)
-                    type: tinyint
-                    expr: (UDFToLong(21) % UDFToByte(5))
-                    type: bigint
-                    expr: (UDFToLong(21) % UDFToLong(21))
-                    type: bigint
-                    expr: (9 % '3')
-                    type: double
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: 1
+      Processor Tree:
+        TableScan
+          alias: src
+          Select Operator
+            expressions:
+                  expr: (null + 7)
+                  type: int
+                  expr: (1.0 - null)
+                  type: double
+                  expr: (null + null)
+                  type: tinyint
+                  expr: (UDFToLong(21) % UDFToByte(5))
+                  type: bigint
+                  expr: (UDFToLong(21) % UDFToLong(21))
+                  type: bigint
+                  expr: (9 % '3')
+                  type: double
+            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+            Limit
+              ListSink
 
 
 PREHOOK: query: SELECT null + 7, 1.0 - null, null + null,

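In num_op_type_conv.q.out the standalone Map Reduce stage (Stage-1) disappears and the Select/Limit operators now appear as a Processor Tree under the Fetch Operator, ending in a ListSink, i.e. the whole query is answered by the fetch task without launching a job. This is the plan shape Hive produces when fetch task conversion applies to a simple projection with a LIMIT; a minimal sketch for reproducing it, assuming hive.fetch.task.conversion is set to 'more' (an assumption about this branch's configuration, not shown in the diff):

    -- assumption: fetch task conversion enabled for simple selects
    SET hive.fetch.task.conversion=more;
    EXPLAIN
    SELECT null + 7, 1.0 - null, null + null,
           CAST(21 AS BIGINT) % CAST(5 AS TINYINT),
           CAST(21 AS BIGINT) % CAST(21 AS BIGINT),
           9 % '3'
    FROM src LIMIT 1;
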
Modified: hive/branches/tez/ql/src/test/results/clientpositive/orc_create.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/orc_create.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/orc_create.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/orc_create.q.out Tue Nov  5 07:01:32 2013
@@ -302,10 +302,10 @@ Bucket Columns:     	[]                 
 Sort Columns:       	[]                  	 
 Storage Desc Params:	 	 
 	serialization.format	1                   
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/orc_create.txt' OVERWRITE INTO TABLE orc_create_staging
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/orc_create.txt' OVERWRITE INTO TABLE orc_create_staging
 PREHOOK: type: LOAD
 PREHOOK: Output: default@orc_create_staging
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/orc_create.txt' OVERWRITE INTO TABLE orc_create_staging
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/orc_create.txt' OVERWRITE INTO TABLE orc_create_staging
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@orc_create_staging
 PREHOOK: query: SELECT * from orc_create_staging
@@ -425,11 +425,11 @@ POSTHOOK: Lineage: orc_create_complex.ls
 POSTHOOK: Lineage: orc_create_complex.mp SIMPLE [(orc_create_staging)orc_create_staging.FieldSchema(name:mp, type:map<string,string>, comment:null), ]
 POSTHOOK: Lineage: orc_create_complex.str SIMPLE [(orc_create_staging)orc_create_staging.FieldSchema(name:str, type:string, comment:null), ]
 POSTHOOK: Lineage: orc_create_complex.strct SIMPLE [(orc_create_staging)orc_create_staging.FieldSchema(name:strct, type:struct<A:string,B:string>, comment:null), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/orc_create_people.txt'
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/orc_create_people.txt'
   OVERWRITE INTO TABLE orc_create_people_staging
 PREHOOK: type: LOAD
 PREHOOK: Output: default@orc_create_people_staging
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/orc_create_people.txt'
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/orc_create_people.txt'
   OVERWRITE INTO TABLE orc_create_people_staging
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@orc_create_people_staging

Modified: hive/branches/tez/ql/src/test/results/clientpositive/orc_dictionary_threshold.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/orc_dictionary_threshold.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/orc_dictionary_threshold.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/orc_dictionary_threshold.q.out Tue Nov  5 07:01:32 2013
@@ -66,11 +66,11 @@ POSTHOOK: query: CREATE TABLE src_thousa
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@src_thousand
 POSTHOOK: Lineage: test_orc.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1kv2.cogroup.txt' 
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1kv2.cogroup.txt' 
      INTO TABLE src_thousand
 PREHOOK: type: LOAD
 PREHOOK: Output: default@src_thousand
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1kv2.cogroup.txt' 
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1kv2.cogroup.txt' 
      INTO TABLE src_thousand
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@src_thousand

Modified: hive/branches/tez/ql/src/test/results/clientpositive/orc_diff_part_cols.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/orc_diff_part_cols.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/orc_diff_part_cols.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/orc_diff_part_cols.q.out Tue Nov  5 07:01:32 2013
@@ -15,7 +15,7 @@ PREHOOK: query: -- Create a table with o
 -- to another partition
 -- This can produce unexpected results with CombineHiveInputFormat
 
-INSERT OVERWRITE TABLE test_orc PARTITION (part = '1') SELECT key FROM src LIMIT 5
+INSERT OVERWRITE TABLE test_orc PARTITION (part = '1') SELECT key FROM src tablesample (5 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@test_orc@part=1
@@ -23,7 +23,7 @@ POSTHOOK: query: -- Create a table with 
 -- to another partition
 -- This can produce unexpected results with CombineHiveInputFormat
 
-INSERT OVERWRITE TABLE test_orc PARTITION (part = '1') SELECT key FROM src LIMIT 5
+INSERT OVERWRITE TABLE test_orc PARTITION (part = '1') SELECT key FROM src tablesample (5 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@test_orc@part=1

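Another pattern that recurs throughout these golden files: queries that populate test tables with a handful of rows via LIMIT n now use the row-count TABLESAMPLE syntax, which takes the first n rows from each input split rather than applying a limit to the query result. Side by side, from the hunk above:

    -- old form
    INSERT OVERWRITE TABLE test_orc PARTITION (part = '1') SELECT key FROM src LIMIT 5;
    -- new form
    INSERT OVERWRITE TABLE test_orc PARTITION (part = '1') SELECT key FROM src TABLESAMPLE (5 ROWS);
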
Modified: hive/branches/tez/ql/src/test/results/clientpositive/orc_empty_strings.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/orc_empty_strings.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/orc_empty_strings.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/orc_empty_strings.q.out Tue Nov  5 07:01:32 2013
@@ -9,11 +9,11 @@ STORED AS INPUTFORMAT 'org.apache.hadoop
 OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@test_orc
-PREHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT '' FROM src limit 10
+PREHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT '' FROM src tablesample (10 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@test_orc
-POSTHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT '' FROM src limit 10
+POSTHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT '' FROM src tablesample (10 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@test_orc
@@ -41,11 +41,11 @@ POSTHOOK: Lineage: test_orc.key SIMPLE [
 
 
 
-PREHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT IF (key % 3 = 0, key, '') FROM src limit 10
+PREHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT IF (key % 3 = 0, key, '') FROM src tablesample (10 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@test_orc
-POSTHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT IF (key % 3 = 0, key, '') FROM src limit 10
+POSTHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT IF (key % 3 = 0, key, '') FROM src tablesample (10 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@test_orc

Modified: hive/branches/tez/ql/src/test/results/clientpositive/orc_ends_with_nulls.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/orc_ends_with_nulls.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/orc_ends_with_nulls.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/orc_ends_with_nulls.q.out Tue Nov  5 07:01:32 2013
@@ -30,10 +30,10 @@ POSTHOOK: query: -- nulls.txt is a file 
 CREATE TABLE src_null(a STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@src_null
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nulls.txt' INTO TABLE src_null
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nulls.txt' INTO TABLE src_null
 PREHOOK: type: LOAD
 PREHOOK: Output: default@src_null
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/nulls.txt' INTO TABLE src_null
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/nulls.txt' INTO TABLE src_null
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@src_null
 PREHOOK: query: INSERT OVERWRITE TABLE test_orc SELECT a FROM src_null

Modified: hive/branches/tez/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out Tue Nov  5 07:01:32 2013
@@ -62,10 +62,10 @@ ROW FORMAT DELIMITED FIELDS TERMINATED B
 STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@staging
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/over1k' OVERWRITE INTO TABLE staging
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE staging
 PREHOOK: type: LOAD
 PREHOOK: Output: default@staging
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/over1k' OVERWRITE INTO TABLE staging
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/over1k' OVERWRITE INTO TABLE staging
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@staging
 PREHOOK: query: INSERT INTO TABLE orc_pred select * from staging

Modified: hive/branches/tez/ql/src/test/results/clientpositive/outer_join_ppr.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/outer_join_ppr.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/outer_join_ppr.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/outer_join_ppr.q.out Tue Nov  5 07:01:32 2013
@@ -80,7 +80,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.src
               numFiles 1
-              numPartitions 0
               numRows 0
               rawDataSize 0
               serialization.ddl struct src { string key, string value}
@@ -99,7 +98,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.src
                 numFiles 1
-                numPartitions 0
                 numRows 0
                 rawDataSize 0
                 serialization.ddl struct src { string key, string value}
@@ -143,15 +141,10 @@ STAGE PLANS:
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
-                numFiles 4
-                numPartitions 4
-                numRows 0
                 partition_columns ds/hr
-                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 23248
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -189,15 +182,10 @@ STAGE PLANS:
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
-                numFiles 4
-                numPartitions 4
-                numRows 0
                 partition_columns ds/hr
-                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 23248
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -235,15 +223,10 @@ STAGE PLANS:
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
-                numFiles 4
-                numPartitions 4
-                numRows 0
                 partition_columns ds/hr
-                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 23248
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -281,15 +264,10 @@ STAGE PLANS:
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
-                numFiles 4
-                numPartitions 4
-                numRows 0
                 partition_columns ds/hr
-                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 23248
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -481,7 +459,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.src
               numFiles 1
-              numPartitions 0
               numRows 0
               rawDataSize 0
               serialization.ddl struct src { string key, string value}
@@ -500,7 +477,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.src
                 numFiles 1
-                numPartitions 0
                 numRows 0
                 rawDataSize 0
                 serialization.ddl struct src { string key, string value}
@@ -544,15 +520,10 @@ STAGE PLANS:
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
-                numFiles 4
-                numPartitions 4
-                numRows 0
                 partition_columns ds/hr
-                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 23248
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -590,15 +561,10 @@ STAGE PLANS:
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
-                numFiles 4
-                numPartitions 4
-                numRows 0
                 partition_columns ds/hr
-                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 23248
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -636,15 +602,10 @@ STAGE PLANS:
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
-                numFiles 4
-                numPartitions 4
-                numRows 0
                 partition_columns ds/hr
-                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 23248
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -682,15 +643,10 @@ STAGE PLANS:
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
-                numFiles 4
-                numPartitions 4
-                numRows 0
                 partition_columns ds/hr
-                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 23248
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart

Modified: hive/branches/tez/ql/src/test/results/clientpositive/parallel_orderby.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/parallel_orderby.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/parallel_orderby.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/parallel_orderby.q.out Tue Nov  5 07:01:32 2013
@@ -3,16 +3,16 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table src5 (key string, value string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@src5
-PREHOOK: query: load data local inpath '../data/files/kv5.txt' into table src5
+PREHOOK: query: load data local inpath '../../data/files/kv5.txt' into table src5
 PREHOOK: type: LOAD
 PREHOOK: Output: default@src5
-POSTHOOK: query: load data local inpath '../data/files/kv5.txt' into table src5
+POSTHOOK: query: load data local inpath '../../data/files/kv5.txt' into table src5
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@src5
-PREHOOK: query: load data local inpath '../data/files/kv5.txt' into table src5
+PREHOOK: query: load data local inpath '../../data/files/kv5.txt' into table src5
 PREHOOK: type: LOAD
 PREHOOK: Output: default@src5
-POSTHOOK: query: load data local inpath '../data/files/kv5.txt' into table src5
+POSTHOOK: query: load data local inpath '../../data/files/kv5.txt' into table src5
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@src5
 PREHOOK: query: explain
@@ -115,7 +115,6 @@ Retention:          	0                  
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
 	numFiles            	4                   
-	numPartitions       	0                   
 	numRows             	0                   
 	rawDataSize         	0                   
 	totalSize           	560                 
@@ -224,7 +223,6 @@ Retention:          	0                  
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
 	numFiles            	1                   
-	numPartitions       	0                   
 	numRows             	0                   
 	rawDataSize         	0                   
 	totalSize           	560                 

Modified: hive/branches/tez/ql/src/test/results/clientpositive/partcols1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/partcols1.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/partcols1.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/partcols1.q.out Tue Nov  5 07:01:32 2013
@@ -4,12 +4,12 @@ POSTHOOK: query: create table test1(col1
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@test1
 PREHOOK: query: insert overwrite table test1 partition (partitionId=1)
-  select key from src limit 10
+  select key from src tablesample (10 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@test1@partitionid=1
 POSTHOOK: query: insert overwrite table test1 partition (partitionId=1)
-  select key from src limit 10
+  select key from src tablesample (10 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@test1@partitionid=1

Modified: hive/branches/tez/ql/src/test/results/clientpositive/partition_date.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/partition_date.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/partition_date.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/partition_date.q.out Tue Nov  5 07:01:32 2013
@@ -7,25 +7,25 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table partition_date_1 (key string, value string) partitioned by (dt date, region int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@partition_date_1
-PREHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=1) 
-  select * from src limit 10
+PREHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=1)
+  select * from src tablesample (10 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@partition_date_1@dt=2000-01-01/region=1
-POSTHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=1) 
-  select * from src limit 10
+POSTHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=1)
+  select * from src tablesample (10 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@partition_date_1@dt=2000-01-01/region=1
 POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=2) 
-  select * from src limit 5
+PREHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=2)
+  select * from src tablesample (5 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@partition_date_1@dt=2000-01-01/region=2
-POSTHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=2) 
-  select * from src limit 5
+POSTHOOK: query: insert overwrite table partition_date_1 partition(dt='2000-01-01', region=2)
+  select * from src tablesample (5 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@partition_date_1@dt=2000-01-01/region=2
@@ -34,12 +34,12 @@ POSTHOOK: Lineage: partition_date_1 PART
 POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: insert overwrite table partition_date_1 partition(dt='2013-08-08', region=1) 
-  select * from src limit 20
+  select * from src tablesample (20 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@partition_date_1@dt=2013-08-08/region=1
 POSTHOOK: query: insert overwrite table partition_date_1 partition(dt='2013-08-08', region=1) 
-  select * from src limit 20
+  select * from src tablesample (20 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@partition_date_1@dt=2013-08-08/region=1
@@ -50,12 +50,12 @@ POSTHOOK: Lineage: partition_date_1 PART
 POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2013-08-08,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date_1 PARTITION(dt=2013-08-08,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: insert overwrite table partition_date_1 partition(dt='2013-08-08', region=10) 
-  select * from src limit 11
+  select * from src tablesample (11 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@partition_date_1@dt=2013-08-08/region=10
 POSTHOOK: query: insert overwrite table partition_date_1 partition(dt='2013-08-08', region=10) 
-  select * from src limit 11
+  select * from src tablesample (11 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@partition_date_1@dt=2013-08-08/region=10

Modified: hive/branches/tez/ql/src/test/results/clientpositive/partition_date2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/partition_date2.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/partition_date2.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/partition_date2.q.out Tue Nov  5 07:01:32 2013
@@ -8,7 +8,7 @@ POSTHOOK: query: create table partition_
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@partition_date2_1
 PREHOOK: query: -- test date literal syntax
-from (select * from src limit 1) x
+from (select * from src tablesample (1 rows)) x
 insert overwrite table partition_date2_1 partition(dt=date '2000-01-01', region=1) select *
 insert overwrite table partition_date2_1 partition(dt=date '2000-01-01', region=2) select *
 insert overwrite table partition_date2_1 partition(dt=date '1999-01-01', region=2) select *
@@ -18,7 +18,7 @@ PREHOOK: Output: default@partition_date2
 PREHOOK: Output: default@partition_date2_1@dt=2000-01-01/region=1
 PREHOOK: Output: default@partition_date2_1@dt=2000-01-01/region=2
 POSTHOOK: query: -- test date literal syntax
-from (select * from src limit 1) x
+from (select * from src tablesample (1 rows)) x
 insert overwrite table partition_date2_1 partition(dt=date '2000-01-01', region=1) select *
 insert overwrite table partition_date2_1 partition(dt=date '2000-01-01', region=2) select *
 insert overwrite table partition_date2_1 partition(dt=date '1999-01-01', region=2) select *
@@ -80,13 +80,13 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 238	val_238	2000-01-01	2
 PREHOOK: query: -- insert overwrite
 insert overwrite table partition_date2_1 partition(dt=date '2000-01-01', region=2) 
-  select 'changed_key', 'changed_value' from src limit 2
+  select 'changed_key', 'changed_value' from src tablesample (2 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@partition_date2_1@dt=2000-01-01/region=2
 POSTHOOK: query: -- insert overwrite
 insert overwrite table partition_date2_1 partition(dt=date '2000-01-01', region=2) 
-  select 'changed_key', 'changed_value' from src limit 2
+  select 'changed_key', 'changed_value' from src tablesample (2 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@partition_date2_1@dt=2000-01-01/region=2
@@ -94,10 +94,10 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 PREHOOK: query: select * from partition_date2_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_date2_1
@@ -116,10 +116,10 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 238	val_238	1999-01-01	2
 238	val_238	2000-01-01	1
 changed_key	changed_value	2000-01-01	2
@@ -136,10 +136,10 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 PREHOOK: query: select distinct dt from partition_date2_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_date2_1
@@ -158,10 +158,10 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 1999-01-01
 2000-01-01
 PREHOOK: query: select * from partition_date2_1
@@ -182,10 +182,10 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 238	val_238	1999-01-01	2
 238	val_238	2000-01-01	1
 PREHOOK: query: -- alter table add partition
@@ -201,10 +201,10 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 PREHOOK: query: select distinct dt from partition_date2_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_date2_1
@@ -225,10 +225,10 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 1980-01-02
 1999-01-01
 2000-01-01
@@ -252,10 +252,10 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 238	val_238	1999-01-01	2
 238	val_238	2000-01-01	1
 PREHOOK: query: -- alter table drop
@@ -272,10 +272,10 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 PREHOOK: query: select distinct dt from partition_date2_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_date2_1
@@ -294,10 +294,10 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 1980-01-02
 2000-01-01
 PREHOOK: query: select * from partition_date2_1
@@ -318,10 +318,10 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 238	val_238	2000-01-01	1
 PREHOOK: query: -- alter table set serde
 alter table partition_date2_1 partition(dt=date '1980-01-02', region=3) 
@@ -340,10 +340,10 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 PREHOOK: query: -- alter table set fileformat
 alter table partition_date2_1 partition(dt=date '1980-01-02', region=3)
   set fileformat rcfile
@@ -361,10 +361,10 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 PREHOOK: query: describe extended partition_date2_1  partition(dt=date '1980-01-02', region=3)
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: describe extended partition_date2_1  partition(dt=date '1980-01-02', region=3)
@@ -373,10 +373,10 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 key                 	string              	None                
 value               	string              	None                
 dt                  	date                	None                
@@ -390,12 +390,12 @@ region              	int                
 	 	 
 #### A masked pattern was here ####
 PREHOOK: query: insert overwrite table partition_date2_1 partition(dt=date '1980-01-02', region=3)
-  select * from src limit 2
+  select * from src tablesample (2 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@partition_date2_1@dt=1980-01-02/region=3
 POSTHOOK: query: insert overwrite table partition_date2_1 partition(dt=date '1980-01-02', region=3)
-  select * from src limit 2
+  select * from src tablesample (2 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@partition_date2_1@dt=1980-01-02/region=3
@@ -405,10 +405,10 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 PREHOOK: query: select * from partition_date2_1 order by key,value,dt,region
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partition_date2_1
@@ -429,10 +429,10 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 238	val_238	1980-01-02	3
 238	val_238	2000-01-01	1
 86	val_86	1980-01-02	3
@@ -455,10 +455,10 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 PREHOOK: query: describe extended partition_date2_1 partition(dt=date '1980-01-02', region=3)
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: describe extended partition_date2_1 partition(dt=date '1980-01-02', region=3)
@@ -469,10 +469,10 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 key                 	string              	None                
 value               	string              	None                
 dt                  	date                	None                
@@ -502,10 +502,10 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 PREHOOK: query: drop table partition_date2_1
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@partition_date2_1
@@ -520,7 +520,7 @@ POSTHOOK: Lineage: partition_date2_1 PAR
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
-POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE []
+POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE []

Modified: hive/branches/tez/ql/src/test/results/clientpositive/partition_decode_name.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/partition_decode_name.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/partition_decode_name.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/partition_decode_name.q.out Tue Nov  5 07:01:32 2013
@@ -1,17 +1,17 @@
 PREHOOK: query: create table sc as select * 
-from (select '2011-01-11', '2011-01-11+14:18:26' from src limit 1 
+from (select '2011-01-11', '2011-01-11+14:18:26' from src tablesample (1 rows)
       union all 
-      select '2011-01-11', '2011-01-11+15:18:26' from src limit 1 
+      select '2011-01-11', '2011-01-11+15:18:26' from src tablesample (1 rows)
       union all 
-      select '2011-01-11', '2011-01-11+16:18:26' from src limit 1 ) s
+      select '2011-01-11', '2011-01-11+16:18:26' from src tablesample (1 rows) ) s
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 POSTHOOK: query: create table sc as select * 
-from (select '2011-01-11', '2011-01-11+14:18:26' from src limit 1 
+from (select '2011-01-11', '2011-01-11+14:18:26' from src tablesample (1 rows)
       union all 
-      select '2011-01-11', '2011-01-11+15:18:26' from src limit 1 
+      select '2011-01-11', '2011-01-11+15:18:26' from src tablesample (1 rows)
       union all 
-      select '2011-01-11', '2011-01-11+16:18:26' from src limit 1 ) s
+      select '2011-01-11', '2011-01-11+16:18:26' from src tablesample (1 rows) ) s
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@sc

Modified: hive/branches/tez/ql/src/test/results/clientpositive/partition_special_char.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/partition_special_char.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/partition_special_char.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/partition_special_char.q.out Tue Nov  5 07:01:32 2013
@@ -1,17 +1,17 @@
 PREHOOK: query: create table sc as select * 
-from (select '2011-01-11', '2011-01-11+14:18:26' from src limit 1 
+from (select '2011-01-11', '2011-01-11+14:18:26' from src tablesample (1 rows)
       union all 
-      select '2011-01-11', '2011-01-11+15:18:26' from src limit 1 
+      select '2011-01-11', '2011-01-11+15:18:26' from src tablesample (1 rows)
       union all 
-      select '2011-01-11', '2011-01-11+16:18:26' from src limit 1 ) s
+      select '2011-01-11', '2011-01-11+16:18:26' from src tablesample (1 rows) ) s
 PREHOOK: type: CREATETABLE_AS_SELECT
 PREHOOK: Input: default@src
 POSTHOOK: query: create table sc as select * 
-from (select '2011-01-11', '2011-01-11+14:18:26' from src limit 1 
+from (select '2011-01-11', '2011-01-11+14:18:26' from src tablesample (1 rows)
       union all 
-      select '2011-01-11', '2011-01-11+15:18:26' from src limit 1 
+      select '2011-01-11', '2011-01-11+15:18:26' from src tablesample (1 rows)
       union all 
-      select '2011-01-11', '2011-01-11+16:18:26' from src limit 1 ) s
+      select '2011-01-11', '2011-01-11+16:18:26' from src tablesample (1 rows) ) s
 POSTHOOK: type: CREATETABLE_AS_SELECT
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@sc

Modified: hive/branches/tez/ql/src/test/results/clientpositive/partition_type_check.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/partition_type_check.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/partition_type_check.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/partition_type_check.q.out Tue Nov  5 07:01:32 2013
@@ -5,10 +5,10 @@ POSTHOOK: query: -- begin part(string, s
 CREATE TABLE tab1 (id1 int,id2 string) PARTITIONED BY(month string,day string) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tab1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day=2)
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day=2)
 PREHOOK: type: LOAD
 PREHOOK: Output: default@tab1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day=2)
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day=2)
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@tab1
 POSTHOOK: Output: default@tab1@month=June/day=2
@@ -43,10 +43,10 @@ POSTHOOK: query: -- begin part(string, i
 CREATE TABLE tab1 (id1 int,id2 string) PARTITIONED BY(month string,day int) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tab1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2')
 PREHOOK: type: LOAD
 PREHOOK: Output: default@tab1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2')
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@tab1
 POSTHOOK: Output: default@tab1@month=June/day=2
@@ -88,10 +88,10 @@ POSTHOOK: query: alter table tab1 add pa
 POSTHOOK: type: ALTERTABLE_ADDPARTS
 POSTHOOK: Input: default@tab1
 POSTHOOK: Output: default@tab1@month=June/day=2008-01-01
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2008-01-01')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2008-01-01')
 PREHOOK: type: LOAD
 PREHOOK: Output: default@tab1@month=June/day=2008-01-01
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2008-01-01')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/T1.txt' overwrite into table tab1 PARTITION(month='June', day='2008-01-01')
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@tab1@month=June/day=2008-01-01
 PREHOOK: query: select id1, id2, day from tab1 where day='2008-01-01'

Modified: hive/branches/tez/ql/src/test/results/clientpositive/partition_varchar1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/partition_varchar1.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/partition_varchar1.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/partition_varchar1.q.out Tue Nov  5 07:01:32 2013
@@ -8,24 +8,24 @@ POSTHOOK: query: create table partition_
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@partition_varchar_1
 PREHOOK: query: insert overwrite table partition_varchar_1 partition(dt='2000-01-01', region=1)
-  select * from src limit 10
+  select * from src tablesample (10 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@partition_varchar_1@dt=2000-01-01/region=1
 POSTHOOK: query: insert overwrite table partition_varchar_1 partition(dt='2000-01-01', region=1)
-  select * from src limit 10
+  select * from src tablesample (10 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@partition_varchar_1@dt=2000-01-01/region=1
 POSTHOOK: Lineage: partition_varchar_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_varchar_1 PARTITION(dt=2000-01-01,region=1).value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: insert overwrite table partition_varchar_1 partition(dt='2000-01-01', region=2)
-  select * from src limit 5
+  select * from src tablesample (5 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@partition_varchar_1@dt=2000-01-01/region=2
 POSTHOOK: query: insert overwrite table partition_varchar_1 partition(dt='2000-01-01', region=2)
-  select * from src limit 5
+  select * from src tablesample (5 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@partition_varchar_1@dt=2000-01-01/region=2
@@ -34,12 +34,12 @@ POSTHOOK: Lineage: partition_varchar_1 P
 POSTHOOK: Lineage: partition_varchar_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_varchar_1 PARTITION(dt=2000-01-01,region=2).value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: insert overwrite table partition_varchar_1 partition(dt='2013-08-08', region=1)
-  select * from src limit 20
+  select * from src tablesample (20 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@partition_varchar_1@dt=2013-08-08/region=1
 POSTHOOK: query: insert overwrite table partition_varchar_1 partition(dt='2013-08-08', region=1)
-  select * from src limit 20
+  select * from src tablesample (20 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@partition_varchar_1@dt=2013-08-08/region=1
@@ -50,12 +50,12 @@ POSTHOOK: Lineage: partition_varchar_1 P
 POSTHOOK: Lineage: partition_varchar_1 PARTITION(dt=2013-08-08,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: partition_varchar_1 PARTITION(dt=2013-08-08,region=1).value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: insert overwrite table partition_varchar_1 partition(dt='2013-08-08', region=10)
-  select * from src limit 11
+  select * from src tablesample (11 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@partition_varchar_1@dt=2013-08-08/region=10
 POSTHOOK: query: insert overwrite table partition_varchar_1 partition(dt='2013-08-08', region=10)
-  select * from src limit 11
+  select * from src tablesample (11 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@partition_varchar_1@dt=2013-08-08/region=10

Modified: hive/branches/tez/ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/partition_wise_fileformat17.q.out Tue Nov  5 07:01:32 2013
@@ -15,10 +15,10 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE PW17(USER STRING, COMPLEXDT ARRAY<INT>) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1'
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@PW17
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17 PARTITION (YEAR='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17 PARTITION (YEAR='1')
 PREHOOK: type: LOAD
 PREHOOK: Output: default@pw17
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17 PARTITION (YEAR='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17 PARTITION (YEAR='1')
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@pw17
 POSTHOOK: Output: default@pw17@year=1
@@ -66,10 +66,10 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE PW17_2(USER STRING, COMPLEXDT ARRAY<INT>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe1'
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@PW17_2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17_2
 PREHOOK: type: LOAD
 PREHOOK: Output: default@pw17_2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17_2
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@pw17_2
 PREHOOK: query: -- Without the fix HIVE-5199, will throw cast exception via MapOperator
@@ -92,10 +92,10 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE PW17_3(USER STRING, COMPLEXDT ARRAY<ARRAY<INT> >) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3'
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@PW17_3
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_3 PARTITION (YEAR='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17_3 PARTITION (YEAR='1')
 PREHOOK: type: LOAD
 PREHOOK: Output: default@pw17_3
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_3 PARTITION (YEAR='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17_3 PARTITION (YEAR='1')
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@pw17_3
 POSTHOOK: Output: default@pw17_3@year=1
@@ -141,10 +141,10 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE PW17_4(USER STRING, COMPLEXDT ARRAY<ARRAY<INT> >) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe3'
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@PW17_4
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_4
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17_4
 PREHOOK: type: LOAD
 PREHOOK: Output: default@pw17_4
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW17_4
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW17_4
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@pw17_4
 PREHOOK: query: -- Without the fix HIVE-5285, will throw cast exception via MapOperator

Modified: hive/branches/tez/ql/src/test/results/clientpositive/partition_wise_fileformat18.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/partition_wise_fileformat18.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/partition_wise_fileformat18.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/partition_wise_fileformat18.q.out Tue Nov  5 07:01:32 2013
@@ -17,10 +17,10 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE PW18(USER STRING, COMPLEXDT UNIONTYPE<INT, DOUBLE>) PARTITIONED BY (YEAR STRING) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe5'
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@PW18
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW18 PARTITION (YEAR='1')
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW18 PARTITION (YEAR='1')
 PREHOOK: type: LOAD
 PREHOOK: Output: default@pw18
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW18 PARTITION (YEAR='1')
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW18 PARTITION (YEAR='1')
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@pw18
 POSTHOOK: Output: default@pw18@year=1
@@ -60,10 +60,10 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE PW18_2(USER STRING, COMPLEXDT UNIONTYPE<INT, DOUBLE>) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.CustomSerDe5'
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@PW18_2
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW18_2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW18_2
 PREHOOK: type: LOAD
 PREHOOK: Output: default@pw18_2
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/pw17.txt' INTO TABLE PW18_2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/pw17.txt' INTO TABLE PW18_2
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@pw18_2
 PREHOOK: query: -- Without the fix HIVE-5202, will throw unsupported data type exception