Posted to commits@hive.apache.org by gu...@apache.org on 2013/11/05 08:01:58 UTC

svn commit: r1538880 [39/46] - in /hive/branches/tez: ./ ant/ ant/src/org/apache/hadoop/hive/ant/ beeline/ beeline/src/java/org/apache/hive/beeline/ beeline/src/main/ beeline/src/test/org/apache/hive/beeline/src/test/ cli/ common/ common/src/java/conf/...

Modified: hive/branches/tez/ql/src/test/results/clientpositive/udf_coalesce.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/udf_coalesce.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/udf_coalesce.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/udf_coalesce.q.out Tue Nov  5 07:01:32 2013
@@ -30,7 +30,7 @@ SELECT COALESCE(1),
        COALESCE(NULL, 2.0, 3.0),
        COALESCE(2.0, NULL, 3.0),
        COALESCE(IF(TRUE, NULL, 0), NULL)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN
 SELECT COALESCE(1),
@@ -51,73 +51,62 @@ SELECT COALESCE(1),
        COALESCE(NULL, 2.0, 3.0),
        COALESCE(2.0, NULL, 3.0),
        COALESCE(IF(TRUE, NULL, 0), NULL)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION COALESCE 1)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1 2)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL 2)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1 TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL TOK_NULL 3)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 4 TOK_NULL TOK_NULL TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE '1')) (TOK_SELEXPR (TOK_FUNCTION COALESCE '1' '2')) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL '2')) (TOK_SELEXPR (TOK_FUNCTION COALESCE '1' TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL TOK_NULL '3')) (TOK_SELEXPR (TOK_FUNCTION COALESCE '4' TOK_NULL TOK_NULL TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1.0 2.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL 2.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL 2.0 3.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 2.0 TOK_NULL 3.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE (TOK_FUNCTION IF TRUE TOK_NULL 0) TOK_NULL))) (TOK_LIMIT 1)))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION COALESCE 1)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1 2)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL 2)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1 TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL TOK_NULL 3)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 4 TOK_NULL TOK_NULL TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE '1')) (TOK_SELEXPR (TOK_FUNCTION COALESCE '1' '2')) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL '2')) (TOK_SELEXPR (TOK_FUNCTION COALESCE '1' TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL TOK_NULL '3')) (TOK_SELEXPR (TOK_FUNCTION COALESCE '4' TOK_NULL TOK_NULL TOK_NULL)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 1.0 2.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL 2.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE TOK_NULL 2.0 3.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE 2.0 TOK_NULL 3.0)) (TOK_SELEXPR (TOK_FUNCTION COALESCE (TOK_FUNCTION IF TRUE TOK_NULL 0) TOK_NULL)))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src 
-          TableScan
-            alias: src
-            Select Operator
-              expressions:
-                    expr: COALESCE(1)
-                    type: int
-                    expr: COALESCE(1,2)
-                    type: int
-                    expr: COALESCE(null,2)
-                    type: int
-                    expr: COALESCE(1,null)
-                    type: int
-                    expr: COALESCE(null,null,3)
-                    type: int
-                    expr: COALESCE(4,null,null,null)
-                    type: int
-                    expr: COALESCE('1')
-                    type: string
-                    expr: COALESCE('1','2')
-                    type: string
-                    expr: COALESCE(null,'2')
-                    type: string
-                    expr: COALESCE('1',null)
-                    type: string
-                    expr: COALESCE(null,null,'3')
-                    type: string
-                    expr: COALESCE('4',null,null,null)
-                    type: string
-                    expr: COALESCE(1.0)
-                    type: double
-                    expr: COALESCE(1.0,2.0)
-                    type: double
-                    expr: COALESCE(null,2.0)
-                    type: double
-                    expr: COALESCE(null,2.0,3.0)
-                    type: double
-                    expr: COALESCE(2.0,null,3.0)
-                    type: double
-                    expr: COALESCE(if(true, null, 0),null)
-                    type: int
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
-      limit: 1
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: src
+          Row Limit Per Split: 1
+          Select Operator
+            expressions:
+                  expr: COALESCE(1)
+                  type: int
+                  expr: COALESCE(1,2)
+                  type: int
+                  expr: COALESCE(null,2)
+                  type: int
+                  expr: COALESCE(1,null)
+                  type: int
+                  expr: COALESCE(null,null,3)
+                  type: int
+                  expr: COALESCE(4,null,null,null)
+                  type: int
+                  expr: COALESCE('1')
+                  type: string
+                  expr: COALESCE('1','2')
+                  type: string
+                  expr: COALESCE(null,'2')
+                  type: string
+                  expr: COALESCE('1',null)
+                  type: string
+                  expr: COALESCE(null,null,'3')
+                  type: string
+                  expr: COALESCE('4',null,null,null)
+                  type: string
+                  expr: COALESCE(1.0)
+                  type: double
+                  expr: COALESCE(1.0,2.0)
+                  type: double
+                  expr: COALESCE(null,2.0)
+                  type: double
+                  expr: COALESCE(null,2.0,3.0)
+                  type: double
+                  expr: COALESCE(2.0,null,3.0)
+                  type: double
+                  expr: COALESCE(if(true, null, 0),null)
+                  type: int
+            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17
+            ListSink
 
 
 PREHOOK: query: SELECT COALESCE(1),
@@ -138,7 +127,7 @@ PREHOOK: query: SELECT COALESCE(1),
        COALESCE(NULL, 2.0, 3.0),
        COALESCE(2.0, NULL, 3.0),
        COALESCE(IF(TRUE, NULL, 0), NULL)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -160,7 +149,7 @@ POSTHOOK: query: SELECT COALESCE(1),
        COALESCE(NULL, 2.0, 3.0),
        COALESCE(2.0, NULL, 3.0),
        COALESCE(IF(TRUE, NULL, 0), NULL)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -181,36 +170,25 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src_thrift))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION COALESCE ([ (. (TOK_TABLE_OR_COL src_thrift) lint) 1) 999)) (TOK_SELEXPR (TOK_FUNCTION COALESCE (. ([ (. (TOK_TABLE_OR_COL src_thrift) lintstring) 0) mystring) '999')) (TOK_SELEXPR (TOK_FUNCTION COALESCE ([ (. (TOK_TABLE_OR_COL src_thrift) mstringstring) 'key_2') '999')))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src_thrift 
-          TableScan
-            alias: src_thrift
-            Select Operator
-              expressions:
-                    expr: COALESCE(lint[1],999)
-                    type: int
-                    expr: COALESCE(lintstring[0].mystring,'999')
-                    type: string
-                    expr: COALESCE(mstringstring['key_2'],'999')
-                    type: string
-              outputColumnNames: _col0, _col1, _col2
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: src_thrift
+          Select Operator
+            expressions:
+                  expr: COALESCE(lint[1],999)
+                  type: int
+                  expr: COALESCE(lintstring[0].mystring,'999')
+                  type: string
+                  expr: COALESCE(mstringstring['key_2'],'999')
+                  type: string
+            outputColumnNames: _col0, _col1, _col2
+            ListSink
 
 
 PREHOOK: query: SELECT COALESCE(src_thrift.lint[1], 999),

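The hunks above illustrate the pattern applied throughout this commit: test queries that previously grabbed a few rows with LIMIT n now use the row-count tablesample, TABLESAMPLE (n ROWS), and the plan collapses from a Map Reduce stage plus fetch into a single Fetch Operator whose TableScan carries "Row Limit Per Split: n". A minimal HiveQL sketch of the rewrite, assuming the standard src test table (the projected columns here are illustrative):

  -- old form: the row cap is applied by a Limit operator after the scan
  SELECT COALESCE(NULL, 2), COALESCE('1', NULL) FROM src LIMIT 1;

  -- new form: the scan itself stops after one row per input split
  SELECT COALESCE(NULL, 2), COALESCE('1', NULL) FROM src TABLESAMPLE (1 ROWS);

Running EXPLAIN on the second form should show the fetch-only plan seen above (Stage-0 only, limit: -1, Row Limit Per Split: 1).
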
Modified: hive/branches/tez/ql/src/test/results/clientpositive/udf_concat.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/udf_concat.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/udf_concat.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/udf_concat.q.out Tue Nov  5 07:01:32 2013
@@ -23,7 +23,7 @@ PREHOOK: query: SELECT
   concat(1, 2),
   concat(1),
   concat('1234', 'abc', 'extra argument')
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -38,7 +38,7 @@ POSTHOOK: query: SELECT
   concat(1, 2),
   concat(1),
   concat('1234', 'abc', 'extra argument')
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -47,7 +47,7 @@ PREHOOK: query: -- binary/mixed
 SELECT
   concat(cast('ab' as binary), cast('cd' as binary)),
   concat('ab', cast('cd' as binary))
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -55,7 +55,7 @@ POSTHOOK: query: -- binary/mixed
 SELECT
   concat(cast('ab' as binary), cast('cd' as binary)),
   concat('ab', cast('cd' as binary))
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####

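For reference, concat converts non-string arguments to strings and stitches everything together, which is why the mixed-type calls above are legal. A small sketch against the same src table, using the new tablesample form; the commented results follow from that string conversion and are not copied from the masked output:

  SELECT concat('1234', 'abc', 'extra argument'),  -- '1234abcextra argument'
         concat(1, 2)                              -- '12'
  FROM src TABLESAMPLE (1 ROWS);
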
Modified: hive/branches/tez/ql/src/test/results/clientpositive/udf_concat_ws.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/udf_concat_ws.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/udf_concat_ws.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/udf_concat_ws.q.out Tue Nov  5 07:01:32 2013
@@ -46,38 +46,27 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME dest1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION concat_ws (. (TOK_TABLE_OR_COL dest1) c1) (. (TOK_TABLE_OR_COL dest1) c2) (. (TOK_TABLE_OR_COL dest1) c3))) (TOK_SELEXPR (TOK_FUNCTION concat_ws ',' (. (TOK_TABLE_OR_COL dest1) c1) (. (TOK_TABLE_OR_COL dest1) c2) (. (TOK_TABLE_OR_COL dest1) c3))) (TOK_SELEXPR (TOK_FUNCTION concat_ws TOK_NULL (. (TOK_TABLE_OR_COL dest1) c1) (. (TOK_TABLE_OR_COL dest1) c2) (. (TOK_TABLE_OR_COL dest1) c3))) (TOK_SELEXPR (TOK_FUNCTION concat_ws '**' (. (TOK_TABLE_OR_COL dest1) c1) TOK_NULL (. (TOK_TABLE_OR_COL dest1) c3))))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        dest1 
-          TableScan
-            alias: dest1
-            Select Operator
-              expressions:
-                    expr: concat_ws(c1, c2, c3)
-                    type: string
-                    expr: concat_ws(',', c1, c2, c3)
-                    type: string
-                    expr: concat_ws(null, c1, c2, c3)
-                    type: string
-                    expr: concat_ws('**', c1, null, c3)
-                    type: string
-              outputColumnNames: _col0, _col1, _col2, _col3
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: dest1
+          Select Operator
+            expressions:
+                  expr: concat_ws(c1, c2, c3)
+                  type: string
+                  expr: concat_ws(',', c1, c2, c3)
+                  type: string
+                  expr: concat_ws(null, c1, c2, c3)
+                  type: string
+                  expr: concat_ws('**', c1, null, c3)
+                  type: string
+            outputColumnNames: _col0, _col1, _col2, _col3
+            ListSink
 
 
 PREHOOK: query: SELECT concat_ws(dest1.c1, dest1.c2, dest1.c3),
@@ -106,7 +95,7 @@ SELECT concat_ws('.', array('www', 'face
        concat_ws('_', array('www', 'face'), array('book', 'com', '1234')),
        concat_ws('**', 'www', array('face'), array('book', 'com', '1234')),
        concat_ws('[]', array('www'), 'face', array('book', 'com', '1234')),
-       concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 LIMIT 1
+       concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 tablesample (1 rows)
 PREHOOK: type: QUERY
 POSTHOOK: query: -- evalutes function for array of strings
 EXPLAIN
@@ -116,54 +105,43 @@ SELECT concat_ws('.', array('www', 'face
        concat_ws('_', array('www', 'face'), array('book', 'com', '1234')),
        concat_ws('**', 'www', array('face'), array('book', 'com', '1234')),
        concat_ws('[]', array('www'), 'face', array('book', 'com', '1234')),
-       concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 LIMIT 1
+       concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Lineage: dest1.c1 SIMPLE []
 POSTHOOK: Lineage: dest1.c2 SIMPLE []
 POSTHOOK: Lineage: dest1.c3 SIMPLE []
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME dest1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION concat_ws '.' (TOK_FUNCTION array 'www' 'face' 'book' 'com') '1234')) (TOK_SELEXPR (TOK_FUNCTION concat_ws '-' 'www' (TOK_FUNCTION array 'face' 'book' 'com') '1234')) (TOK_SELEXPR (TOK_FUNCTION concat_ws 'F' 'www' (TOK_FUNCTION array 'face' 'book' 'com' '1234'))) (TOK_SELEXPR (TOK_FUNCTION concat_ws '_' (TOK_FUNCTION array 'www' 'face') (TOK_FUNCTION array 'book' 'com' '1234'))) (TOK_SELEXPR (TOK_FUNCTION concat_ws '**' 'www' (TOK_FUNCTION array 'face') (TOK_FUNCTION array 'book' 'com' '1234'))) (TOK_SELEXPR (TOK_FUNCTION concat_ws '[]' (TOK_FUNCTION array 'www') 'face' (TOK_FUNCTION array 'book' 'com' '1234'))) (TOK_SELEXPR (TOK_FUNCTION concat_ws 'AAA' (TOK_FUNCTION array 'www') (TOK_FUNCTION array 'face' 'book' 'com') '1234'))) (TOK_LIMIT 1)))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME dest1) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION concat_ws '.' (TOK_FUNCTION array 'www' 'face' 'book' 'com') '1234')) (TOK_SELEXPR (TOK_FUNCTION concat_ws '-' 'www' (TOK_FUNCTION array 'face' 'book' 'com') '1234')) (TOK_SELEXPR (TOK_FUNCTION concat_ws 'F' 'www' (TOK_FUNCTION array 'face' 'book' 'com' '1234'))) (TOK_SELEXPR (TOK_FUNCTION concat_ws '_' (TOK_FUNCTION array 'www' 'face') (TOK_FUNCTION array 'book' 'com' '1234'))) (TOK_SELEXPR (TOK_FUNCTION concat_ws '**' 'www' (TOK_FUNCTION array 'face') (TOK_FUNCTION array 'book' 'com' '1234'))) (TOK_SELEXPR (TOK_FUNCTION concat_ws '[]' (TOK_FUNCTION array 'www') 'face' (TOK_FUNCTION array 'book' 'com' '1234'))) (TOK_SELEXPR (TOK_FUNCTION concat_ws 'AAA' (TOK_FUNCTION array 'www') (TOK_FUNCTION array 'face' 'book' 'com') '1234')))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        dest1 
-          TableScan
-            alias: dest1
-            Select Operator
-              expressions:
-                    expr: concat_ws('.', array('www','face','book','com'), '1234')
-                    type: string
-                    expr: concat_ws('-', 'www', array('face','book','com'), '1234')
-                    type: string
-                    expr: concat_ws('F', 'www', array('face','book','com','1234'))
-                    type: string
-                    expr: concat_ws('_', array('www','face'), array('book','com','1234'))
-                    type: string
-                    expr: concat_ws('**', 'www', array('face'), array('book','com','1234'))
-                    type: string
-                    expr: concat_ws('[]', array('www'), 'face', array('book','com','1234'))
-                    type: string
-                    expr: concat_ws('AAA', array('www'), array('face','book','com'), '1234')
-                    type: string
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
-      limit: 1
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: dest1
+          Row Limit Per Split: 1
+          Select Operator
+            expressions:
+                  expr: concat_ws('.', array('www','face','book','com'), '1234')
+                  type: string
+                  expr: concat_ws('-', 'www', array('face','book','com'), '1234')
+                  type: string
+                  expr: concat_ws('F', 'www', array('face','book','com','1234'))
+                  type: string
+                  expr: concat_ws('_', array('www','face'), array('book','com','1234'))
+                  type: string
+                  expr: concat_ws('**', 'www', array('face'), array('book','com','1234'))
+                  type: string
+                  expr: concat_ws('[]', array('www'), 'face', array('book','com','1234'))
+                  type: string
+                  expr: concat_ws('AAA', array('www'), array('face','book','com'), '1234')
+                  type: string
+            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
+            ListSink
 
 
 PREHOOK: query: SELECT concat_ws('.', array('www', 'face', 'book', 'com'), '1234'),
@@ -172,7 +150,7 @@ PREHOOK: query: SELECT concat_ws('.', ar
        concat_ws('_', array('www', 'face'), array('book', 'com', '1234')),
        concat_ws('**', 'www', array('face'), array('book', 'com', '1234')),
        concat_ws('[]', array('www'), 'face', array('book', 'com', '1234')),
-       concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 LIMIT 1
+       concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
 #### A masked pattern was here ####
@@ -182,7 +160,7 @@ POSTHOOK: query: SELECT concat_ws('.', a
        concat_ws('_', array('www', 'face'), array('book', 'com', '1234')),
        concat_ws('**', 'www', array('face'), array('book', 'com', '1234')),
        concat_ws('[]', array('www'), 'face', array('book', 'com', '1234')),
-       concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 LIMIT 1
+       concat_ws('AAA', array('www'), array('face', 'book', 'com'), '1234') FROM dest1 tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
 #### A masked pattern was here ####
@@ -196,7 +174,7 @@ PREHOOK: query: SELECT concat_ws(NULL, a
        concat_ws(NULL, array('www', 'face'), array('book', 'com', '1234')),
        concat_ws(NULL, 'www', array('face'), array('book', 'com', '1234')),
        concat_ws(NULL, array('www'), 'face', array('book', 'com', '1234')),
-       concat_ws(NULL, array('www'), array('face', 'book', 'com'), '1234') FROM dest1 LIMIT 1
+       concat_ws(NULL, array('www'), array('face', 'book', 'com'), '1234') FROM dest1 tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
 #### A masked pattern was here ####
@@ -206,7 +184,7 @@ POSTHOOK: query: SELECT concat_ws(NULL, 
        concat_ws(NULL, array('www', 'face'), array('book', 'com', '1234')),
        concat_ws(NULL, 'www', array('face'), array('book', 'com', '1234')),
        concat_ws(NULL, array('www'), 'face', array('book', 'com', '1234')),
-       concat_ws(NULL, array('www'), array('face', 'book', 'com'), '1234') FROM dest1 LIMIT 1
+       concat_ws(NULL, array('www'), array('face', 'book', 'com'), '1234') FROM dest1 tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
 #### A masked pattern was here ####

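concat_ws takes a separator followed by strings and/or arrays of strings and joins every element with that separator, which is what the array-valued calls above exercise. A short sketch under the same assumptions (src table, tablesample form); the commented value follows from that joining rule rather than from the masked test output:

  SELECT concat_ws('.', array('www', 'face', 'book', 'com'), '1234')  -- 'www.face.book.com.1234'
  FROM src TABLESAMPLE (1 ROWS);
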
Modified: hive/branches/tez/ql/src/test/results/clientpositive/udf_conv.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/udf_conv.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/udf_conv.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/udf_conv.q.out Tue Nov  5 07:01:32 2013
@@ -22,7 +22,7 @@ SELECT
   conv('22', 10, 10),
   conv('110011', 2, 16),
   conv('facebook', 36, 16)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -34,7 +34,7 @@ SELECT
   conv('22', 10, 10),
   conv('110011', 2, 16),
   conv('facebook', 36, 16)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -46,7 +46,7 @@ SELECT
   conv('1011', 2, -16),
   conv('-1', 10, 16),
   conv('-15', 10, 16)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -57,7 +57,7 @@ SELECT
   conv('1011', 2, -16),
   conv('-1', 10, 16),
   conv('-15', 10, 16)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -69,7 +69,7 @@ SELECT
   conv('9223372036854775807', 36, -16),
   conv('-9223372036854775807', 36, 16),
   conv('-9223372036854775807', 36, -16)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -80,7 +80,7 @@ SELECT
   conv('9223372036854775807', 36, -16),
   conv('-9223372036854775807', 36, 16),
   conv('-9223372036854775807', 36, -16)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -93,7 +93,7 @@ SELECT
   conv('131', 1, 5),
   conv('515', 5, 100),
   conv('10', -2, 2)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -105,7 +105,7 @@ SELECT
   conv('131', 1, 5),
   conv('515', 5, 100),
   conv('10', -2, 2)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -116,7 +116,7 @@ SELECT
   conv(4521, 10, 36),
   conv(22, 10, 10),
   conv(110011, 2, 16)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -126,7 +126,7 @@ SELECT
   conv(4521, 10, 36),
   conv(22, 10, 10),
   conv(110011, 2, 16)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -136,7 +136,7 @@ PREHOOK: query: SELECT
   conv(1011, 2, -16),
   conv(-1, 10, 16),
   conv(-15, 10, 16)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -145,7 +145,7 @@ POSTHOOK: query: SELECT
   conv(1011, 2, -16),
   conv(-1, 10, 16),
   conv(-15, 10, 16)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -155,7 +155,7 @@ PREHOOK: query: SELECT
   conv(9223372036854775807, 36, -16),
   conv(-9223372036854775807, 36, 16),
   conv(-9223372036854775807, 36, -16)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -164,7 +164,7 @@ POSTHOOK: query: SELECT
   conv(9223372036854775807, 36, -16),
   conv(-9223372036854775807, 36, 16),
   conv(-9223372036854775807, 36, -16)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -174,7 +174,7 @@ PREHOOK: query: SELECT
   conv(131, 1, 5),
   conv(515, 5, 100),
   conv('10', -2, 2)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -183,7 +183,7 @@ POSTHOOK: query: SELECT
   conv(131, 1, 5),
   conv(515, 5, 100),
   conv('10', -2, 2)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -192,7 +192,7 @@ PREHOOK: query: -- Make sure that state 
 
 SELECT conv(key, 10, 16),
        conv(key, 16, 10)
-FROM src LIMIT 3
+FROM src tablesample (3 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -200,7 +200,7 @@ POSTHOOK: query: -- Make sure that state
 
 SELECT conv(key, 10, 16),
        conv(key, 16, 10)
-FROM src LIMIT 3
+FROM src tablesample (3 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####

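conv(num, from_base, to_base) converts num from one radix to another, which is all the queries above vary. A small sketch under the same assumptions; the commented values are plain radix arithmetic, not the masked test output:

  SELECT conv('110011', 2, 10),  -- '51'  (binary 110011 is decimal 51)
         conv('22', 10, 10)      -- '22'  (same base in and out)
  FROM src TABLESAMPLE (1 ROWS);
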
Modified: hive/branches/tez/ql/src/test/results/clientpositive/udf_cos.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/udf_cos.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/udf_cos.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/udf_cos.q.out Tue Nov  5 07:01:32 2013
@@ -12,23 +12,23 @@ Example:
    > SELECT cos(0) FROM src LIMIT 1;
   1
 PREHOOK: query: SELECT cos(null)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
 POSTHOOK: query: SELECT cos(null)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 NULL
 PREHOOK: query: SELECT cos(0.98), cos(1.57), cos(-0.5)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
 POSTHOOK: query: SELECT cos(0.98), cos(1.57), cos(-0.5)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####

Modified: hive/branches/tez/ql/src/test/results/clientpositive/udf_degrees.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/udf_degrees.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/udf_degrees.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/udf_degrees.q.out Tue Nov  5 07:01:32 2013
@@ -1,47 +1,36 @@
-PREHOOK: query: explain 
-select degrees(PI()) FROM src LIMIT 1
+PREHOOK: query: explain
+select degrees(PI()) FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
-POSTHOOK: query: explain 
-select degrees(PI()) FROM src LIMIT 1
+POSTHOOK: query: explain
+select degrees(PI()) FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION degrees (TOK_FUNCTION PI)))) (TOK_LIMIT 1)))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION degrees (TOK_FUNCTION PI))))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src 
-          TableScan
-            alias: src
-            Select Operator
-              expressions:
-                    expr: degrees(pi())
-                    type: double
-              outputColumnNames: _col0
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
-      limit: 1
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: src
+          Row Limit Per Split: 1
+          Select Operator
+            expressions:
+                  expr: degrees(pi())
+                  type: double
+            outputColumnNames: _col0
+            ListSink
 
 
-PREHOOK: query: select degrees(PI()) FROM src LIMIT 1
+PREHOOK: query: select degrees(PI()) FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-POSTHOOK: query: select degrees(PI()) FROM src LIMIT 1
+POSTHOOK: query: select degrees(PI()) FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -61,49 +50,38 @@ Example:
   -1
 
 PREHOOK: query: explain 
-select degrees(PI()) FROM src LIMIT 1
+select degrees(PI()) FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 POSTHOOK: query: explain 
-select degrees(PI()) FROM src LIMIT 1
+select degrees(PI()) FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION degrees (TOK_FUNCTION PI)))) (TOK_LIMIT 1)))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION degrees (TOK_FUNCTION PI))))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src 
-          TableScan
-            alias: src
-            Select Operator
-              expressions:
-                    expr: degrees(pi())
-                    type: double
-              outputColumnNames: _col0
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
-      limit: 1
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: src
+          Row Limit Per Split: 1
+          Select Operator
+            expressions:
+                  expr: degrees(pi())
+                  type: double
+            outputColumnNames: _col0
+            ListSink
 
 
-PREHOOK: query: select degrees(PI()) FROM src LIMIT 1
+PREHOOK: query: select degrees(PI()) FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-POSTHOOK: query: select degrees(PI()) FROM src LIMIT 1
+POSTHOOK: query: select degrees(PI()) FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####

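degrees converts a radian value to degrees, so the test above pins the one value that is easy to check by hand: degrees(PI()) is 180. A one-line sketch under the same assumptions:

  SELECT degrees(PI())  -- 180.0 (pi radians is a half turn)
  FROM src TABLESAMPLE (1 ROWS);
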
Modified: hive/branches/tez/ql/src/test/results/clientpositive/udf_div.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/udf_div.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/udf_div.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/udf_div.q.out Tue Nov  5 07:01:32 2013
@@ -11,11 +11,11 @@ a div b - Divide a by b rounded to the l
 Example:
   > SELECT 3 div 2 FROM src LIMIT 1;
   1
-PREHOOK: query: SELECT 3 DIV 2 FROM SRC LIMIT 1
+PREHOOK: query: SELECT 3 DIV 2 FROM SRC tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT 3 DIV 2 FROM SRC LIMIT 1
+POSTHOOK: query: SELECT 3 DIV 2 FROM SRC tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####

Modified: hive/branches/tez/ql/src/test/results/clientpositive/udf_divide.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/udf_divide.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/udf_divide.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/udf_divide.q.out Tue Nov  5 07:01:32 2013
@@ -11,11 +11,11 @@ a / b - Divide a by b
 Example:
   > SELECT 3 / 2 FROM src LIMIT 1;
   1.5
-PREHOOK: query: SELECT 3 / 2 FROM SRC LIMIT 1
+PREHOOK: query: SELECT 3 / 2 FROM SRC tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT 3 / 2 FROM SRC LIMIT 1
+POSTHOOK: query: SELECT 3 / 2 FROM SRC tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####

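The two files above contrast integer and ordinary division: per the DESCRIBE output quoted in the hunks, div divides and rounds to the long integer, while / returns the exact quotient as a double. A combined sketch reusing the documented example values:

  SELECT 3 DIV 2,  -- 1
         3 / 2     -- 1.5
  FROM src TABLESAMPLE (1 ROWS);
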
Modified: hive/branches/tez/ql/src/test/results/clientpositive/udf_elt.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/udf_elt.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/udf_elt.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/udf_elt.q.out Tue Nov  5 07:01:32 2013
@@ -23,7 +23,7 @@ SELECT elt(2, 'abc', 'defg'),
        elt(null, 'abc', 'defg'),
        elt(0, 'abc', 'defg'),
        elt(3, 'abc', 'defg')
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 POSTHOOK: query: EXPLAIN
 SELECT elt(2, 'abc', 'defg'),
@@ -37,59 +37,48 @@ SELECT elt(2, 'abc', 'defg'),
        elt(null, 'abc', 'defg'),
        elt(0, 'abc', 'defg'),
        elt(3, 'abc', 'defg')
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION elt 2 'abc' 'defg')) (TOK_SELEXPR (TOK_FUNCTION elt 3 'aa' 'bb' 'cc' 'dd' 'ee' 'ff' 'gg')) (TOK_SELEXPR (TOK_FUNCTION elt '1' 'abc' 'defg')) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_TINYINT '2'))) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_SMALLINT '12345'))) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_BIGINT '123456789012'))) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_FLOAT 1.25))) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_DOUBLE 16.0))) (TOK_SELEXPR (TOK_FUNCTION elt TOK_NULL 'abc' 'defg')) (TOK_SELEXPR (TOK_FUNCTION elt 0 'abc' 'defg')) (TOK_SELEXPR (TOK_FUNCTION elt 3 'abc' 'defg'))) (TOK_LIMIT 1)))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION elt 2 'abc' 'defg')) (TOK_SELEXPR (TOK_FUNCTION elt 3 'aa' 'bb' 'cc' 'dd' 'ee' 'ff' 'gg')) (TOK_SELEXPR (TOK_FUNCTION elt '1' 'abc' 'defg')) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_TINYINT '2'))) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_SMALLINT '12345'))) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_BIGINT '123456789012'))) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_FLOAT 1.25))) (TOK_SELEXPR (TOK_FUNCTION elt 2 'aa' (TOK_FUNCTION TOK_DOUBLE 16.0))) (TOK_SELEXPR (TOK_FUNCTION elt TOK_NULL 'abc' 'defg')) (TOK_SELEXPR (TOK_FUNCTION elt 0 'abc' 'defg')) (TOK_SELEXPR (TOK_FUNCTION elt 3 'abc' 'defg')))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src 
-          TableScan
-            alias: src
-            Select Operator
-              expressions:
-                    expr: elt(2, 'abc', 'defg')
-                    type: string
-                    expr: elt(3, 'aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg')
-                    type: string
-                    expr: elt('1', 'abc', 'defg')
-                    type: string
-                    expr: elt(2, 'aa', UDFToByte('2'))
-                    type: string
-                    expr: elt(2, 'aa', UDFToShort('12345'))
-                    type: string
-                    expr: elt(2, 'aa', UDFToLong('123456789012'))
-                    type: string
-                    expr: elt(2, 'aa', UDFToFloat(1.25))
-                    type: string
-                    expr: elt(2, 'aa', 16.0)
-                    type: string
-                    expr: elt(null, 'abc', 'defg')
-                    type: string
-                    expr: elt(0, 'abc', 'defg')
-                    type: string
-                    expr: elt(3, 'abc', 'defg')
-                    type: string
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
-              Limit
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
-      limit: 1
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: src
+          Row Limit Per Split: 1
+          Select Operator
+            expressions:
+                  expr: elt(2, 'abc', 'defg')
+                  type: string
+                  expr: elt(3, 'aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg')
+                  type: string
+                  expr: elt('1', 'abc', 'defg')
+                  type: string
+                  expr: elt(2, 'aa', UDFToByte('2'))
+                  type: string
+                  expr: elt(2, 'aa', UDFToShort('12345'))
+                  type: string
+                  expr: elt(2, 'aa', UDFToLong('123456789012'))
+                  type: string
+                  expr: elt(2, 'aa', UDFToFloat(1.25))
+                  type: string
+                  expr: elt(2, 'aa', 16.0)
+                  type: string
+                  expr: elt(null, 'abc', 'defg')
+                  type: string
+                  expr: elt(0, 'abc', 'defg')
+                  type: string
+                  expr: elt(3, 'abc', 'defg')
+                  type: string
+            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
+            ListSink
 
 
 PREHOOK: query: SELECT elt(2, 'abc', 'defg'),
@@ -103,7 +92,7 @@ PREHOOK: query: SELECT elt(2, 'abc', 'de
        elt(null, 'abc', 'defg'),
        elt(0, 'abc', 'defg'),
        elt(3, 'abc', 'defg')
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -118,7 +107,7 @@ POSTHOOK: query: SELECT elt(2, 'abc', 'd
        elt(null, 'abc', 'defg'),
        elt(0, 'abc', 'defg'),
        elt(3, 'abc', 'defg')
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####

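elt(n, s1, s2, ...) returns the n-th of the string arguments that follow the index, which the calls above probe with in-range, zero, NULL, and out-of-range indexes. A short sketch under the same assumptions; the zero and NULL index cases are expected to yield NULL:

  SELECT elt(2, 'abc', 'defg'),    -- 'defg'
         elt(0, 'abc', 'defg'),    -- NULL (index out of range)
         elt(null, 'abc', 'defg')  -- NULL
  FROM src TABLESAMPLE (1 ROWS);
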
Modified: hive/branches/tez/ql/src/test/results/clientpositive/udf_equal.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/udf_equal.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/udf_equal.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/udf_equal.q.out Tue Nov  5 07:01:32 2013
@@ -20,11 +20,11 @@ POSTHOOK: query: DESCRIBE FUNCTION EXTEN
 POSTHOOK: type: DESCFUNCTION
 a == b - Returns TRUE if a equals b and false otherwise
 Synonyms: =
-PREHOOK: query: SELECT true=false, false=true, false=false, true=true, NULL=NULL, true=NULL, NULL=true, false=NULL, NULL=false FROM src LIMIT 1
+PREHOOK: query: SELECT true=false, false=true, false=false, true=true, NULL=NULL, true=NULL, NULL=true, false=NULL, NULL=false FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT true=false, false=true, false=false, true=true, NULL=NULL, true=NULL, NULL=true, false=NULL, NULL=false FROM src LIMIT 1
+POSTHOOK: query: SELECT true=false, false=true, false=false, true=true, NULL=NULL, true=NULL, NULL=true, false=NULL, NULL=false FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -39,11 +39,11 @@ PREHOOK: type: DESCFUNCTION
 POSTHOOK: query: DESCRIBE FUNCTION EXTENDED <=>
 POSTHOOK: type: DESCFUNCTION
 a <=> b - Returns same result with EQUAL(=) operator for non-null operands, but returns TRUE if both are NULL, FALSE if one of the them is NULL
-PREHOOK: query: SELECT true<=>false, false<=>true, false<=>false, true<=>true, NULL<=>NULL, true<=>NULL, NULL<=>true, false<=>NULL, NULL<=>false FROM src LIMIT 1
+PREHOOK: query: SELECT true<=>false, false<=>true, false<=>false, true<=>true, NULL<=>NULL, true<=>NULL, NULL<=>true, false<=>NULL, NULL<=>false FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT true<=>false, false<=>true, false<=>false, true<=>true, NULL<=>NULL, true<=>NULL, NULL<=>true, false<=>NULL, NULL<=>false FROM src LIMIT 1
+POSTHOOK: query: SELECT true<=>false, false<=>true, false<=>false, true<=>true, NULL<=>NULL, true<=>NULL, NULL<=>true, false<=>NULL, NULL<=>false FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####

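The udf_equal hunks exercise plain = next to the null-safe <=>: per the DESCRIBE output quoted above, <=> behaves like = for non-null operands but returns TRUE when both sides are NULL and FALSE when exactly one side is. A short sketch under the same assumptions:

  SELECT NULL = NULL,    -- NULL  (= propagates NULL)
         NULL <=> NULL,  -- true
         true <=> NULL   -- false
  FROM src TABLESAMPLE (1 ROWS);
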
Modified: hive/branches/tez/ql/src/test/results/clientpositive/udf_explode.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/udf_explode.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/udf_explode.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/udf_explode.q.out Tue Nov  5 07:01:32 2013
@@ -8,12 +8,12 @@ PREHOOK: type: DESCFUNCTION
 POSTHOOK: query: DESCRIBE FUNCTION EXTENDED explode
 POSTHOOK: type: DESCFUNCTION
 explode(a) - separates the elements of array a into multiple rows, or the elements of a map into multiple rows and columns 
-PREHOOK: query: EXPLAIN EXTENDED SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3
+PREHOOK: query: EXPLAIN EXTENDED SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN EXTENDED SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3
+POSTHOOK: query: EXPLAIN EXTENDED SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION array 1 2 3)) myCol)) (TOK_LIMIT 3)))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION array 1 2 3)) myCol))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -26,6 +26,7 @@ STAGE PLANS:
         src 
           TableScan
             alias: src
+            Row Limit Per Split: 1
             GatherStats: false
             Select Operator
               expressions:
@@ -34,27 +35,26 @@ STAGE PLANS:
               outputColumnNames: _col0
               UDTF Operator
                 function name: explode
-                Limit
-                  File Output Operator
-                    compressed: false
-                    GlobalTableId: 0
-#### A masked pattern was here ####
-                    NumFilesPerFileSink: 1
-#### A masked pattern was here ####
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        properties:
-                          columns col
-                          columns.types int
-                          escape.delim \
-                          hive.serialization.extend.nesting.levels true
-                          serialization.format 1
-                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    TotalFiles: 1
-                    GatherStats: false
-                    MultiFileSpray: false
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+#### A masked pattern was here ####
+                  NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      properties:
+                        columns col
+                        columns.types int
+                        escape.delim \
+                        hive.serialization.extend.nesting.levels true
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  TotalFiles: 1
+                  GatherStats: false
+                  MultiFileSpray: false
       Path -> Alias:
 #### A masked pattern was here ####
       Path -> Partition:
@@ -70,7 +70,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.src
               numFiles 1
-              numPartitions 0
               numRows 0
               rawDataSize 0
               serialization.ddl struct src { string key, string value}
@@ -89,7 +88,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.src
                 numFiles 1
-                numPartitions 0
                 numRows 0
                 rawDataSize 0
                 serialization.ddl struct src { string key, string value}
@@ -105,19 +103,18 @@ STAGE PLANS:
 
   Stage: Stage-0
     Fetch Operator
-      limit: 3
+      limit: -1
 
 
-PREHOOK: query: EXPLAIN EXTENDED SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3) a GROUP BY a.myCol
+PREHOOK: query: EXPLAIN EXTENDED SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows)) a GROUP BY a.myCol
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN EXTENDED SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3) a GROUP BY a.myCol
+POSTHOOK: query: EXPLAIN EXTENDED SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows)) a GROUP BY a.myCol
 POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION array 1 2 3)) myCol)) (TOK_LIMIT 3))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) myCol)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL a) myCol))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION array 1 2 3)) myCol)))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) myCol)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL a) myCol))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
   Stage-0 is a root stage
 
 STAGE PLANS:
@@ -127,6 +124,7 @@ STAGE PLANS:
         a:src 
           TableScan
             alias: src
+            Row Limit Per Split: 1
             GatherStats: false
             Select Operator
               expressions:
@@ -135,13 +133,32 @@ STAGE PLANS:
               outputColumnNames: _col0
               UDTF Operator
                 function name: explode
-                Limit
-                  Reduce Output Operator
-                    sort order: 
-                    tag: -1
-                    value expressions:
+                Select Operator
+                  expressions:
+                        expr: col
+                        type: int
+                  outputColumnNames: col
+                  Group By Operator
+                    aggregations:
+                          expr: count(1)
+                    bucketGroup: false
+                    keys:
                           expr: col
                           type: int
+                    mode: hash
+                    outputColumnNames: _col0, _col1
+                    Reduce Output Operator
+                      key expressions:
+                            expr: _col0
+                            type: int
+                      sort order: +
+                      Map-reduce partition columns:
+                            expr: _col0
+                            type: int
+                      tag: -1
+                      value expressions:
+                            expr: _col1
+                            type: bigint
       Path -> Alias:
 #### A masked pattern was here ####
       Path -> Partition:
@@ -157,7 +174,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.src
               numFiles 1
-              numPartitions 0
               numRows 0
               rawDataSize 0
               serialization.ddl struct src { string key, string value}
@@ -176,7 +192,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.src
                 numFiles 1
-                numPartitions 0
                 numRows 0
                 rawDataSize 0
                 serialization.ddl struct src { string key, string value}
@@ -191,85 +206,6 @@ STAGE PLANS:
         /src [a:src]
       Needs Tagging: false
       Reduce Operator Tree:
-        Extract
-          Limit
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: int
-              outputColumnNames: _col0
-              Group By Operator
-                aggregations:
-                      expr: count(1)
-                bucketGroup: false
-                keys:
-                      expr: _col0
-                      type: int
-                mode: hash
-                outputColumnNames: _col0, _col1
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      properties:
-                        columns _col0,_col1
-                        columns.types int,bigint
-                        escape.delim \
-                        serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-                  TotalFiles: 1
-                  GatherStats: false
-                  MultiFileSpray: false
-
-  Stage: Stage-2
-    Map Reduce
-      Alias -> Map Operator Tree:
-#### A masked pattern was here ####
-          TableScan
-            GatherStats: false
-            Reduce Output Operator
-              key expressions:
-                    expr: _col0
-                    type: int
-              sort order: +
-              Map-reduce partition columns:
-                    expr: _col0
-                    type: int
-              tag: -1
-              value expressions:
-                    expr: _col1
-                    type: bigint
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: -mr-10002
-            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-            properties:
-              columns _col0,_col1
-              columns.types int,bigint
-              escape.delim \
-              serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-            serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-          
-              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-              properties:
-                columns _col0,_col1
-                columns.types int,bigint
-                escape.delim \
-                serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-              serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-      Needs Tagging: false
-      Reduce Operator Tree:
         Group By Operator
           aggregations:
                 expr: count(VALUE._col0)
@@ -312,45 +248,45 @@ STAGE PLANS:
       limit: -1
 
 
-PREHOOK: query: SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3
+PREHOOK: query: SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3
+POSTHOOK: query: SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 1
 2
 3
-PREHOOK: query: SELECT explode(array(1,2,3)) AS (myCol) FROM src LIMIT 3
+PREHOOK: query: SELECT explode(array(1,2,3)) AS (myCol) FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT explode(array(1,2,3)) AS (myCol) FROM src LIMIT 3
+POSTHOOK: query: SELECT explode(array(1,2,3)) AS (myCol) FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 1
 2
 3
-PREHOOK: query: SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3) a GROUP BY a.myCol
+PREHOOK: query: SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows)) a GROUP BY a.myCol
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3) a GROUP BY a.myCol
+POSTHOOK: query: SELECT a.myCol, count(1) FROM (SELECT explode(array(1,2,3)) AS myCol FROM src tablesample (1 rows)) a GROUP BY a.myCol
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 1	1
 2	1
 3	1
-PREHOOK: query: EXPLAIN EXTENDED SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3
+PREHOOK: query: EXPLAIN EXTENDED SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN EXTENDED SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3
+POSTHOOK: query: EXPLAIN EXTENDED SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION map 1 'one' 2 'two' 3 'three')) key val)) (TOK_LIMIT 3)))
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION map 1 'one' 2 'two' 3 'three')) key val))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
@@ -363,6 +299,7 @@ STAGE PLANS:
         src 
           TableScan
             alias: src
+            Row Limit Per Split: 1
             GatherStats: false
             Select Operator
               expressions:
@@ -371,27 +308,26 @@ STAGE PLANS:
               outputColumnNames: _col0
               UDTF Operator
                 function name: explode
-                Limit
-                  File Output Operator
-                    compressed: false
-                    GlobalTableId: 0
-#### A masked pattern was here ####
-                    NumFilesPerFileSink: 1
-#### A masked pattern was here ####
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        properties:
-                          columns key,value
-                          columns.types int:string
-                          escape.delim \
-                          hive.serialization.extend.nesting.levels true
-                          serialization.format 1
-                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    TotalFiles: 1
-                    GatherStats: false
-                    MultiFileSpray: false
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+#### A masked pattern was here ####
+                  NumFilesPerFileSink: 1
+#### A masked pattern was here ####
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      properties:
+                        columns key,value
+                        columns.types int:string
+                        escape.delim \
+                        hive.serialization.extend.nesting.levels true
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  TotalFiles: 1
+                  GatherStats: false
+                  MultiFileSpray: false
       Path -> Alias:
 #### A masked pattern was here ####
       Path -> Partition:
@@ -407,7 +343,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.src
               numFiles 1
-              numPartitions 0
               numRows 0
               rawDataSize 0
               serialization.ddl struct src { string key, string value}
@@ -426,7 +361,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.src
                 numFiles 1
-                numPartitions 0
                 numRows 0
                 rawDataSize 0
                 serialization.ddl struct src { string key, string value}
@@ -442,19 +376,18 @@ STAGE PLANS:
 
   Stage: Stage-0
     Fetch Operator
-      limit: 3
+      limit: -1
 
 
-PREHOOK: query: EXPLAIN EXTENDED SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3) a GROUP BY a.key, a.val
+PREHOOK: query: EXPLAIN EXTENDED SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows)) a GROUP BY a.key, a.val
 PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN EXTENDED SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3) a GROUP BY a.key, a.val
+POSTHOOK: query: EXPLAIN EXTENDED SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows)) a GROUP BY a.key, a.val
 POSTHOOK: type: QUERY
 ABSTRACT SYNTAX TREE:
-  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION map 1 'one' 2 'two' 3 'three')) key val)) (TOK_LIMIT 3))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) val)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL a) val))))
+  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src) (TOK_TABLESPLITSAMPLE TOK_ROWCOUNT 1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION explode (TOK_FUNCTION map 1 'one' 2 'two' 3 'three')) key val)))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) val)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL a) val))))
 
 STAGE DEPENDENCIES:
   Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
   Stage-0 is a root stage
 
 STAGE PLANS:
@@ -464,6 +397,7 @@ STAGE PLANS:
         a:src 
           TableScan
             alias: src
+            Row Limit Per Split: 1
             GatherStats: false
             Select Operator
               expressions:
@@ -472,15 +406,40 @@ STAGE PLANS:
               outputColumnNames: _col0
               UDTF Operator
                 function name: explode
-                Limit
-                  Reduce Output Operator
-                    sort order: 
-                    tag: -1
-                    value expressions:
+                Select Operator
+                  expressions:
+                        expr: key
+                        type: int
+                        expr: value
+                        type: string
+                  outputColumnNames: key, value
+                  Group By Operator
+                    aggregations:
+                          expr: count(1)
+                    bucketGroup: false
+                    keys:
                           expr: key
                           type: int
                           expr: value
                           type: string
+                    mode: hash
+                    outputColumnNames: _col0, _col1, _col2
+                    Reduce Output Operator
+                      key expressions:
+                            expr: _col0
+                            type: int
+                            expr: _col1
+                            type: string
+                      sort order: ++
+                      Map-reduce partition columns:
+                            expr: _col0
+                            type: int
+                            expr: _col1
+                            type: string
+                      tag: -1
+                      value expressions:
+                            expr: _col2
+                            type: bigint
       Path -> Alias:
 #### A masked pattern was here ####
       Path -> Partition:
@@ -496,7 +455,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
               name default.src
               numFiles 1
-              numPartitions 0
               numRows 0
               rawDataSize 0
               serialization.ddl struct src { string key, string value}
@@ -515,7 +473,6 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.src
                 numFiles 1
-                numPartitions 0
                 numRows 0
                 rawDataSize 0
                 serialization.ddl struct src { string key, string value}
@@ -530,93 +487,6 @@ STAGE PLANS:
         /src [a:src]
       Needs Tagging: false
       Reduce Operator Tree:
-        Extract
-          Limit
-            Select Operator
-              expressions:
-                    expr: _col0
-                    type: int
-                    expr: _col1
-                    type: string
-              outputColumnNames: _col0, _col1
-              Group By Operator
-                aggregations:
-                      expr: count(1)
-                bucketGroup: false
-                keys:
-                      expr: _col0
-                      type: int
-                      expr: _col1
-                      type: string
-                mode: hash
-                outputColumnNames: _col0, _col1, _col2
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 0
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-                  table:
-                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      properties:
-                        columns _col0,_col1,_col2
-                        columns.types int,string,bigint
-                        escape.delim \
-                        serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-                      serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-                  TotalFiles: 1
-                  GatherStats: false
-                  MultiFileSpray: false
-
-  Stage: Stage-2
-    Map Reduce
-      Alias -> Map Operator Tree:
-#### A masked pattern was here ####
-          TableScan
-            GatherStats: false
-            Reduce Output Operator
-              key expressions:
-                    expr: _col0
-                    type: int
-                    expr: _col1
-                    type: string
-              sort order: ++
-              Map-reduce partition columns:
-                    expr: _col0
-                    type: int
-                    expr: _col1
-                    type: string
-              tag: -1
-              value expressions:
-                    expr: _col2
-                    type: bigint
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: -mr-10002
-            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-            properties:
-              columns _col0,_col1,_col2
-              columns.types int,string,bigint
-              escape.delim \
-              serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-            serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-          
-              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-              properties:
-                columns _col0,_col1,_col2
-                columns.types int,string,bigint
-                escape.delim \
-                serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-              serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-      Needs Tagging: false
-      Reduce Operator Tree:
         Group By Operator
           aggregations:
                 expr: count(VALUE._col0)
@@ -663,22 +533,22 @@ STAGE PLANS:
       limit: -1
 
 
-PREHOOK: query: SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3
+PREHOOK: query: SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3
+POSTHOOK: query: SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 1	one
 2	two
 3	three
-PREHOOK: query: SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3) a GROUP BY a.key, a.val
+PREHOOK: query: SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows)) a GROUP BY a.key, a.val
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src LIMIT 3) a GROUP BY a.key, a.val
+POSTHOOK: query: SELECT a.key, a.val, count(1) FROM (SELECT explode(map(1,'one',2,'two',3,'three')) AS (key,val) FROM src tablesample (1 rows)) a GROUP BY a.key, a.val
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -694,11 +564,11 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table lazy_array_map (map_col map<int,string>, array_col array<string>)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@lazy_array_map
-PREHOOK: query: INSERT OVERWRITE TABLE lazy_array_map select map(1,'one',2,'two',3,'three'), array('100','200','300') FROM src LIMIT 1
+PREHOOK: query: INSERT OVERWRITE TABLE lazy_array_map select map(1,'one',2,'two',3,'three'), array('100','200','300') FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@lazy_array_map
-POSTHOOK: query: INSERT OVERWRITE TABLE lazy_array_map select map(1,'one',2,'two',3,'three'), array('100','200','300') FROM src LIMIT 1
+POSTHOOK: query: INSERT OVERWRITE TABLE lazy_array_map select map(1,'one',2,'two',3,'three'), array('100','200','300') FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@lazy_array_map

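The baseline changes above all follow one pattern: the .q tests now draw their single constant row with TABLESAMPLE instead of LIMIT, so the row restriction is applied in the TableScan itself (the new "Row Limit Per Split: 1" annotation) and, in the grouped queries, the extra map-reduce stage that previously ran the aggregation after the Limit operator (the removed Stage-2) is no longer generated. A minimal illustrative pair, not part of the committed test files, using the standard src test table; for these constant-only selects both forms return the same rows:

    -- old form: the full table feeds a separate Limit operator after the scan
    SELECT explode(array(1,2,3)) AS myCol FROM src LIMIT 3;

    -- new form: at most one input row is read per split, so the UDTF still
    -- emits all three array elements and no Limit operator is needed
    SELECT explode(array(1,2,3)) AS myCol FROM src TABLESAMPLE (1 ROWS);
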
Modified: hive/branches/tez/ql/src/test/results/clientpositive/udf_field.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/udf_field.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/udf_field.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/udf_field.q.out Tue Nov  5 07:01:32 2013
@@ -13,7 +13,7 @@ PREHOOK: query: SELECT
   field("x", "a", "b", "c", "d"),
   field(NULL, "a", "b", "c", "d"),
   field(0, 1, 2, 3, 4)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -21,7 +21,7 @@ POSTHOOK: query: SELECT
   field("x", "a", "b", "c", "d"),
   field(NULL, "a", "b", "c", "d"),
   field(0, 1, 2, 3, 4)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -32,7 +32,7 @@ PREHOOK: query: SELECT
   field("c", "a", "b", "c", "d"),
   field("d", "a", "b", "c", "d"),
   field("d", "a", "b", NULL, "d")
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -42,7 +42,7 @@ POSTHOOK: query: SELECT
   field("c", "a", "b", "c", "d"),
   field("d", "a", "b", "c", "d"),
   field("d", "a", "b", NULL, "d")
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -53,7 +53,7 @@ PREHOOK: query: SELECT
   field(3, 1, 2, 3, 4),
   field(4, 1, 2, 3, 4),
   field(4, 1, 2, NULL, 4)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -63,7 +63,7 @@ POSTHOOK: query: SELECT
   field(3, 1, 2, 3, 4),
   field(4, 1, 2, 3, 4),
   field(4, 1, 2, NULL, 4)
-FROM src LIMIT 1
+FROM src tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
@@ -73,10 +73,10 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE test_table(col1 STRING, col2 STRING) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@test_table
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE test_table
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE test_table
 PREHOOK: type: LOAD
 PREHOOK: Output: default@test_table
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE test_table
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE test_table
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@test_table
 PREHOOK: query: select col1,col2,
@@ -116,10 +116,10 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE test_table1(col1 int, col2 string) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@test_table1
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE test_table1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE test_table1
 PREHOOK: type: LOAD
 PREHOOK: Output: default@test_table1
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/kv1.txt' INTO TABLE test_table1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/kv1.txt' INTO TABLE test_table1
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@test_table1
 PREHOOK: query: select col1,col2,

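The udf_field.q.out hunks are the same mechanical updates: LIMIT 1 becomes TABLESAMPLE (1 ROWS), and the kv1.txt fixture is now loaded from ../../data/files instead of ../data/files. For reference, field() as exercised by these queries returns the 1-based position of its first argument among the remaining arguments, and 0 when there is no match; a small sketch, illustrative only and not taken from the test file:

    -- returns 2: 'b' is the second value in the list
    SELECT field('b', 'a', 'b', 'c') FROM src TABLESAMPLE (1 ROWS);

    -- returns 0: 'z' does not appear in the list
    SELECT field('z', 'a', 'b', 'c') FROM src TABLESAMPLE (1 ROWS);
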
Modified: hive/branches/tez/ql/src/test/results/clientpositive/udf_find_in_set.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/udf_find_in_set.q.out?rev=1538880&r1=1538879&r2=1538880&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/udf_find_in_set.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/udf_find_in_set.q.out Tue Nov  5 07:01:32 2013
@@ -24,32 +24,21 @@ ABSTRACT SYNTAX TREE:
   (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION find_in_set (. (TOK_TABLE_OR_COL src1) key) (TOK_FUNCTION concat (. (TOK_TABLE_OR_COL src1) key) ',' (. (TOK_TABLE_OR_COL src1) value)))))))
 
 STAGE DEPENDENCIES:
-  Stage-1 is a root stage
   Stage-0 is a root stage
 
 STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Alias -> Map Operator Tree:
-        src1 
-          TableScan
-            alias: src1
-            Select Operator
-              expressions:
-                    expr: find_in_set(key, concat(key, ',', value))
-                    type: int
-              outputColumnNames: _col0
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-                table:
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
   Stage: Stage-0
     Fetch Operator
       limit: -1
+      Processor Tree:
+        TableScan
+          alias: src1
+          Select Operator
+            expressions:
+                  expr: find_in_set(key, concat(key, ',', value))
+                  type: int
+            outputColumnNames: _col0
+            ListSink
 
 
 PREHOOK: query: FROM src1 SELECT find_in_set(src1.key,concat(src1.key,',',src1.value))
@@ -85,119 +74,119 @@ POSTHOOK: Input: default@src1
 1
 1
 1
-PREHOOK: query: SELECT find_in_set('ab','ab,abc,abcde') FROM src1 LIMIT 1
+PREHOOK: query: SELECT find_in_set('ab','ab,abc,abcde') FROM src1 tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src1
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT find_in_set('ab','ab,abc,abcde') FROM src1 LIMIT 1
+POSTHOOK: query: SELECT find_in_set('ab','ab,abc,abcde') FROM src1 tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src1
 #### A masked pattern was here ####
 1
-PREHOOK: query: SELECT find_in_set('ab','abc,ab,bbb') FROM src1 LIMIT 1
+PREHOOK: query: SELECT find_in_set('ab','abc,ab,bbb') FROM src1 tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src1
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT find_in_set('ab','abc,ab,bbb') FROM src1 LIMIT 1
+POSTHOOK: query: SELECT find_in_set('ab','abc,ab,bbb') FROM src1 tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src1
 #### A masked pattern was here ####
 2
-PREHOOK: query: SELECT find_in_set('ab','def,abc,ab') FROM src1 LIMIT 1
+PREHOOK: query: SELECT find_in_set('ab','def,abc,ab') FROM src1 tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src1
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT find_in_set('ab','def,abc,ab') FROM src1 LIMIT 1
+POSTHOOK: query: SELECT find_in_set('ab','def,abc,ab') FROM src1 tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src1
 #### A masked pattern was here ####
 3
-PREHOOK: query: SELECT find_in_set('ab','abc,abd,abf') FROM src1 LIMIT 1
+PREHOOK: query: SELECT find_in_set('ab','abc,abd,abf') FROM src1 tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src1
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT find_in_set('ab','abc,abd,abf') FROM src1 LIMIT 1
+POSTHOOK: query: SELECT find_in_set('ab','abc,abd,abf') FROM src1 tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src1
 #### A masked pattern was here ####
 0
-PREHOOK: query: SELECT find_in_set(null,'a,b,c') FROM src1 LIMIT 1
+PREHOOK: query: SELECT find_in_set(null,'a,b,c') FROM src1 tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src1
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT find_in_set(null,'a,b,c') FROM src1 LIMIT 1
+POSTHOOK: query: SELECT find_in_set(null,'a,b,c') FROM src1 tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src1
 #### A masked pattern was here ####
 NULL
-PREHOOK: query: SELECT find_in_set('a',null) FROM src1 LIMIT 1
+PREHOOK: query: SELECT find_in_set('a',null) FROM src1 tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src1
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT find_in_set('a',null) FROM src1 LIMIT 1
+POSTHOOK: query: SELECT find_in_set('a',null) FROM src1 tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src1
 #### A masked pattern was here ####
 NULL
-PREHOOK: query: SELECT find_in_set('', '') FROM src1 LIMIT 1
+PREHOOK: query: SELECT find_in_set('', '') FROM src1 tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src1
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT find_in_set('', '') FROM src1 LIMIT 1
+POSTHOOK: query: SELECT find_in_set('', '') FROM src1 tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src1
 #### A masked pattern was here ####
 1
-PREHOOK: query: SELECT find_in_set('',',') FROM src1 LIMIT 1
+PREHOOK: query: SELECT find_in_set('',',') FROM src1 tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src1
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT find_in_set('',',') FROM src1 LIMIT 1
+POSTHOOK: query: SELECT find_in_set('',',') FROM src1 tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src1
 #### A masked pattern was here ####
 1
-PREHOOK: query: SELECT find_in_set('','a,,b') FROM src1 LIMIT 1
+PREHOOK: query: SELECT find_in_set('','a,,b') FROM src1 tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src1
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT find_in_set('','a,,b') FROM src1 LIMIT 1
+POSTHOOK: query: SELECT find_in_set('','a,,b') FROM src1 tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src1
 #### A masked pattern was here ####
 2
-PREHOOK: query: SELECT find_in_set('','a,b,') FROM src1 LIMIT 1
+PREHOOK: query: SELECT find_in_set('','a,b,') FROM src1 tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src1
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT find_in_set('','a,b,') FROM src1 LIMIT 1
+POSTHOOK: query: SELECT find_in_set('','a,b,') FROM src1 tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src1
 #### A masked pattern was here ####
 3
-PREHOOK: query: SELECT find_in_set(',','a,b,d,') FROM src1 LIMIT 1
+PREHOOK: query: SELECT find_in_set(',','a,b,d,') FROM src1 tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src1
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT find_in_set(',','a,b,d,') FROM src1 LIMIT 1
+POSTHOOK: query: SELECT find_in_set(',','a,b,d,') FROM src1 tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src1
 #### A masked pattern was here ####
 0
-PREHOOK: query: SELECT find_in_set('a','') FROM src1 LIMIT 1
+PREHOOK: query: SELECT find_in_set('a','') FROM src1 tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src1
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT find_in_set('a','') FROM src1 LIMIT 1
+POSTHOOK: query: SELECT find_in_set('a','') FROM src1 tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src1
 #### A masked pattern was here ####
 0
-PREHOOK: query: SELECT find_in_set('a,','a,b,c,d') FROM src1 LIMIT 1
+PREHOOK: query: SELECT find_in_set('a,','a,b,c,d') FROM src1 tablesample (1 rows)
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src1
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT find_in_set('a,','a,b,c,d') FROM src1 LIMIT 1
+POSTHOOK: query: SELECT find_in_set('a,','a,b,c,d') FROM src1 tablesample (1 rows)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src1
 #### A masked pattern was here ####